Mirror of https://github.com/oven-sh/bun, synced 2026-02-20 15:51:46 +00:00

Compare commits: jarred/lin...ben/v8-obj (448 commits)
@@ -10,9 +10,10 @@ steps:
      blocked_state: "running"

  - label: ":pipeline:"
    command: "buildkite-agent pipeline upload .buildkite/ci.yml"
    agents:
      queue: "build-linux"
      queue: "build-darwin"
    command:
      - ".buildkite/scripts/prepare-build.sh"

  - if: "build.branch == 'main' && !build.pull_request.repository.fork"
    label: ":github:"
File diff suppressed because it is too large
.buildkite/scripts/build-bun.sh (new executable file, 62 lines)
@@ -0,0 +1,62 @@
#!/bin/bash

set -eo pipefail
source "$(dirname "$0")/env.sh"

function run_command() {
  set -x
  "$@"
  { set +x; } 2>/dev/null
}

cwd="$(pwd)"

mkdir -p build
source "$(dirname "$0")/download-artifact.sh" "build/bun-deps/**" --step "$BUILDKITE_GROUP_KEY-build-deps"
source "$(dirname "$0")/download-artifact.sh" "build/bun-zig.o" --step "$BUILDKITE_GROUP_KEY-build-zig"
source "$(dirname "$0")/download-artifact.sh" "build/bun-cpp-objects.a" --step "$BUILDKITE_GROUP_KEY-build-cpp" --split
cd build

run_command cmake .. "${CMAKE_FLAGS[@]}" \
  -GNinja \
  -DBUN_LINK_ONLY="1" \
  -DNO_CONFIGURE_DEPENDS="1" \
  -DBUN_ZIG_OBJ_DIR="$cwd/build" \
  -DBUN_CPP_ARCHIVE="$cwd/build/bun-cpp-objects.a" \
  -DBUN_DEPS_OUT_DIR="$cwd/build/bun-deps" \
  -DCMAKE_BUILD_TYPE="$CMAKE_BUILD_TYPE" \
  -DCPU_TARGET="$CPU_TARGET" \
  -DUSE_LTO="$USE_LTO" \
  -DUSE_DEBUG_JSC="$USE_DEBUG_JSC" \
  -DCANARY="$CANARY" \
  -DGIT_SHA="$GIT_SHA"
run_command ninja -v -j "$CPUS"
run_command ls

tag="bun-$BUILDKITE_GROUP_KEY"
if [ "$USE_LTO" == "OFF" ]; then
  # Remove OS check when LTO is enabled on macOS again
  if [[ "$tag" == *"darwin"* ]]; then
    tag="$tag-nolto"
  fi
fi

for name in bun bun-profile; do
  dir="$tag"
  if [ "$name" == "bun-profile" ]; then
    dir="$tag-profile"
  fi
  run_command chmod +x "$name"
  run_command "./$name" --revision
  run_command mkdir -p "$dir"
  run_command mv "$name" "$dir/$name"
  run_command zip -r "$dir.zip" "$dir"
  source "$cwd/.buildkite/scripts/upload-artifact.sh" "$dir.zip"
  # Temporarily disabled so CI can run:
  # this fails because $name is now at $dir/$name, and if changed to $dir/$name we get ENOENT reading "bun:internal-for-testing"
  # if [ "$name" == "bun-profile" ]; then
  #   export BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING="1"
  #   run_command "./$name" -e "require('fs').writeFileSync('./features.json', JSON.stringify(require('bun:internal-for-testing').crash_handler.getFeatureData()))"
  #   source "$cwd/.buildkite/scripts/upload-artifact.sh" "features.json"
  # fi
done
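Every script in this set defines the same `run_command` helper: `set -x` makes Bash echo the expanded command before running it, and wrapping `set +x` in a group whose stderr goes to `/dev/null` keeps the `+ set +x` trace line itself out of the log. A standalone sketch of the pattern:

```bash
#!/bin/bash
# Sketch of the run_command tracing helper used throughout these scripts.
function run_command() {
  set -x                    # echo the expanded command to stderr before running it
  "$@"                      # run the arguments as a command, preserving quoting
  { set +x; } 2>/dev/null   # turn tracing off without logging "+ set +x"
}

run_command echo "hello"    # logs "+ echo hello", then prints "hello"
```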
.buildkite/scripts/build-cpp.sh (new executable file, 37 lines)
@@ -0,0 +1,37 @@
#!/bin/bash

set -eo pipefail
export FORCE_UPDATE_SUBMODULES=1

# env.sh calls update_submodules.sh
source "$(dirname "$0")/env.sh"

{ set +x; } 2>/dev/null

function run_command() {
  set -x
  "$@"
  { set +x; } 2>/dev/null
}

mkdir -p build
cd build
mkdir -p tmp_modules tmp_functions js codegen

run_command cmake .. "${CMAKE_FLAGS[@]}" \
  -GNinja \
  -DBUN_CPP_ONLY="1" \
  -DNO_CONFIGURE_DEPENDS="1" \
  -DCMAKE_BUILD_TYPE="$CMAKE_BUILD_TYPE" \
  -DCPU_TARGET="$CPU_TARGET" \
  -DUSE_LTO="$USE_LTO" \
  -DUSE_DEBUG_JSC="$USE_DEBUG_JSC" \
  -DCANARY="$CANARY" \
  -DGIT_SHA="$GIT_SHA"

chmod +x compile-cpp-only.sh
source compile-cpp-only.sh -v -j "$CPUS"
{ set +x; } 2>/dev/null

cd ..
source "$(dirname "$0")/upload-artifact.sh" "build/bun-cpp-objects.a" --split
.buildkite/scripts/build-deps.sh (new executable file, 22 lines)
@@ -0,0 +1,22 @@
#!/bin/bash

set -eo pipefail
source "$(dirname "$0")/env.sh"
source "$(realpath $(dirname "$0")/../../scripts/all-dependencies.sh)"

artifacts=(
  libcrypto.a libssl.a libdecrepit.a
  libcares.a
  libarchive.a
  liblolhtml.a
  libmimalloc.a libmimalloc.o
  libtcc.a
  libz.a
  libzstd.a
  libdeflate.a
  liblshpack.a
)

for artifact in "${artifacts[@]}"; do
  source "$(dirname "$0")/upload-artifact.sh" "build/bun-deps/$artifact"
done
.buildkite/scripts/build-old-js.sh (new executable file, 40 lines)
@@ -0,0 +1,40 @@
#!/bin/bash

set -eo pipefail
source "$(dirname "$0")/env.sh"

function assert_bun() {
  if ! command -v bun &>/dev/null; then
    echo "error: bun is not installed" 1>&2
    exit 1
  fi
}

function assert_make() {
  if ! command -v make &>/dev/null; then
    echo "error: make is not installed" 1>&2
    exit 1
  fi
}

function run_command() {
  set -x
  "$@"
  { set +x; } 2>/dev/null
}

function build_node_fallbacks() {
  local cwd="src/node-fallbacks"
  run_command bun install --cwd "$cwd" --frozen-lockfile
  run_command bun run --cwd "$cwd" build
}

function build_old_js() {
  run_command bun install --frozen-lockfile
  run_command make runtime_js fallback_decoder bun_error
}

assert_bun
assert_make
build_node_fallbacks
build_old_js
.buildkite/scripts/build-zig.sh (new executable file, 101 lines)
@@ -0,0 +1,101 @@
#!/bin/bash

set -euo pipefail

export CMAKE_FLAGS=""
source "$(dirname "$0")/env.sh"

if [[ -n "$CMAKE_FLAGS" ]]; then
  echo "CMAKE_FLAGS should be empty"
  exit 1
fi

function assert_target() {
  local arch="${2-$(uname -m)}"
  case "$(echo "$arch" | tr '[:upper:]' '[:lower:]')" in
    x64 | x86_64 | amd64)
      export ZIG_ARCH="x86_64"
      if [[ "$BUILDKITE_STEP_KEY" == *"baseline"* ]]; then
        export ZIG_CPU_TARGET="nehalem"
      else
        export ZIG_CPU_TARGET="haswell"
      fi
      ;;
    aarch64 | arm64)
      export ZIG_ARCH="aarch64"
      export ZIG_CPU_TARGET="native"
      ;;
    *)
      echo "error: Unsupported architecture: $arch" 1>&2
      exit 1
      ;;
  esac
  local os="${1-$(uname -s)}"
  case "$(echo "$os" | tr '[:upper:]' '[:lower:]')" in
    linux)
      export ZIG_OS="linux"
      export ZIG_TARGET="$ZIG_ARCH-linux-gnu"
      ;;
    darwin)
      export ZIG_OS="macos"
      export ZIG_TARGET="$ZIG_ARCH-macos-none"
      ;;
    windows)
      export ZIG_OS="windows"
      export ZIG_TARGET="$ZIG_ARCH-windows-msvc"
      ;;
    *)
      echo "error: Unsupported operating system: $os" 1>&2
      exit 1
      ;;
  esac
}

function run_command() {
  set -x
  "$@"
  { set +x; } 2>/dev/null
}

assert_target "$@"

# Since the zig build depends on files from the zig submodule,
# make sure to update the submodule before building.
run_command git submodule update --init --recursive --progress --depth=1 --checkout src/deps/zig

# TODO: Move these to be part of the CMake build
source "$(dirname "$0")/build-old-js.sh"

cwd="$(pwd)"
mkdir -p build
cd build

# In Buildkite, this script is run on a macOS machine even when compiling
# for Windows, so CMake's Windows detection does not run for this logic.
ZIG_OPTIMIZE="ReleaseFast"
if [[ "$ZIG_OS" == "windows" ]]; then
  ZIG_OPTIMIZE="ReleaseSafe"
fi

run_command cmake .. "${CMAKE_FLAGS[@]}" \
  -GNinja \
  -DNO_CONFIGURE_DEPENDS="1" \
  -DNO_CODEGEN="0" \
  -DWEBKIT_DIR="omit" \
  -DBUN_ZIG_OBJ_DIR="$cwd/build" \
  -DZIG_LIB_DIR="$cwd/src/deps/zig/lib" \
  -DCMAKE_BUILD_TYPE="$CMAKE_BUILD_TYPE" \
  -DARCH="$ZIG_ARCH" \
  -DCPU_TARGET="$ZIG_CPU_TARGET" \
  -DZIG_TARGET="$ZIG_TARGET" \
  -DUSE_LTO="$USE_LTO" \
  -DUSE_DEBUG_JSC="$USE_DEBUG_JSC" \
  -DCANARY="$CANARY" \
  -DZIG_OPTIMIZE="$ZIG_OPTIMIZE" \
  -DGIT_SHA="$GIT_SHA"

export ONLY_ZIG="1"
run_command ninja "$cwd/build/bun-zig.o" -v -j "$CPUS"

cd ..
source "$(dirname "$0")/upload-artifact.sh" "build/bun-zig.o"
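The `ZIG_TARGET` values assembled above (`$ZIG_ARCH-linux-gnu`, `$ZIG_ARCH-macos-none`, `$ZIG_ARCH-windows-msvc`) are ordinary Zig `arch-os-abi` triples, which is what lets a macOS agent produce the Windows object file. A hand-run sketch of the kind of cross-compile the CMake invocation ultimately drives (the source path and output name here are illustrative, not taken from this diff):

```bash
# Illustrative: cross-compile a Zig object for Windows x64 from any host,
# using the same target/cpu/optimize values this script computes.
zig build-obj src/main.zig \
  -target x86_64-windows-msvc \
  -mcpu=haswell \
  -O ReleaseSafe \
  -femit-bin=build/bun-zig.o
```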
.buildkite/scripts/download-artifact.ps1 (new executable file, 47 lines)
@@ -0,0 +1,47 @@
param (
  [Parameter(Mandatory=$true)]
  [string[]] $Paths,
  [switch] $Split
)

$ErrorActionPreference = "Stop"

function Assert-Buildkite-Agent() {
  if (-not (Get-Command "buildkite-agent" -ErrorAction SilentlyContinue)) {
    Write-Error "Cannot find buildkite-agent, please install it: https://buildkite.com/docs/agent/v3/install"
    exit 1
  }
}

function Assert-Join-File() {
  if (-not (Get-Command "Join-File" -ErrorAction SilentlyContinue)) {
    Write-Error "Cannot find Join-File, please install it: https://www.powershellgallery.com/packages/FileSplitter/1.3"
    exit 1
  }
}

function Download-Buildkite-Artifact() {
  param (
    [Parameter(Mandatory=$true)]
    [string] $Path
  )
  if ($Split) {
    & buildkite-agent artifact download "$Path.*" --debug --debug-http
    Join-File -Path "$(Resolve-Path .)\$Path" -Verbose -DeletePartFiles
  } else {
    & buildkite-agent artifact download "$Path" --debug --debug-http
  }
  if (-not (Test-Path $Path)) {
    Write-Error "Could not find artifact: $Path"
    exit 1
  }
}

Assert-Buildkite-Agent
if ($Split) {
  Assert-Join-File
}

foreach ($Path in $Paths) {
  Download-Buildkite-Artifact $Path
}
.buildkite/scripts/download-artifact.sh (new executable file, 59 lines)
@@ -0,0 +1,59 @@
#!/bin/bash

set -euo pipefail

function assert_buildkite_agent() {
  if ! command -v buildkite-agent &>/dev/null; then
    echo "error: Cannot find buildkite-agent, please install it:"
    echo "https://buildkite.com/docs/agent/v3/install"
    exit 1
  fi
}

function download_buildkite_artifact() {
  # Check if at least one argument is provided
  if [ $# -eq 0 ]; then
    echo "error: No path provided for artifact download"
    exit 1
  fi

  local path="$1"
  shift
  local split="0"
  local args=()

  while [ $# -gt 0 ]; do
    case "$1" in
      --split)
        split="1"
        shift
        ;;
      *)
        args+=("$1")
        shift
        ;;
    esac
  done

  if [ "$split" == "1" ]; then
    run_command buildkite-agent artifact download "$path.*" . "${args[@]:-}"
    run_command cat "$path".?? >"$path"
    run_command rm -f "$path".??
  else
    run_command buildkite-agent artifact download "$path" . "${args[@]:-}"
  fi

  if [[ "$path" != *"*"* ]] && [ ! -f "$path" ]; then
    echo "error: Could not find artifact: $path"
    exit 1
  fi
}

function run_command() {
  set -x
  "$@"
  { set +x; } 2>/dev/null
}

assert_buildkite_agent
download_buildkite_artifact "$@"
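The `--split` mode here is the inverse of the one in `upload-artifact.sh` further below: the uploader chunks a large file into numbered 50 MB parts with `split -b 50MB -d`, and this script restores the original by concatenating the parts in suffix order. The round trip, with an illustrative file name:

```bash
# Illustrative round trip of the split/join convention used by these scripts.
split -b 50MB -d bun-cpp-objects.a bun-cpp-objects.a.  # writes .00, .01, ...
cat bun-cpp-objects.a.?? > bun-cpp-objects.a           # reassemble in suffix order
rm -f bun-cpp-objects.a.??                             # drop the parts
```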
.buildkite/scripts/env.sh (new executable file, 146 lines)
@@ -0,0 +1,146 @@
#!/bin/bash

set -euo pipefail

BUILDKITE_REPO=${BUILDKITE_REPO:-}
BUILDKITE_CLEAN_CHECKOUT=${BUILDKITE_CLEAN_CHECKOUT:-}
BUILDKITE_BRANCH=${BUILDKITE_BRANCH:-}
BUILDKITE_COMMIT=${BUILDKITE_COMMIT:-}
BUILDKITE_GROUP_KEY=${BUILDKITE_GROUP_KEY:-}
CCACHE_DIR=${CCACHE_DIR:-}
SCCACHE_DIR=${SCCACHE_DIR:-}
ZIG_LOCAL_CACHE_DIR=${ZIG_LOCAL_CACHE_DIR:-}
ZIG_GLOBAL_CACHE_DIR=${ZIG_GLOBAL_CACHE_DIR:-}
BUN_DEPS_CACHE_DIR=${BUN_DEPS_CACHE_DIR:-}
BUILDKITE_STEP_KEY=${BUILDKITE_STEP_KEY:-}

ROOT_DIR="$(realpath "$(dirname "$0")/../../")"

# Fail if we cannot find the root directory
if [ ! -d "$ROOT_DIR" ]; then
  echo "error: Cannot find root directory: '$ROOT_DIR'" 1>&2
  exit 1
fi

function assert_os() {
  local os="$(uname -s)"
  case "$os" in
    Linux)
      echo "linux"
      ;;
    Darwin)
      echo "darwin"
      ;;
    *)
      echo "error: Unsupported operating system: $os" 1>&2
      exit 1
      ;;
  esac
}

function assert_arch() {
  local arch="$(uname -m)"
  case "$arch" in
    aarch64 | arm64)
      echo "aarch64"
      ;;
    x86_64 | amd64)
      echo "x64"
      ;;
    *)
      echo "error: Unknown architecture: $arch" 1>&2
      exit 1
      ;;
  esac
}

function assert_build() {
  if [ -z "$BUILDKITE_REPO" ]; then
    echo "error: Cannot find repository for this build"
    exit 1
  fi
  if [ -z "$BUILDKITE_COMMIT" ]; then
    echo "error: Cannot find commit for this build"
    exit 1
  fi
  if [ -z "$BUILDKITE_STEP_KEY" ]; then
    echo "error: Cannot find step key for this build"
    exit 1
  fi
  if [ -n "$BUILDKITE_GROUP_KEY" ] && [[ "$BUILDKITE_STEP_KEY" != "$BUILDKITE_GROUP_KEY"* ]]; then
    echo "error: Build step '$BUILDKITE_STEP_KEY' does not start with group key '$BUILDKITE_GROUP_KEY'"
    exit 1
  fi
  # Skip os and arch checks for Zig, since it's cross-compiled on macOS
  if [[ "$BUILDKITE_STEP_KEY" != *"zig"* ]]; then
    local os="$(assert_os)"
    if [[ "$BUILDKITE_STEP_KEY" != *"$os"* ]]; then
      echo "error: Build step '$BUILDKITE_STEP_KEY' does not match operating system '$os'"
      exit 1
    fi
    local arch="$(assert_arch)"
    if [[ "$BUILDKITE_STEP_KEY" != *"$arch"* ]]; then
      echo "error: Build step '$BUILDKITE_STEP_KEY' does not match architecture '$arch'"
      exit 1
    fi
  fi
}

function assert_buildkite_agent() {
  if ! command -v buildkite-agent &>/dev/null; then
    echo "error: Cannot find buildkite-agent, please install it:"
    echo "https://buildkite.com/docs/agent/v3/install"
    exit 1
  fi
}

function export_environment() {
  source "${ROOT_DIR}/scripts/env.sh"
  source "${ROOT_DIR}/scripts/update-submodules.sh"

  { set +x; } 2>/dev/null
  export GIT_SHA="$BUILDKITE_COMMIT"
  if [ "$BUILDKITE_CLEAN_CHECKOUT" == "true" ] || [ "$BUILDKITE_BRANCH" == "main" ]; then
    local tmpdir="$(mktemp -d 2>/dev/null || mktemp -d -t 'new')"
    export CCACHE_DIR="$tmpdir/.cache/ccache"
    export SCCACHE_DIR="$tmpdir/.cache/sccache"
    export ZIG_LOCAL_CACHE_DIR="$tmpdir/.cache/zig-cache"
    export ZIG_GLOBAL_CACHE_DIR="$tmpdir/.cache/zig-cache"
    export BUN_DEPS_CACHE_DIR="$tmpdir/.cache/bun-deps"
    export CCACHE_RECACHE="1"
  else
    export CCACHE_DIR="$HOME/.cache/ccache/$BUILDKITE_STEP_KEY"
    export SCCACHE_DIR="$HOME/.cache/sccache/$BUILDKITE_STEP_KEY"
    export ZIG_LOCAL_CACHE_DIR="$HOME/.cache/zig-cache/$BUILDKITE_STEP_KEY"
    export ZIG_GLOBAL_CACHE_DIR="$HOME/.cache/zig-cache/$BUILDKITE_STEP_KEY"
    export BUN_DEPS_CACHE_DIR="$HOME/.cache/bun-deps/$BUILDKITE_STEP_KEY"
  fi
  if [ "$(assert_os)" == "linux" ]; then
    export USE_LTO="ON"
  fi
  if [ "$(assert_arch)" == "aarch64" ]; then
    export CPU_TARGET="native"
  elif [[ "$BUILDKITE_STEP_KEY" == *"baseline"* ]]; then
    export CPU_TARGET="nehalem"
  else
    export CPU_TARGET="haswell"
  fi
  if buildkite-agent meta-data exists release &>/dev/null; then
    export CMAKE_BUILD_TYPE="$(buildkite-agent meta-data get release)"
  else
    export CMAKE_BUILD_TYPE="Release"
  fi
  if buildkite-agent meta-data exists canary &>/dev/null; then
    export CANARY="$(buildkite-agent meta-data get canary)"
  else
    export CANARY="1"
  fi
  if buildkite-agent meta-data exists assertions &>/dev/null; then
    export USE_DEBUG_JSC="$(buildkite-agent meta-data get assertions)"
  else
    export USE_DEBUG_JSC="OFF"
  fi
}

assert_build
assert_buildkite_agent
export_environment
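The block of `VAR=${VAR:-}` assignments at the top of env.sh is not busywork: the script runs under `set -euo pipefail`, and with `-u` any expansion of an unset variable aborts the shell. Pre-assigning empty defaults converts "unset" into "empty", so the later `-z` checks can fail with a readable error instead. A minimal illustration of the same pattern:

```bash
#!/bin/bash
set -euo pipefail

# Without this default, expanding "$BUILDKITE_REPO" below would abort
# with an "unbound variable" error when the agent has not exported it.
BUILDKITE_REPO=${BUILDKITE_REPO:-}

if [ -z "$BUILDKITE_REPO" ]; then
  echo "error: Cannot find repository for this build"
  exit 1
fi
```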
.buildkite/scripts/prepare-build.sh (new executable file, 97 lines)
@@ -0,0 +1,97 @@
#!/bin/bash

set -eo pipefail

function assert_build() {
  if [ -z "$BUILDKITE_REPO" ]; then
    echo "error: Cannot find repository for this build"
    exit 1
  fi
  if [ -z "$BUILDKITE_COMMIT" ]; then
    echo "error: Cannot find commit for this build"
    exit 1
  fi
}

function assert_buildkite_agent() {
  if ! command -v buildkite-agent &> /dev/null; then
    echo "error: Cannot find buildkite-agent, please install it:"
    echo "https://buildkite.com/docs/agent/v3/install"
    exit 1
  fi
}

function assert_jq() {
  assert_command "jq" "jq" "https://stedolan.github.io/jq/"
}

function assert_curl() {
  assert_command "curl" "curl" "https://curl.se/download.html"
}

function assert_command() {
  local command="$1"
  local package="$2"
  local help_url="$3"
  if ! command -v "$command" &> /dev/null; then
    echo "warning: $command is not installed, installing..."
    if command -v brew &> /dev/null; then
      HOMEBREW_NO_AUTO_UPDATE=1 brew install "$package"
    else
      echo "error: Cannot install $command, please install it"
      if [ -n "$help_url" ]; then
        echo ""
        echo "hint: See $help_url for help"
      fi
      exit 1
    fi
  fi
}

function assert_release() {
  if [ "$RELEASE" == "1" ]; then
    run_command buildkite-agent meta-data set canary "0"
  fi
}

function assert_canary() {
  local canary="$(buildkite-agent meta-data get canary 2>/dev/null)"
  if [ -z "$canary" ]; then
    local repo=$(echo "$BUILDKITE_REPO" | sed -E 's#https://github.com/([^/]+)/([^/]+).git#\1/\2#g')
    local tag="$(curl -sL "https://api.github.com/repos/$repo/releases/latest" | jq -r ".tag_name")"
    if [ "$tag" == "null" ]; then
      canary="1"
    else
      local revision=$(curl -sL "https://api.github.com/repos/$repo/compare/$tag...$BUILDKITE_COMMIT" | jq -r ".ahead_by")
      if [ "$revision" == "null" ]; then
        canary="1"
      else
        canary="$revision"
      fi
    fi
    run_command buildkite-agent meta-data set canary "$canary"
  fi
}

function upload_buildkite_pipeline() {
  local path="$1"
  if [ ! -f "$path" ]; then
    echo "error: Cannot find pipeline: $path"
    exit 1
  fi
  run_command buildkite-agent pipeline upload "$path"
}

function run_command() {
  set -x
  "$@"
  { set +x; } 2>/dev/null
}

assert_build
assert_buildkite_agent
assert_jq
assert_curl
assert_release
assert_canary
upload_buildkite_pipeline ".buildkite/ci.yml"
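`assert_canary` turns "commits since the last release" into the canary build number by reading the `ahead_by` field of GitHub's compare API. Run by hand against this repository, the computation looks roughly like this (the branch name and output are illustrative):

```bash
# Illustrative: compute the canary revision the way assert_canary does.
repo="oven-sh/bun"
tag="$(curl -sL "https://api.github.com/repos/$repo/releases/latest" | jq -r '.tag_name')"
curl -sL "https://api.github.com/repos/$repo/compare/$tag...main" | jq -r '.ahead_by'
```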
.buildkite/scripts/upload-artifact.ps1 (new executable file, 47 lines)
@@ -0,0 +1,47 @@
param (
  [Parameter(Mandatory=$true)]
  [string[]] $Paths,
  [switch] $Split
)

$ErrorActionPreference = "Stop"

function Assert-Buildkite-Agent() {
  if (-not (Get-Command "buildkite-agent" -ErrorAction SilentlyContinue)) {
    Write-Error "Cannot find buildkite-agent, please install it: https://buildkite.com/docs/agent/v3/install"
    exit 1
  }
}

function Assert-Split-File() {
  if (-not (Get-Command "Split-File" -ErrorAction SilentlyContinue)) {
    Write-Error "Cannot find Split-File, please install it: https://www.powershellgallery.com/packages/FileSplitter/1.3"
    exit 1
  }
}

function Upload-Buildkite-Artifact() {
  param (
    [Parameter(Mandatory=$true)]
    [string] $Path
  )
  if (-not (Test-Path $Path)) {
    Write-Error "Could not find artifact: $Path"
    exit 1
  }
  if ($Split) {
    Remove-Item -Path "$Path.*" -Force
    Split-File -Path (Resolve-Path $Path) -PartSizeBytes "50MB" -Verbose
    $Path = "$Path.*"
  }
  & buildkite-agent artifact upload "$Path" --debug --debug-http
}

Assert-Buildkite-Agent
if ($Split) {
  Assert-Split-File
}

foreach ($Path in $Paths) {
  Upload-Buildkite-Artifact $Path
}
.buildkite/scripts/upload-artifact.sh (new executable file, 71 lines)
@@ -0,0 +1,71 @@
#!/bin/bash

set -euo pipefail

function assert_buildkite_agent() {
  if ! command -v buildkite-agent &>/dev/null; then
    echo "error: Cannot find buildkite-agent, please install it:"
    echo "https://buildkite.com/docs/agent/v3/install"
    exit 1
  fi
}

function assert_split() {
  if ! command -v split &>/dev/null; then
    echo "error: Cannot find split, please install it:"
    echo "https://www.gnu.org/software/coreutils/split"
    exit 1
  fi
}

function upload_buildkite_artifact() {
  if [ -z "${1:-}" ]; then
    return
  fi

  local path="$1"
  shift
  local split="0"
  local args=() # Initialize args as an empty array
  while true; do
    if [ -z "${1:-}" ]; then
      break
    fi
    case "$1" in
      --split)
        split="1"
        shift
        ;;
      *)
        args+=("$1")
        shift
        ;;
    esac
  done
  if [ ! -f "$path" ]; then
    echo "error: Could not find artifact: $path"
    exit 1
  fi
  if [ "$split" == "1" ]; then
    run_command rm -f "$path."*
    run_command split -b 50MB -d "$path" "$path."
    if [ "${#args[@]}" -gt 0 ]; then
      run_command buildkite-agent artifact upload "$path.*" "${args[@]}"
    else
      run_command buildkite-agent artifact upload "$path.*"
    fi
  elif [ "${#args[@]}" -gt 0 ]; then
    run_command buildkite-agent artifact upload "$path" "${args[@]}"
  else
    run_command buildkite-agent artifact upload "$path"
  fi
}

function run_command() {
  set -x
  "$@"
  { set +x; } 2>/dev/null
}

assert_buildkite_agent
upload_buildkite_artifact "$@"
@@ -3,7 +3,19 @@

set -eo pipefail

function assert_main() {
  if [[ "$BUILDKITE_PULL_REQUEST_REPO" && "$BUILDKITE_REPO" != "$BUILDKITE_PULL_REQUEST_REPO" ]]; then
  if [ "$RELEASE" == "1" ]; then
    echo "info: Skipping canary release because this is a release build"
    exit 0
  fi
  if [ -z "$BUILDKITE_REPO" ]; then
    echo "error: Cannot find repository for this build"
    exit 1
  fi
  if [ -z "$BUILDKITE_COMMIT" ]; then
    echo "error: Cannot find commit for this build"
    exit 1
  fi
  if [ -n "$BUILDKITE_PULL_REQUEST_REPO" ] && [ "$BUILDKITE_REPO" != "$BUILDKITE_PULL_REQUEST_REPO" ]; then
    echo "error: Cannot upload release from a fork"
    exit 1
  fi
@@ -18,77 +30,191 @@ function assert_main() {
}

function assert_buildkite_agent() {
  if ! command -v buildkite-agent &> /dev/null; then
  if ! command -v "buildkite-agent" &> /dev/null; then
    echo "error: Cannot find buildkite-agent, please install it:"
    echo "https://buildkite.com/docs/agent/v3/install"
    exit 1
  fi
}

function assert_gh() {
  if ! command -v gh &> /dev/null; then
    echo "warning: gh is not installed, installing..."
function assert_github() {
  assert_command "gh" "gh" "https://github.com/cli/cli#installation"
  assert_buildkite_secret "GITHUB_TOKEN"
  # gh expects the token in $GH_TOKEN
  export GH_TOKEN="$GITHUB_TOKEN"
}

function assert_aws() {
  assert_command "aws" "awscli" "https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html"
  for secret in "AWS_ACCESS_KEY_ID" "AWS_SECRET_ACCESS_KEY" "AWS_ENDPOINT"; do
    assert_buildkite_secret "$secret"
  done
  assert_buildkite_secret "AWS_BUCKET" --skip-redaction
}

function assert_sentry() {
  assert_command "sentry-cli" "getsentry/tools/sentry-cli" "https://docs.sentry.io/cli/installation/"
  for secret in "SENTRY_AUTH_TOKEN" "SENTRY_ORG" "SENTRY_PROJECT"; do
    assert_buildkite_secret "$secret"
  done
}

function run_command() {
  set -x
  "$@"
  { set +x; } 2>/dev/null
}

function assert_command() {
  local command="$1"
  local package="$2"
  local help_url="$3"
  if ! command -v "$command" &> /dev/null; then
    echo "warning: $command is not installed, installing..."
    if command -v brew &> /dev/null; then
      brew install gh
      HOMEBREW_NO_AUTO_UPDATE=1 run_command brew install "$package"
    else
      echo "error: Cannot install gh, please install it:"
      echo "https://github.com/cli/cli#installation"
      echo "error: Cannot install $command, please install it"
      if [ -n "$help_url" ]; then
        echo ""
        echo "hint: See $help_url for help"
      fi
      exit 1
    fi
  fi
}

function assert_gh_token() {
  local token=$(buildkite-agent secret get GITHUB_TOKEN)
  if [ -z "$token" ]; then
    echo "error: Cannot find GITHUB_TOKEN secret"
function assert_buildkite_secret() {
  local key="$1"
  local value=$(buildkite-agent secret get "$key" ${@:2})
  if [ -z "$value" ]; then
    echo "error: Cannot find $key secret"
    echo ""
    echo "hint: Create a secret named GITHUB_TOKEN with a GitHub access token:"
    echo "hint: Create a secret named $key with a value:"
    echo "https://buildkite.com/docs/pipelines/buildkite-secrets"
    exit 1
  fi
  export GH_TOKEN="$token"
  export "$key"="$value"
}

function download_artifact() {
  local name=$1
  buildkite-agent artifact download "$name" .
  if [ ! -f "$name" ]; then
function release_tag() {
  local version="$1"
  if [ "$version" == "canary" ]; then
    echo "canary"
  else
    echo "bun-v$version"
  fi
}

function create_sentry_release() {
  local version="$1"
  local release="$version"
  if [ "$version" == "canary" ]; then
    release="$BUILDKITE_COMMIT-canary"
  fi
  run_command sentry-cli releases new "$release" --finalize
  run_command sentry-cli releases set-commits "$release" --auto --ignore-missing
  if [ "$version" == "canary" ]; then
    run_command sentry-cli deploys new --env="canary" --release="$release"
  fi
}

function download_buildkite_artifact() {
  local name="$1"
  local dir="$2"
  if [ -z "$dir" ]; then
    dir="."
  fi
  run_command buildkite-agent artifact download "$name" "$dir"
  if [ ! -f "$dir/$name" ]; then
    echo "error: Cannot find Buildkite artifact: $name"
    exit 1
  fi
}

function upload_assets() {
  local tag=$1
  local files=${@:2}
  gh release upload "$tag" $files --clobber --repo "$BUILDKITE_REPO"
function upload_github_asset() {
  local version="$1"
  local tag="$(release_tag "$version")"
  local file="$2"
  run_command gh release upload "$tag" "$file" --clobber --repo "$BUILDKITE_REPO"

  # Sometimes the upload fails, maybe this is a race condition in the gh CLI?
  while [ "$(gh release view "$tag" --repo "$BUILDKITE_REPO" | grep -c "$file")" -eq 0 ]; do
    echo "warn: Uploading $file to $tag failed, retrying..."
    sleep "$((RANDOM % 5 + 1))"
    run_command gh release upload "$tag" "$file" --clobber --repo "$BUILDKITE_REPO"
  done
}

assert_main
assert_buildkite_agent
assert_gh
assert_gh_token
function update_github_release() {
  local version="$1"
  local tag="$(release_tag "$version")"
  if [ "$tag" == "canary" ]; then
    sleep 5 # There is possibly a race condition where this overwrites artifacts?
    run_command gh release edit "$tag" --repo "$BUILDKITE_REPO" \
      --notes "This release of Bun corresponds to the commit: $BUILDKITE_COMMIT"
  fi
}

declare artifacts=(
  bun-darwin-aarch64.zip
  bun-darwin-aarch64-profile.zip
  bun-darwin-x64.zip
  bun-darwin-x64-profile.zip
  bun-linux-aarch64.zip
  bun-linux-aarch64-profile.zip
  bun-linux-x64.zip
  bun-linux-x64-profile.zip
  bun-linux-x64-baseline.zip
  bun-linux-x64-baseline-profile.zip
  bun-windows-x64.zip
  bun-windows-x64-profile.zip
  bun-windows-x64-baseline.zip
  bun-windows-x64-baseline-profile.zip
)
function upload_s3_file() {
  local folder="$1"
  local file="$2"
  run_command aws --endpoint-url="$AWS_ENDPOINT" s3 cp "$file" "s3://$AWS_BUCKET/$folder/$file"
}

for artifact in "${artifacts[@]}"; do
  download_artifact $artifact
done
function create_release() {
  assert_main
  assert_buildkite_agent
  assert_github
  assert_aws
  assert_sentry

  upload_assets "canary" "${artifacts[@]}"
  local tag="$1" # 'canary' or 'x.y.z'
  local artifacts=(
    bun-darwin-aarch64.zip
    bun-darwin-aarch64-profile.zip
    bun-darwin-x64.zip
    bun-darwin-x64-profile.zip
    bun-linux-aarch64.zip
    bun-linux-aarch64-profile.zip
    bun-linux-x64.zip
    bun-linux-x64-profile.zip
    bun-linux-x64-baseline.zip
    bun-linux-x64-baseline-profile.zip
    bun-windows-x64.zip
    bun-windows-x64-profile.zip
    bun-windows-x64-baseline.zip
    bun-windows-x64-baseline-profile.zip
  )

  function upload_artifact() {
    local artifact="$1"
    download_buildkite_artifact "$artifact"
    if [ "$tag" == "canary" ]; then
      upload_s3_file "releases/$BUILDKITE_COMMIT-canary" "$artifact" &
    else
      upload_s3_file "releases/$BUILDKITE_COMMIT" "$artifact" &
    fi
    upload_s3_file "releases/$tag" "$artifact" &
    upload_github_asset "$tag" "$artifact" &
    wait
  }

  for artifact in "${artifacts[@]}"; do
    upload_artifact "$artifact"
  done

  update_github_release "$tag"
  create_sentry_release "$tag"
}

function assert_canary() {
  local canary="$(buildkite-agent meta-data get canary 2>/dev/null)"
  if [ -z "$canary" ] || [ "$canary" == "0" ]; then
    echo "warn: Skipping release because this is not a canary build"
    exit 0
  fi
}

assert_canary
create_release "canary"
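The `upload_github_asset` loop above retries `gh release upload` until the asset actually shows up in `gh release view`, sleeping a random 1 to 5 seconds between attempts so it does not hammer the API. The same idea as a generic helper (a sketch, not code from this diff; `retry_until` is a hypothetical name):

```bash
# Hypothetical helper generalizing the upload retry pattern above:
# run a command, then repeat it until a verification command succeeds.
function retry_until() {
  local check="$1"
  shift
  "$@"
  while ! eval "$check"; do
    echo "warn: check failed after running: $*, retrying..." 1>&2
    sleep "$((RANDOM % 5 + 1))"  # random 1-5s backoff
    "$@"
  done
}

# Usage (illustrative): retry_until 'test -f out.zip' cp in.zip out.zip
```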
.gitattributes (vendored, 3 lines)
@@ -45,3 +45,6 @@ examples/**/* linguist-documentation

src/deps/*.c linguist-vendored
src/deps/brotli/** linguist-vendored

test/js/node/test/fixtures linguist-vendored
test/js/node/test/common linguist-vendored
.github/workflows/build-darwin.yml (vendored, 312 lines)
@@ -1,312 +0,0 @@
name: Build Darwin

permissions:
  contents: read
  actions: write

on:
  workflow_call:
    inputs:
      runs-on:
        type: string
        default: macos-12-large
      tag:
        type: string
        required: true
      arch:
        type: string
        required: true
      cpu:
        type: string
        required: true
      assertions:
        type: boolean
      canary:
        type: boolean
      no-cache:
        type: boolean

env:
  LLVM_VERSION: 16
  BUN_VERSION: 1.1.8
  LC_CTYPE: "en_US.UTF-8"
  LC_ALL: "en_US.UTF-8"

jobs:
  build-submodules:
    name: Build Submodules
    runs-on: ${{ inputs.runs-on }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          sparse-checkout: |
            .gitmodules
            src/deps
            scripts
      - name: Hash Submodules
        id: hash
        run: |
          print_versions() {
            git submodule | grep -v WebKit
            echo "LLVM_VERSION=${{ env.LLVM_VERSION }}"
            cat $(echo scripts/build*.sh scripts/all-dependencies.sh | tr " " "\n" | sort)
          }
          echo "hash=$(print_versions | shasum)" >> $GITHUB_OUTPUT
      - if: ${{ !inputs.no-cache }}
        name: Restore Cache
        id: cache
        uses: actions/cache/restore@v4
        with:
          path: ${{ runner.temp }}/bun-deps
          key: bun-${{ inputs.tag }}-deps-${{ steps.hash.outputs.hash }}
      # TODO: Figure out how to cache homebrew dependencies
      - if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
        name: Install Dependencies
        env:
          HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
          HOMEBREW_NO_AUTO_UPDATE: 1
          HOMEBREW_NO_INSTALL_CLEANUP: 1
        run: |
          brew install \
            llvm@${{ env.LLVM_VERSION }} \
            ccache \
            rust \
            pkg-config \
            coreutils \
            libtool \
            cmake \
            libiconv \
            automake \
            openssl@1.1 \
            ninja \
            golang \
            gnu-sed --force --overwrite
          echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
          echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
          echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH
          brew link --overwrite llvm@$LLVM_VERSION
      - if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
        name: Clone Submodules
        run: |
          ./scripts/update-submodules.sh
      - name: Build Submodules
        if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
        env:
          CPU_TARGET: ${{ inputs.cpu }}
          BUN_DEPS_OUT_DIR: ${{ runner.temp }}/bun-deps
        run: |
          mkdir -p $BUN_DEPS_OUT_DIR
          ./scripts/all-dependencies.sh
      - name: Save Cache
        if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
        uses: actions/cache/save@v4
        with:
          path: ${{ runner.temp }}/bun-deps
          key: ${{ steps.cache.outputs.cache-primary-key }}
      - name: Upload bun-${{ inputs.tag }}-deps
        uses: actions/upload-artifact@v4
        with:
          name: bun-${{ inputs.tag }}-deps
          path: ${{ runner.temp }}/bun-deps
          if-no-files-found: error
  build-cpp:
    name: Build C++
    runs-on: ${{ inputs.runs-on }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          submodules: recursive
      # TODO: Figure out how to cache homebrew dependencies
      - name: Install Dependencies
        env:
          HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
          HOMEBREW_NO_AUTO_UPDATE: 1
          HOMEBREW_NO_INSTALL_CLEANUP: 1
        run: |
          brew install \
            llvm@${{ env.LLVM_VERSION }} \
            ccache \
            rust \
            pkg-config \
            coreutils \
            libtool \
            cmake \
            libiconv \
            automake \
            openssl@1.1 \
            ninja \
            golang \
            gnu-sed --force --overwrite
          echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
          echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
          echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH
          brew link --overwrite llvm@$LLVM_VERSION
      - name: Setup Bun
        uses: ./.github/actions/setup-bun
        with:
          bun-version: ${{ env.BUN_VERSION }}
      - if: ${{ !inputs.no-cache }}
        name: Restore Cache
        uses: actions/cache@v4
        with:
          path: ${{ runner.temp }}/ccache
          key: bun-${{ inputs.tag }}-cpp-${{ hashFiles('Dockerfile', 'Makefile', 'CMakeLists.txt', 'build.zig', 'scripts/**', 'src/**', 'packages/bun-usockets/src/**', 'packages/bun-uws/src/**') }}
          restore-keys: |
            bun-${{ inputs.tag }}-cpp-
      - name: Compile
        env:
          CPU_TARGET: ${{ inputs.cpu }}
          SOURCE_DIR: ${{ github.workspace }}
          OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
          BUN_DEPS_OUT_DIR: ${{ runner.temp }}/bun-deps
          CCACHE_DIR: ${{ runner.temp }}/ccache
        run: |
          mkdir -p $OBJ_DIR
          cd $OBJ_DIR
          cmake -S $SOURCE_DIR -B $OBJ_DIR \
            -G Ninja \
            -DCMAKE_BUILD_TYPE=Release \
            -DUSE_LTO=ON \
            -DBUN_CPP_ONLY=1 \
            -DNO_CONFIGURE_DEPENDS=1
          chmod +x compile-cpp-only.sh
          ./compile-cpp-only.sh -v
      - name: Upload bun-${{ inputs.tag }}-cpp
        uses: actions/upload-artifact@v4
        with:
          name: bun-${{ inputs.tag }}-cpp
          path: ${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a
          if-no-files-found: error
  build-zig:
    name: Build Zig
    uses: ./.github/workflows/build-zig.yml
    with:
      os: darwin
      only-zig: true
      tag: ${{ inputs.tag }}
      arch: ${{ inputs.arch }}
      cpu: ${{ inputs.cpu }}
      assertions: ${{ inputs.assertions }}
      canary: ${{ inputs.canary }}
      no-cache: ${{ inputs.no-cache }}
  link:
    name: Link
    runs-on: ${{ inputs.runs-on }}
    needs:
      - build-submodules
      - build-cpp
      - build-zig
    steps:
      - uses: actions/checkout@v4
      # TODO: Figure out how to cache homebrew dependencies
      - name: Install Dependencies
        env:
          HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
          HOMEBREW_NO_AUTO_UPDATE: 1
          HOMEBREW_NO_INSTALL_CLEANUP: 1
        run: |
          brew install \
            llvm@${{ env.LLVM_VERSION }} \
            ccache \
            rust \
            pkg-config \
            coreutils \
            libtool \
            cmake \
            libiconv \
            automake \
            openssl@1.1 \
            ninja \
            golang \
            gnu-sed --force --overwrite
          echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
          echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
          echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH
          brew link --overwrite llvm@$LLVM_VERSION
      - name: Setup Bun
        uses: ./.github/actions/setup-bun
        with:
          bun-version: ${{ env.BUN_VERSION }}
      - name: Download bun-${{ inputs.tag }}-deps
        uses: actions/download-artifact@v4
        with:
          name: bun-${{ inputs.tag }}-deps
          path: ${{ runner.temp }}/bun-deps
      - name: Download bun-${{ inputs.tag }}-cpp
        uses: actions/download-artifact@v4
        with:
          name: bun-${{ inputs.tag }}-cpp
          path: ${{ runner.temp }}/bun-cpp-obj
      - name: Download bun-${{ inputs.tag }}-zig
        uses: actions/download-artifact@v4
        with:
          name: bun-${{ inputs.tag }}-zig
          path: ${{ runner.temp }}/release
      - if: ${{ !inputs.no-cache }}
        name: Restore Cache
        uses: actions/cache@v4
        with:
          path: ${{ runner.temp }}/ccache
          key: bun-${{ inputs.tag }}-cpp-${{ hashFiles('Dockerfile', 'Makefile', 'CMakeLists.txt', 'build.zig', 'scripts/**', 'src/**', 'packages/bun-usockets/src/**', 'packages/bun-uws/src/**') }}
          restore-keys: |
            bun-${{ inputs.tag }}-cpp-
      - name: Link
        env:
          CPU_TARGET: ${{ inputs.cpu }}
          CCACHE_DIR: ${{ runner.temp }}/ccache
        run: |
          SRC_DIR=$PWD
          mkdir ${{ runner.temp }}/link-build
          cd ${{ runner.temp }}/link-build
          cmake $SRC_DIR \
            -G Ninja \
            -DCMAKE_BUILD_TYPE=Release \
            -DUSE_LTO=ON \
            -DBUN_LINK_ONLY=1 \
            -DBUN_ZIG_OBJ_DIR="${{ runner.temp }}/release" \
            -DBUN_CPP_ARCHIVE="${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a" \
            -DBUN_DEPS_OUT_DIR="${{ runner.temp }}/bun-deps" \
            -DNO_CONFIGURE_DEPENDS=1
          ninja -v
      - name: Prepare
        run: |
          cd ${{ runner.temp }}/link-build
          chmod +x bun-profile bun
          mkdir -p bun-${{ inputs.tag }}-profile/ bun-${{ inputs.tag }}/
          mv bun-profile bun-${{ inputs.tag }}-profile/bun-profile
          mv bun bun-${{ inputs.tag }}/bun
          zip -r bun-${{ inputs.tag }}-profile.zip bun-${{ inputs.tag }}-profile
          zip -r bun-${{ inputs.tag }}.zip bun-${{ inputs.tag }}
      - name: Upload bun-${{ inputs.tag }}
        uses: actions/upload-artifact@v4
        with:
          name: bun-${{ inputs.tag }}
          path: ${{ runner.temp }}/link-build/bun-${{ inputs.tag }}.zip
          if-no-files-found: error
      - name: Upload bun-${{ inputs.tag }}-profile
        uses: actions/upload-artifact@v4
        with:
          name: bun-${{ inputs.tag }}-profile
          path: ${{ runner.temp }}/link-build/bun-${{ inputs.tag }}-profile.zip
          if-no-files-found: error
  on-failure:
    if: ${{ github.repository_owner == 'oven-sh' && failure() }}
    name: On Failure
    needs: link
    runs-on: ubuntu-latest
    steps:
      - name: Send Message
        uses: sarisia/actions-status-discord@v1
        with:
          webhook: ${{ secrets.DISCORD_WEBHOOK }}
          nodetail: true
          color: "#FF0000"
          title: ""
          description: |
            ### ❌ [${{ github.event.pull_request.title }}](${{ github.event.pull_request.html_url }})

            @${{ github.actor }}, the build for bun-${{ inputs.tag }} failed.

            **[View logs](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})**
.github/workflows/build-linux.yml (vendored, 64 lines)
@@ -1,64 +0,0 @@
name: Build Linux

permissions:
  contents: read
  actions: write

on:
  workflow_call:
    inputs:
      runs-on:
        type: string
        required: true
      tag:
        type: string
        required: true
      arch:
        type: string
        required: true
      cpu:
        type: string
        required: true
      assertions:
        type: boolean
      zig-optimize:
        type: string
      canary:
        type: boolean
      no-cache:
        type: boolean

jobs:
  build:
    name: Build Linux
    uses: ./.github/workflows/build-zig.yml
    with:
      os: linux
      only-zig: false
      runs-on: ${{ inputs.runs-on }}
      tag: ${{ inputs.tag }}
      arch: ${{ inputs.arch }}
      cpu: ${{ inputs.cpu }}
      assertions: ${{ inputs.assertions }}
      zig-optimize: ${{ inputs.zig-optimize }}
      canary: ${{ inputs.canary }}
      no-cache: ${{ inputs.no-cache }}
  on-failure:
    if: ${{ github.repository_owner == 'oven-sh' && failure() }}
    name: On Failure
    needs: build
    runs-on: ubuntu-latest
    steps:
      - name: Send Message
        uses: sarisia/actions-status-discord@v1
        with:
          webhook: ${{ secrets.DISCORD_WEBHOOK }}
          nodetail: true
          color: "#FF0000"
          title: ""
          description: |
            ### ❌ [${{ github.event.pull_request.title }}](${{ github.event.pull_request.html_url }})

            @${{ github.actor }}, the build for bun-${{ inputs.tag }} failed.

            **[View logs](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})**
339
.github/workflows/build-windows.yml
vendored
339
.github/workflows/build-windows.yml
vendored
@@ -1,339 +0,0 @@
name: Build Windows

permissions:
  contents: read
  actions: write

on:
  workflow_call:
    inputs:
      runs-on:
        type: string
        default: windows
      tag:
        type: string
        required: true
      arch:
        type: string
        required: true
      cpu:
        type: string
        required: true
      assertions:
        type: boolean
      canary:
        type: boolean
      no-cache:
        type: boolean
      bun-version:
        type: string
        default: 1.1.7

env:
  # Must specify exact version of LLVM for Windows
  LLVM_VERSION: 16.0.6
  BUN_VERSION: ${{ inputs.bun-version }}
  BUN_GARBAGE_COLLECTOR_LEVEL: 1
  BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: 1
  CI: true
  USE_LTO: 1

jobs:
  build-submodules:
    name: Build Submodules
    runs-on: ${{ inputs.runs-on }}
    steps:
      - name: Install VS2022 BuildTools 17.9.7
        run: choco install -y visualstudio2022buildtools --version=117.9.7.0 --params "--add Microsoft.VisualStudio.Component.VC.Tools.x86.x64 --installChannelUri https://aka.ms/vs/17/release/180911598_-255012421/channel"
      - name: Setup Git
        run: |
          git config --global core.autocrlf false
          git config --global core.eol lf
      - name: Checkout
        uses: actions/checkout@v4
        with:
          sparse-checkout: |
            .gitmodules
            src/deps
            scripts
      - name: Hash Submodules
        id: hash
        run: |
          $data = "$(& {
            git submodule | Where-Object { $_ -notmatch 'WebKit' }
            echo "LLVM_VERSION=${{ env.LLVM_VERSION }}"
            Get-Content -Path (Get-ChildItem -Path 'scripts/build*.ps1', 'scripts/all-dependencies.ps1', 'scripts/env.ps1' | Sort-Object -Property Name).FullName | Out-String
            echo 1
          })"
          $hash = ( -join ((New-Object -TypeName System.Security.Cryptography.SHA1CryptoServiceProvider).ComputeHash([System.Text.Encoding]::UTF8.GetBytes($data)) | ForEach-Object { $_.ToString("x2") } )).Substring(0, 10)
          echo "hash=${hash}" >> $env:GITHUB_OUTPUT
      - if: ${{ !inputs.no-cache }}
        name: Restore Cache
        id: cache
        uses: actions/cache/restore@v4
        with:
          path: bun-deps
          key: bun-${{ inputs.tag }}-deps-${{ steps.hash.outputs.hash }}
      - if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
        name: Install Ninja
        run: |
          choco install -y ninja
          choco install -y llvm --version=${{ env.LLVM_VERSION }} --force
      - if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
        name: Clone Submodules
        run: |
          .\scripts\update-submodules.ps1
      - if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
        name: Build Dependencies
        env:
          CPU_TARGET: ${{ inputs.cpu }}
          CCACHE_DIR: ccache
          USE_LTO: 1
        run: |
          .\scripts\env.ps1 ${{ contains(inputs.tag, '-baseline') && '-Baseline' || '' }}
          choco install -y nasm --version=2.16.01
          $env:BUN_DEPS_OUT_DIR = (mkdir -Force "./bun-deps")
          .\scripts\all-dependencies.ps1
      - name: Save Cache
        if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
        uses: actions/cache/save@v4
        with:
          path: bun-deps
          key: ${{ steps.cache.outputs.cache-primary-key }}
      - name: Upload bun-${{ inputs.tag }}-deps
        uses: actions/upload-artifact@v4
        with:
          name: bun-${{ inputs.tag }}-deps
          path: bun-deps
          if-no-files-found: error
  codegen:
    name: Codegen
    runs-on: ubuntu-latest
    steps:
      - name: Setup Git
        run: |
          git config --global core.autocrlf false
          git config --global core.eol lf
      - name: Checkout
        uses: actions/checkout@v4
      - name: Setup Bun
        uses: ./.github/actions/setup-bun
        with:
          bun-version: ${{ inputs.bun-version }}
      - name: Codegen
        run: |
          ./scripts/cross-compile-codegen.sh win32 x64
      - if: ${{ inputs.canary }}
        name: Calculate Revision
        run: |
          echo "canary_revision=$(GITHUB_TOKEN="${{ github.token }}" bash ./scripts/calculate-canary-revision.sh --raw)" > build-codegen-win32-x64/.canary_revision
      - name: Upload bun-${{ inputs.tag }}-codegen
        uses: actions/upload-artifact@v4
        with:
          name: bun-${{ inputs.tag }}-codegen
          path: build-codegen-win32-x64
          if-no-files-found: error
  build-cpp:
    name: Build C++
    needs: codegen
    runs-on: ${{ inputs.runs-on }}
    steps:
      - name: Install VS2022 BuildTools 17.9.7
        run: choco install -y visualstudio2022buildtools --version=117.9.7.0 --params "--add Microsoft.VisualStudio.Component.VC.Tools.x86.x64 --installChannelUri https://aka.ms/vs/17/release/180911598_-255012421/channel"
      - name: Setup Git
        run: |
          git config --global core.autocrlf false
          git config --global core.eol lf
      - name: Checkout
        uses: actions/checkout@v4
        with:
          submodules: recursive
      - name: Install Ninja
        run: |
          choco install -y ninja
          choco install -y llvm --version=${{ env.LLVM_VERSION }} --force
      - name: Setup Bun
        uses: ./.github/actions/setup-bun
        with:
          bun-version: ${{ inputs.bun-version }}
      - if: ${{ !inputs.no-cache }}
        name: Restore Cache
        uses: actions/cache@v4
        with:
          path: ccache
          key: bun-${{ inputs.tag }}-cpp-${{ hashFiles('Dockerfile', 'Makefile', 'CMakeLists.txt', 'build.zig', 'scripts/**', 'src/**', 'packages/bun-usockets/src/**', 'packages/bun-uws/src/**') }}
          restore-keys: |
            bun-${{ inputs.tag }}-cpp-
      - name: Download bun-${{ inputs.tag }}-codegen
        uses: actions/download-artifact@v4
        with:
          name: bun-${{ inputs.tag }}-codegen
          path: build
      - name: Compile
        env:
          CPU_TARGET: ${{ inputs.cpu }}
          CCACHE_DIR: ccache
          USE_LTO: 1
        run: |
          # $CANARY_REVISION = if (Test-Path build/.canary_revision) { Get-Content build/.canary_revision } else { "0" }
          $CANARY_REVISION = 0
          .\scripts\env.ps1 ${{ contains(inputs.tag, '-baseline') && '-Baseline' || '' }}
          .\scripts\update-submodules.ps1
          .\scripts\build-libuv.ps1 -CloneOnly $True
          cd build
          cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Release `
            -DNO_CODEGEN=1 `
            -DUSE_LTO=1 `
            -DNO_CONFIGURE_DEPENDS=1 `
            "-DCANARY=${CANARY_REVISION}" `
            -DBUN_CPP_ONLY=1 ${{ contains(inputs.tag, '-baseline') && '-DUSE_BASELINE_BUILD=1' || '' }}
          if ($LASTEXITCODE -ne 0) { throw "CMake configuration failed" }
          .\compile-cpp-only.ps1 -v
          if ($LASTEXITCODE -ne 0) { throw "C++ compilation failed" }
      - name: Upload bun-${{ inputs.tag }}-cpp
        uses: actions/upload-artifact@v4
        with:
          name: bun-${{ inputs.tag }}-cpp
          path: build/bun-cpp-objects.a
          if-no-files-found: error
  build-zig:
    name: Build Zig
    uses: ./.github/workflows/build-zig.yml
    with:
      os: windows
      zig-optimize: ReleaseSafe
      only-zig: true
      tag: ${{ inputs.tag }}
      arch: ${{ inputs.arch }}
      cpu: ${{ inputs.cpu }}
      assertions: ${{ inputs.assertions }}
      canary: ${{ inputs.canary }}
      no-cache: ${{ inputs.no-cache }}
  link:
    name: Link
    runs-on: ${{ inputs.runs-on }}
    needs:
      - build-submodules
      - build-cpp
      - build-zig
      - codegen
    steps:
      - name: Install VS2022 BuildTools 17.9.7
        run: choco install -y visualstudio2022buildtools --version=117.9.7.0 --params "--add Microsoft.VisualStudio.Component.VC.Tools.x86.x64 --installChannelUri https://aka.ms/vs/17/release/180911598_-255012421/channel"
      - name: Setup Git
        run: |
          git config --global core.autocrlf false
          git config --global core.eol lf
      - name: Checkout
        uses: actions/checkout@v4
        with:
          submodules: recursive
      - name: Install Ninja
        run: |
          choco install -y ninja
          choco install -y llvm --version=${{ env.LLVM_VERSION }} --force
      - name: Setup Bun
        uses: ./.github/actions/setup-bun
        with:
          bun-version: ${{ inputs.bun-version }}
      - name: Download bun-${{ inputs.tag }}-deps
        uses: actions/download-artifact@v4
        with:
          name: bun-${{ inputs.tag }}-deps
          path: bun-deps
      - name: Download bun-${{ inputs.tag }}-cpp
        uses: actions/download-artifact@v4
        with:
          name: bun-${{ inputs.tag }}-cpp
          path: bun-cpp
      - name: Download bun-${{ inputs.tag }}-zig
        uses: actions/download-artifact@v4
        with:
          name: bun-${{ inputs.tag }}-zig
          path: bun-zig
      - name: Download bun-${{ inputs.tag }}-codegen
        uses: actions/download-artifact@v4
        with:
          name: bun-${{ inputs.tag }}-codegen
          path: build
      - if: ${{ !inputs.no-cache }}
        name: Restore Cache
        uses: actions/cache@v4
        with:
          path: ccache
          key: bun-${{ inputs.tag }}-cpp-${{ hashFiles('Dockerfile', 'Makefile', 'CMakeLists.txt', 'build.zig', 'scripts/**', 'src/**', 'packages/bun-usockets/src/**', 'packages/bun-uws/src/**') }}
          restore-keys: |
            bun-${{ inputs.tag }}-cpp-
      - name: Link
        env:
          CPU_TARGET: ${{ inputs.cpu }}
          CCACHE_DIR: ccache
        run: |
          .\scripts\update-submodules.ps1
          .\scripts\env.ps1 ${{ contains(inputs.tag, '-baseline') && '-Baseline' || '' }}
          Set-Location build
          # $CANARY_REVISION = if (Test-Path build/.canary_revision) { Get-Content build/.canary_revision } else { "0" }
          $CANARY_REVISION = 0
          cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Release `
            -DNO_CODEGEN=1 `
            -DNO_CONFIGURE_DEPENDS=1 `
            "-DCANARY=${CANARY_REVISION}" `
            -DBUN_LINK_ONLY=1 `
            -DUSE_LTO=1 `
            "-DBUN_DEPS_OUT_DIR=$(Resolve-Path ../bun-deps)" `
            "-DBUN_CPP_ARCHIVE=$(Resolve-Path ../bun-cpp/bun-cpp-objects.a)" `
            "-DBUN_ZIG_OBJ_DIR=$(Resolve-Path ../bun-zig)" `
            ${{ contains(inputs.tag, '-baseline') && '-DUSE_BASELINE_BUILD=1' || '' }}
          if ($LASTEXITCODE -ne 0) { throw "CMake configuration failed" }
          ninja -v
          if ($LASTEXITCODE -ne 0) { throw "Link failed!" }
      - name: Prepare
        run: |
          $Dist = mkdir -Force "bun-${{ inputs.tag }}"
          cp -r build\bun.exe "$Dist\bun.exe"
          Compress-Archive -Force "$Dist" "${Dist}.zip"
          $Dist = "$Dist-profile"
          MkDir -Force "$Dist"
          cp -r build\bun.exe "$Dist\bun.exe"
          cp -r build\bun.pdb "$Dist\bun.pdb"
          Compress-Archive -Force "$Dist" "$Dist.zip"
          .\build\bun.exe --print "JSON.stringify(require('bun:internal-for-testing').crash_handler.getFeatureData())" > .\features.json
      - name: Upload bun-${{ inputs.tag }}
        uses: actions/upload-artifact@v4
        with:
          name: bun-${{ inputs.tag }}
          path: bun-${{ inputs.tag }}.zip
          if-no-files-found: error
      - name: Upload bun-${{ inputs.tag }}-profile
        uses: actions/upload-artifact@v4
        with:
          name: bun-${{ inputs.tag }}-profile
          path: bun-${{ inputs.tag }}-profile.zip
          if-no-files-found: error
      - name: Upload bun-feature-data
        uses: actions/upload-artifact@v4
        with:
          name: bun-feature-data
          path: features.json
          if-no-files-found: error
          overwrite: true
  on-failure:
    if: ${{ github.repository_owner == 'oven-sh' && failure() }}
    name: On Failure
    needs: link
    runs-on: ubuntu-latest
    steps:
      - name: Send Message
        uses: sarisia/actions-status-discord@v1
        with:
          webhook: ${{ secrets.DISCORD_WEBHOOK }}
          nodetail: true
          color: "#FF0000"
          title: ""
          description: |
            ### ❌ [${{ github.event.pull_request.title }}](${{ github.event.pull_request.html_url }})

            @${{ github.actor }}, the build for bun-${{ inputs.tag }} failed.

            **[View logs](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})**
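A note on the build-submodules job above: it splits actions/cache into explicit restore and save halves, so the expensive dependency build runs only on a cache miss, and the save step reuses the restore step's cache-primary-key output to write the entry under the exact key that was probed. A minimal sketch of the pattern; the key, path, and build script are placeholders, not this repository's real names:

    # Sketch of the split restore/build/save cache pattern; names are placeholders.
    steps:
      - name: Restore
        id: cache
        uses: actions/cache/restore@v4
        with:
          path: deps
          key: deps-${{ hashFiles('lockfile') }}
      - name: Build dependencies (cache miss only)
        if: ${{ !steps.cache.outputs.cache-hit }}
        run: ./build-deps.sh  # hypothetical build script
      - name: Save
        if: ${{ !steps.cache.outputs.cache-hit }}
        uses: actions/cache/save@v4
        with:
          path: deps
          key: ${{ steps.cache.outputs.cache-primary-key }}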
.github/workflows/build-zig.yml (vendored, 122 lines deleted)
@@ -1,122 +0,0 @@
name: Build Zig

permissions:
  contents: read
  actions: write

on:
  workflow_call:
    inputs:
      runs-on:
        type: string
        default: ${{ github.repository_owner != 'oven-sh' && 'ubuntu-latest' || inputs.only-zig && 'namespace-profile-bun-ci-linux-x64' || inputs.arch == 'x64' && 'namespace-profile-bun-ci-linux-x64' || 'namespace-profile-bun-ci-linux-aarch64' }}
      tag:
        type: string
        required: true
      os:
        type: string
        required: true
      arch:
        type: string
        required: true
      cpu:
        type: string
        required: true
      assertions:
        type: boolean
        default: false
      zig-optimize:
        type: string # 'ReleaseSafe' or 'ReleaseFast'
        default: ReleaseFast
      canary:
        type: boolean
        default: ${{ github.ref == 'refs/heads/main' }}
      only-zig:
        type: boolean
        default: true
      no-cache:
        type: boolean
        default: false

jobs:
  build-zig:
    name: ${{ inputs.only-zig && 'Build Zig' || 'Build & Link' }}
    runs-on: ${{ inputs.runs-on }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Calculate Cache Key
        id: cache
        run: |
          echo "key=${{ hashFiles('Dockerfile', 'Makefile', 'CMakeLists.txt', 'build.zig', 'scripts/**', 'src/**', 'packages/bun-usockets/src/**', 'packages/bun-uws/src/**') }}" >> $GITHUB_OUTPUT
      - if: ${{ !inputs.no-cache }}
        name: Restore Cache
        uses: actions/cache@v4
        with:
          key: bun-${{ inputs.tag }}-docker-${{ steps.cache.outputs.key }}
          restore-keys: |
            bun-${{ inputs.tag }}-docker-
          path: |
            ${{ runner.temp }}/dockercache
      - name: Setup Docker
        uses: docker/setup-buildx-action@v3
        with:
          install: true
          platforms: |
            linux/${{ runner.arch == 'X64' && 'amd64' || 'arm64' }}
      - name: Build
        uses: docker/build-push-action@v5
        with:
          push: false
          target: ${{ inputs.only-zig && 'build_release_obj' || 'artifact' }}
          cache-from: |
            type=local,src=${{ runner.temp }}/dockercache
          cache-to: |
            type=local,dest=${{ runner.temp }}/dockercache,mode=max
          outputs: |
            type=local,dest=${{ runner.temp }}/release
          platforms: |
            linux/${{ runner.arch == 'X64' && 'amd64' || 'arm64' }}
          build-args: |
            GIT_SHA=${{ github.event.workflow_run.head_sha || github.sha }}
            TRIPLET=${{ inputs.os == 'darwin' && format('{0}-macos-none', inputs.arch == 'x64' && 'x86_64' || 'aarch64') || inputs.os == 'windows' && format('{0}-windows-msvc', inputs.arch == 'x64' && 'x86_64' || 'aarch64') || format('{0}-linux-gnu', inputs.arch == 'x64' && 'x86_64' || 'aarch64') }}
            ARCH=${{ inputs.arch == 'x64' && 'x86_64' || 'aarch64' }}
            BUILDARCH=${{ inputs.arch == 'x64' && 'amd64' || 'arm64' }}
            BUILD_MACHINE_ARCH=${{ inputs.arch == 'x64' && 'x86_64' || 'aarch64' }}
            CPU_TARGET=${{ inputs.arch == 'x64' && inputs.cpu || 'native' }}
            ASSERTIONS=${{ inputs.assertions && 'ON' || 'OFF' }}
            ZIG_OPTIMIZE=${{ inputs.zig-optimize }}
            CANARY=${{ inputs.canary && '1' || '0' }}
      - if: ${{ inputs.only-zig }}
        name: Upload bun-${{ inputs.tag }}-zig
        uses: actions/upload-artifact@v4
        with:
          name: bun-${{ inputs.tag }}-zig
          path: ${{ runner.temp }}/release/bun-zig.o
          if-no-files-found: error
      - if: ${{ !inputs.only-zig }}
        name: Prepare
        run: |
          cd ${{ runner.temp }}/release
          chmod +x bun-profile bun
          mkdir bun-${{ inputs.tag }}-profile
          mkdir bun-${{ inputs.tag }}
          strip bun
          mv bun-profile bun-${{ inputs.tag }}-profile/bun-profile
          mv bun bun-${{ inputs.tag }}/bun
          zip -r bun-${{ inputs.tag }}-profile.zip bun-${{ inputs.tag }}-profile
          zip -r bun-${{ inputs.tag }}.zip bun-${{ inputs.tag }}
      - if: ${{ !inputs.only-zig }}
        name: Upload bun-${{ inputs.tag }}
        uses: actions/upload-artifact@v4
        with:
          name: bun-${{ inputs.tag }}
          path: ${{ runner.temp }}/release/bun-${{ inputs.tag }}.zip
          if-no-files-found: error
      - if: ${{ !inputs.only-zig }}
        name: Upload bun-${{ inputs.tag }}-profile
        uses: actions/upload-artifact@v4
        with:
          name: bun-${{ inputs.tag }}-profile
          path: ${{ runner.temp }}/release/bun-${{ inputs.tag }}-profile.zip
          if-no-files-found: error
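The Build step in this workflow persists Docker layer caches between runs by pointing buildx's local cache exporter at a directory that the earlier actions/cache step restores; mode=max exports intermediate build stages as well as the final layers. A minimal sketch of the pairing, assuming placeholder key names:

    # Sketch: buildx layer caching backed by a directory that actions/cache restores.
    - uses: actions/cache@v4
      with:
        path: ${{ runner.temp }}/dockercache
        key: docker-${{ hashFiles('Dockerfile') }}
    - uses: docker/build-push-action@v5
      with:
        push: false
        cache-from: type=local,src=${{ runner.temp }}/dockercache
        cache-to: type=local,dest=${{ runner.temp }}/dockercache,mode=max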
.github/workflows/ci.yml (vendored, 245 lines deleted)
@@ -1,245 +0,0 @@
name: CI

permissions:
  contents: read
  actions: write

concurrency:
  group: ${{ github.workflow }}-${{ github.event_name == 'workflow_dispatch' && inputs.run-id || github.ref }}
  cancel-in-progress: true

on:
  workflow_dispatch:
    inputs:
      run-id:
        type: string
        description: The workflow ID to download artifacts (skips the build step)
  pull_request:
    paths-ignore:
      - .vscode/**/*
      - docs/**/*
      - examples/**/*
  push:
    branches:
      - main
    paths-ignore:
      - .vscode/**/*
      - docs/**/*
      - examples/**/*

jobs:
  format:
    if: ${{ !inputs.run-id }}
    name: Format
    uses: ./.github/workflows/run-format.yml
    secrets: inherit
    with:
      zig-version: 0.13.0
    permissions:
      contents: write
  lint:
    if: ${{ !inputs.run-id }}
    name: Lint
    uses: ./.github/workflows/run-lint.yml
    secrets: inherit
  linux-x64:
    if: ${{ !inputs.run-id }}
    name: Build linux-x64
    uses: ./.github/workflows/build-linux.yml
    secrets: inherit
    with:
      runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }}
      tag: linux-x64
      arch: x64
      cpu: haswell
      canary: true
      no-cache: true
  linux-x64-baseline:
    if: ${{ !inputs.run-id }}
    name: Build linux-x64-baseline
    uses: ./.github/workflows/build-linux.yml
    secrets: inherit
    with:
      runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }}
      tag: linux-x64-baseline
      arch: x64
      cpu: nehalem
      canary: true
      no-cache: true
  linux-aarch64:
    if: ${{ !inputs.run-id && github.repository_owner == 'oven-sh' }}
    name: Build linux-aarch64
    uses: ./.github/workflows/build-linux.yml
    secrets: inherit
    with:
      runs-on: namespace-profile-bun-ci-linux-aarch64
      tag: linux-aarch64
      arch: aarch64
      cpu: native
      canary: true
      no-cache: true
  darwin-x64:
    if: ${{ !inputs.run-id }}
    name: Build darwin-x64
    uses: ./.github/workflows/build-darwin.yml
    secrets: inherit
    with:
      runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-12-large' || 'macos-12' }}
      tag: darwin-x64
      arch: x64
      cpu: haswell
      canary: true
  darwin-x64-baseline:
    if: ${{ !inputs.run-id }}
    name: Build darwin-x64-baseline
    uses: ./.github/workflows/build-darwin.yml
    secrets: inherit
    with:
      runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-12-large' || 'macos-12' }}
      tag: darwin-x64-baseline
      arch: x64
      cpu: nehalem
      canary: true
  darwin-aarch64:
    if: ${{ !inputs.run-id }}
    name: Build darwin-aarch64
    uses: ./.github/workflows/build-darwin.yml
    secrets: inherit
    with:
      runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-darwin-aarch64' || 'macos-12' }}
      tag: darwin-aarch64
      arch: aarch64
      cpu: native
      canary: true
  windows-x64:
    if: ${{ !inputs.run-id }}
    name: Build windows-x64
    uses: ./.github/workflows/build-windows.yml
    secrets: inherit
    with:
      runs-on: windows
      tag: windows-x64
      arch: x64
      cpu: haswell
      canary: true
  windows-x64-baseline:
    if: ${{ !inputs.run-id }}
    name: Build windows-x64-baseline
    uses: ./.github/workflows/build-windows.yml
    secrets: inherit
    with:
      runs-on: windows
      tag: windows-x64-baseline
      arch: x64
      cpu: nehalem
      canary: true
  linux-x64-test:
    if: ${{ inputs.run-id || github.event_name == 'pull_request' }}
    name: Test linux-x64
    needs: linux-x64
    uses: ./.github/workflows/run-test.yml
    secrets: inherit
    with:
      run-id: ${{ inputs.run-id }}
      pr-number: ${{ github.event.number }}
      runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }}
      tag: linux-x64
  linux-x64-baseline-test:
    if: ${{ inputs.run-id || github.event_name == 'pull_request' }}
    name: Test linux-x64-baseline
    needs: linux-x64-baseline
    uses: ./.github/workflows/run-test.yml
    secrets: inherit
    with:
      run-id: ${{ inputs.run-id }}
      pr-number: ${{ github.event.number }}
      runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }}
      tag: linux-x64-baseline
  linux-aarch64-test:
    if: ${{ inputs.run-id || github.event_name == 'pull_request' && github.repository_owner == 'oven-sh' }}
    name: Test linux-aarch64
    needs: linux-aarch64
    uses: ./.github/workflows/run-test.yml
    secrets: inherit
    with:
      run-id: ${{ inputs.run-id }}
      pr-number: ${{ github.event.number }}
      runs-on: namespace-profile-bun-ci-linux-aarch64
      tag: linux-aarch64
  darwin-x64-test:
    if: ${{ inputs.run-id || github.event_name == 'pull_request' }}
    name: Test darwin-x64
    needs: darwin-x64
    uses: ./.github/workflows/run-test.yml
    secrets: inherit
    with:
      run-id: ${{ inputs.run-id }}
      pr-number: ${{ github.event.number }}
      runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-12-large' || 'macos-12' }}
      tag: darwin-x64
  darwin-x64-baseline-test:
    if: ${{ inputs.run-id || github.event_name == 'pull_request' }}
    name: Test darwin-x64-baseline
    needs: darwin-x64-baseline
    uses: ./.github/workflows/run-test.yml
    secrets: inherit
    with:
      run-id: ${{ inputs.run-id }}
      pr-number: ${{ github.event.number }}
      runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-12-large' || 'macos-12' }}
      tag: darwin-x64-baseline
  darwin-aarch64-test:
    if: ${{ inputs.run-id || github.event_name == 'pull_request' }}
    name: Test darwin-aarch64
    needs: darwin-aarch64
    uses: ./.github/workflows/run-test.yml
    secrets: inherit
    with:
      run-id: ${{ inputs.run-id }}
      pr-number: ${{ github.event.number }}
      runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-darwin-aarch64' || 'macos-12' }}
      tag: darwin-aarch64
  windows-x64-test:
    if: ${{ inputs.run-id || github.event_name == 'pull_request' }}
    name: Test windows-x64
    needs: windows-x64
    uses: ./.github/workflows/run-test.yml
    secrets: inherit
    with:
      run-id: ${{ inputs.run-id }}
      pr-number: ${{ github.event.number }}
      runs-on: windows
      tag: windows-x64
  windows-x64-baseline-test:
    if: ${{ inputs.run-id || github.event_name == 'pull_request' }}
    name: Test windows-x64-baseline
    needs: windows-x64-baseline
    uses: ./.github/workflows/run-test.yml
    secrets: inherit
    with:
      run-id: ${{ inputs.run-id }}
      pr-number: ${{ github.event.number }}
      runs-on: windows
      tag: windows-x64-baseline
  cleanup:
    if: ${{ always() }}
    name: Cleanup
    needs:
      - linux-x64
      - linux-x64-baseline
      - linux-aarch64
      - darwin-x64
      - darwin-x64-baseline
      - darwin-aarch64
      - windows-x64
      - windows-x64-baseline
    runs-on: ubuntu-latest
    steps:
      - name: Cleanup Artifacts
        uses: geekyeggo/delete-artifact@v5
        with:
          name: |
            bun-*-cpp
            bun-*-zig
            bun-*-deps
            bun-*-codegen
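The concurrency block at the top of ci.yml deduplicates runs: each run computes a group key (the workflow name plus the ref, or the requested run-id for manual dispatches), and cancel-in-progress aborts any older run still holding the same group. A minimal sketch of the idea, assuming simple per-branch deduplication:

    # Sketch: cancel superseded runs of the same workflow on the same ref.
    name: Example
    on: [push, pull_request]
    concurrency:
      group: ${{ github.workflow }}-${{ github.ref }}
      cancel-in-progress: true
    jobs:
      build:
        runs-on: ubuntu-latest
        steps:
          - run: echo "only the newest run for this ref survives"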
.github/workflows/comment.yml (vendored, 55 lines deleted)
@@ -1,55 +0,0 @@
name: Comment

permissions:
  actions: read
  pull-requests: write

on:
  workflow_run:
    workflows:
      - CI
    types:
      - completed

jobs:
  comment:
    if: ${{ github.repository_owner == 'oven-sh' }}
    name: Comment
    runs-on: ubuntu-latest
    steps:
      - name: Download Tests
        uses: actions/download-artifact@v4
        with:
          path: bun
          pattern: bun-*-tests
          github-token: ${{ github.token }}
          run-id: ${{ github.event.workflow_run.id }}
      - name: Setup Environment
        id: env
        shell: bash
        run: |
          echo "pr-number=$(<bun/bun-linux-x64-tests/pr-number.txt)" >> $GITHUB_OUTPUT
      - name: Generate Comment
        run: |
          cat bun/bun-*-tests/comment.md > comment.md
          if [ -s comment.md ]; then
            echo -e "❌ @${{ github.actor }}, your commit has failing tests :(\n\n$(cat comment.md)" > comment.md
          else
            echo -e "✅ @${{ github.actor }}, all tests passed!" > comment.md
          fi
          echo -e "\n**[View logs](https://github.com/${{ github.repository }}/actions/runs/${{ github.event.workflow_run.id }})**" >> comment.md
          echo -e "<!-- generated-comment workflow=${{ github.workflow }} -->" >> comment.md
      - name: Find Comment
        id: comment
        uses: peter-evans/find-comment@v3
        with:
          issue-number: ${{ steps.env.outputs.pr-number }}
          comment-author: github-actions[bot]
          body-includes: <!-- generated-comment workflow=${{ github.workflow }} -->
      - name: Write Comment
        uses: peter-evans/create-or-update-comment@v4
        with:
          comment-id: ${{ steps.comment.outputs.comment-id }}
          issue-number: ${{ steps.env.outputs.pr-number }}
          body-path: comment.md
          edit-mode: replace
.github/workflows/create-release-build.yml (vendored, 183 lines deleted)
@@ -1,183 +0,0 @@
name: Create Release Build
run-name: Compile Bun v${{ inputs.version }} by ${{ github.actor }}

concurrency:
  group: release
  cancel-in-progress: true

permissions:
  contents: write
  actions: write

on:
  workflow_dispatch:
    inputs:
      version:
        type: string
        required: true
        description: "Release version. Example: 1.1.4. Exclude the 'v' prefix."
      tag:
        type: string
        required: true
        description: "GitHub tag to use"
      clobber:
        type: boolean
        required: false
        default: false
        description: "Overwrite existing release artifacts?"
  release:
    types:
      - created

jobs:
  notify-start:
    if: ${{ github.repository_owner == 'oven-sh' }}
    name: Notify Start
    runs-on: ubuntu-latest
    steps:
      - name: Send Message
        uses: sarisia/actions-status-discord@v1
        with:
          webhook: ${{ secrets.DISCORD_WEBHOOK_PUBLIC }}
          nodetail: true
          color: "#1F6FEB"
          title: "Bun v${{ inputs.version }} is compiling"
          description: |
            ### @${{ github.actor }} started compiling Bun v${{inputs.version}}
      - name: Send Message
        uses: sarisia/actions-status-discord@v1
        with:
          webhook: ${{ secrets.BUN_DISCORD_GITHUB_CHANNEL_WEBHOOK }}
          nodetail: true
          color: "#1F6FEB"
          title: "Bun v${{ inputs.version }} is compiling"
          description: |
            ### @${{ github.actor }} started compiling Bun v${{inputs.version}}

            **[View logs](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})**
  linux-x64:
    name: Build linux-x64
    uses: ./.github/workflows/build-linux.yml
    secrets: inherit
    with:
      runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }}
      tag: linux-x64
      arch: x64
      cpu: haswell
      canary: false
  linux-x64-baseline:
    name: Build linux-x64-baseline
    uses: ./.github/workflows/build-linux.yml
    secrets: inherit
    with:
      runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }}
      tag: linux-x64-baseline
      arch: x64
      cpu: nehalem
      canary: false
  linux-aarch64:
    name: Build linux-aarch64
    uses: ./.github/workflows/build-linux.yml
    secrets: inherit
    with:
      runs-on: namespace-profile-bun-ci-linux-aarch64
      tag: linux-aarch64
      arch: aarch64
      cpu: native
      canary: false
  darwin-x64:
    name: Build darwin-x64
    uses: ./.github/workflows/build-darwin.yml
    secrets: inherit
    with:
      runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-12-large' || 'macos-12' }}
      tag: darwin-x64
      arch: x64
      cpu: haswell
      canary: false
  darwin-x64-baseline:
    name: Build darwin-x64-baseline
    uses: ./.github/workflows/build-darwin.yml
    secrets: inherit
    with:
      runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-12-large' || 'macos-12' }}
      tag: darwin-x64-baseline
      arch: x64
      cpu: nehalem
      canary: false
  darwin-aarch64:
    name: Build darwin-aarch64
    uses: ./.github/workflows/build-darwin.yml
    secrets: inherit
    with:
      runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-darwin-aarch64' || 'macos-12' }}
      tag: darwin-aarch64
      arch: aarch64
      cpu: native
      canary: false
  windows-x64:
    name: Build windows-x64
    uses: ./.github/workflows/build-windows.yml
    secrets: inherit
    with:
      runs-on: windows
      tag: windows-x64
      arch: x64
      cpu: haswell
      canary: false
  windows-x64-baseline:
    name: Build windows-x64-baseline
    uses: ./.github/workflows/build-windows.yml
    secrets: inherit
    with:
      runs-on: windows
      tag: windows-x64-baseline
      arch: x64
      cpu: nehalem
      canary: false

  upload-artifacts:
    needs:
      - linux-x64
      - linux-x64-baseline
      - linux-aarch64
      - darwin-x64
      - darwin-x64-baseline
      - darwin-aarch64
      - windows-x64
      - windows-x64-baseline
    runs-on: ubuntu-latest
    steps:
      - name: Download Artifacts
        uses: actions/download-artifact@v4
        with:
          path: bun-releases
          pattern: bun-*
          merge-multiple: true
          github-token: ${{ github.token }}
      - name: Check for Artifacts
        run: |
          if [ ! -d "bun-releases" ] || [ -z "$(ls -A bun-releases)" ]; then
            echo "Error: No artifacts were downloaded or 'bun-releases' directory does not exist."
            exit 1 # Fail the job if the condition is met
          else
            echo "Artifacts downloaded successfully."
          fi
      - name: Send Message
        uses: sarisia/actions-status-discord@v1
        with:
          webhook: ${{ secrets.DISCORD_WEBHOOK }}
          nodetail: true
          color: "#FF0000"
          title: "Bun v${{ inputs.version }} release artifacts uploaded"
      - name: "Upload Artifacts"
        env:
          GH_TOKEN: ${{ github.token }}
        run: |
          # Unzip each artifact one level deep
          cd bun-releases
          for f in *.zip; do
            unzip -o $f
          done
          cd ..
          gh release upload --repo=${{ github.repository }} ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.tag || github.event.release.id }} ${{ inputs.clobber && '--clobber' || '' }} bun-releases/*.zip
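The final step drives the GitHub CLI rather than an action: gh release upload attaches assets to an existing release, --repo pins the target repository, and --clobber overwrites assets with the same name. A minimal sketch of the same command outside this workflow, with a placeholder tag and file:

    # Sketch: uploading a release asset with the GitHub CLI; tag and file are placeholders.
    - name: Upload release asset (sketch)
      env:
        GH_TOKEN: ${{ github.token }}  # gh reads its auth token from GH_TOKEN
      run: |
        gh release upload v1.0.0 ./dist/example.zip --repo ${{ github.repository }} --clobber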
.github/workflows/labeled.yml (vendored, 36 lines changed)
@@ -7,6 +7,42 @@ on:
     types: [labeled]

 jobs:
+  on-bug:
+    runs-on: ubuntu-latest
+    if: github.event.label.name == 'bug' || github.event.label.name == 'crash'
+    permissions:
+      issues: write
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+        with:
+          sparse-checkout: |
+            scripts
+            .github
+            CMakeLists.txt
+      - name: Setup Bun
+        uses: ./.github/actions/setup-bun
+        with:
+          bun-version: "1.1.24"
+      - name: "categorize bug"
+        id: add-labels
+        env:
+          GITHUB_ISSUE_BODY: ${{ github.event.issue.body }}
+          GITHUB_ISSUE_TITLE: ${{ github.event.issue.title }}
+          ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
+        shell: bash
+        run: |
+          echo '{"dependencies": { "@anthropic-ai/sdk": "latest" }}' > scripts/package.json && bun install --cwd=./scripts
+          LABELS=$(bun scripts/label-issue.ts)
+          echo "labels=$LABELS" >> $GITHUB_OUTPUT
+      - name: Add labels
+        uses: actions-cool/issues-helper@v3
+        if: steps.add-labels.outputs.labels != ''
+        with:
+          actions: "add-labels"
+          token: ${{ secrets.GITHUB_TOKEN }}
+          issue-number: ${{ github.event.issue.number }}
+          labels: ${{ steps.add-labels.outputs.labels }}
   on-labeled:
     runs-on: ubuntu-latest
     if: github.event.label.name == 'crash' || github.event.label.name == 'needs repro'
.github/workflows/release.yml (vendored, 32 lines changed)
@@ -1,3 +1,6 @@
+# TODO: Move this to bash scripts instead of GitHub Actions
+# so it can be run from Buildkite, see: .buildkite/scripts/release.sh
+
 name: Release
 concurrency: release

@@ -63,7 +66,7 @@ jobs:
       - name: Setup Bun
         uses: ./.github/actions/setup-bun
         with:
-          bun-version: "1.0.21"
+          bun-version: "1.1.20"
       - name: Install Dependencies
         run: bun install
       - name: Sign Release
@@ -85,10 +88,13 @@ jobs:
     steps:
       - name: Checkout
         uses: actions/checkout@v4
+        with:
+          # To workaround issue
+          ref: main
       - name: Setup Bun
         uses: ./.github/actions/setup-bun
         with:
-          bun-version: "1.0.21"
+          bun-version: "1.1.20"
       - name: Install Dependencies
         run: bun install
       - name: Release
@@ -117,7 +123,7 @@ jobs:
         if: ${{ env.BUN_VERSION != 'canary' }}
         uses: ./.github/actions/setup-bun
         with:
-          bun-version: "1.0.21"
+          bun-version: "1.1.20"
       - name: Setup Bun
         if: ${{ env.BUN_VERSION == 'canary' }}
         uses: ./.github/actions/setup-bun
@@ -259,7 +265,7 @@ jobs:
       - name: Setup Bun
         uses: ./.github/actions/setup-bun
         with:
-          bun-version: "1.0.21"
+          bun-version: "1.1.20"
       - name: Install Dependencies
         run: bun install
       - name: Release
@@ -270,6 +276,24 @@ jobs:
           AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY}}
           AWS_ENDPOINT: ${{ secrets.AWS_ENDPOINT }}
           AWS_BUCKET: bun
+
+  notify-sentry:
+    name: Notify Sentry
+    runs-on: ubuntu-latest
+    needs: s3
+    steps:
+      - name: Notify Sentry
+        uses: getsentry/action-release@v1.7.0
+        env:
+          SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
+          SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
+          SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
+        with:
+          ignore_missing: true
+          ignore_empty: true
+          version: ${{ env.BUN_VERSION }}
+          environment: production
+
   bump:
     name: "Bump version"
     runs-on: ubuntu-latest
.github/workflows/run-format.yml (vendored, 4 lines changed)
@@ -29,9 +29,9 @@ jobs:
       - name: Setup Bun
         uses: ./.github/actions/setup-bun
         with:
-          bun-version: "1.1.8"
+          bun-version: "1.1.20"
       - name: Setup Zig
-        uses: goto-bus-stop/setup-zig@c7b6cdd3adba8f8b96984640ff172c37c93f73ee
+        uses: mlugg/setup-zig@v1
         with:
           version: ${{ inputs.zig-version }}
       - name: Install Dependencies
.github/workflows/run-lint-cpp.yml (vendored, 6 lines changed)
@@ -3,7 +3,7 @@ name: lint-cpp
 permissions:
   contents: read
 env:
-  LLVM_VERSION: 16
+  LLVM_VERSION: 18
   LC_CTYPE: "en_US.UTF-8"
   LC_ALL: "en_US.UTF-8"

@@ -17,7 +17,7 @@ on:
 jobs:
   lint-cpp:
     name: Lint C++
-    runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-13-xlarge' || 'macos-12' }}
+    runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-13-xlarge' || 'macos-13' }}
     steps:
       - name: Checkout
         uses: actions/checkout@v4
@@ -26,7 +26,7 @@ jobs:
       - name: Setup Bun
         uses: ./.github/actions/setup-bun
         with:
-          bun-version: latest
+          bun-version: 1.1.23
       - name: Install Dependencies
         env:
           HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
.github/workflows/run-test.yml (vendored, 224 lines deleted)
@@ -1,224 +0,0 @@
name: Test

permissions:
  contents: read
  actions: read

on:
  workflow_call:
    inputs:
      runs-on:
        type: string
        required: true
      tag:
        type: string
        required: true
      pr-number:
        type: string
        required: true
      run-id:
        type: string
        default: ${{ github.run_id }}

jobs:
  test:
    name: Tests
    runs-on: ${{ inputs.runs-on }}
    steps:
      - if: ${{ runner.os == 'Windows' }}
        name: Setup Git
        run: |
          git config --global core.autocrlf false
          git config --global core.eol lf
      - name: Checkout
        uses: actions/checkout@v4
        with:
          sparse-checkout: |
            package.json
            bun.lockb
            test
            packages/bun-internal-test
            packages/bun-types
      - name: Setup Environment
        shell: bash
        run: |
          echo "${{ inputs.pr-number }}" > pr-number.txt
      - name: Download Bun
        uses: actions/download-artifact@v4
        with:
          name: bun-${{ inputs.tag }}
          path: bun
          github-token: ${{ github.token }}
          run-id: ${{ inputs.run-id || github.run_id }}
      - name: Download pnpm
        uses: pnpm/action-setup@v4
        with:
          version: 8
      - if: ${{ runner.os != 'Windows' }}
        name: Setup Bun
        shell: bash
        run: |
          unzip bun/bun-*.zip
          cd bun-*
          pwd >> $GITHUB_PATH
      - if: ${{ runner.os == 'Windows' }}
        name: Setup Cygwin
        uses: secondlife/setup-cygwin@v3
        with:
          packages: bash
      - if: ${{ runner.os == 'Windows' }}
        name: Setup Bun (Windows)
        run: |
          unzip bun/bun-*.zip
          cd bun-*
          pwd >> $env:GITHUB_PATH
      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: 20
      - name: Install Dependencies
        timeout-minutes: 5
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          bun install
      - name: Install Dependencies (test)
        timeout-minutes: 5
        run: |
          bun install --cwd test
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      - name: Install Dependencies (runner)
        timeout-minutes: 5
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          bun install --cwd packages/bun-internal-test
      - name: Run Tests
        id: test
        timeout-minutes: 90
        shell: bash
        env:
          IS_BUN_CI: 1
          TMPDIR: ${{ runner.temp }}
          BUN_TAG: ${{ inputs.tag }}
          BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: "true"
          SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
          TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
          TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
          TEST_INFO_STRIPE: ${{ secrets.TEST_INFO_STRIPE }}
          TEST_INFO_AZURE_SERVICE_BUS: ${{ secrets.TEST_INFO_AZURE_SERVICE_BUS }}
          SHELLOPTS: igncr
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          node packages/bun-internal-test/src/runner.node.mjs $(which bun)
      - if: ${{ always() }}
        name: Upload Results
        uses: actions/upload-artifact@v4
        with:
          name: bun-${{ inputs.tag }}-tests
          path: |
            test-report.*
            comment.md
            pr-number.txt
          if-no-files-found: error
          overwrite: true
      - if: ${{ always() && steps.test.outputs.failing_tests != '' && github.event.pull_request && github.repository_owner == 'oven-sh' }}
        name: Send Message
        uses: sarisia/actions-status-discord@v1
        with:
          webhook: ${{ secrets.DISCORD_WEBHOOK }}
          nodetail: true
          color: "#FF0000"
          title: ""
          description: |
            ### ❌ [${{ github.event.pull_request.title }}](${{ github.event.pull_request.html_url }})

            @${{ github.actor }}, there are ${{ steps.test.outputs.failing_tests_count || 'some' }} failing tests on bun-${{ inputs.tag }}.

            ${{ steps.test.outputs.failing_tests }}

            **[View logs](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})**
      - name: Fail
        if: ${{ failure() || always() && steps.test.outputs.failing_tests != '' }}
        run: |
          echo "There are ${{ steps.test.outputs.failing_tests_count || 'some' }} failing tests on bun-${{ inputs.tag }}."
          exit 1
  test-node:
    name: Node.js Tests
    # TODO: enable when we start paying attention to the results. In the meantime, this causes CI to queue jobs, wasting developer time.
    if: 0
    runs-on: ${{ inputs.runs-on }}
    steps:
      - if: ${{ runner.os == 'Windows' }}
        name: Setup Git
        run: |
          git config --global core.autocrlf false
          git config --global core.eol lf
      - name: Checkout
        uses: actions/checkout@v4
        with:
          sparse-checkout: |
            test/node.js
      - name: Setup Environment
        shell: bash
        run: |
          echo "${{ inputs.pr-number }}" > pr-number.txt
      - name: Download Bun
        uses: actions/download-artifact@v4
        with:
          name: bun-${{ inputs.tag }}
          path: bun
          github-token: ${{ github.token }}
          run-id: ${{ inputs.run-id || github.run_id }}
      - if: ${{ runner.os != 'Windows' }}
        name: Setup Bun
        shell: bash
        run: |
          unzip bun/bun-*.zip
          cd bun-*
          pwd >> $GITHUB_PATH
      - if: ${{ runner.os == 'Windows' }}
        name: Setup Cygwin
        uses: secondlife/setup-cygwin@v3
        with:
          packages: bash
      - if: ${{ runner.os == 'Windows' }}
        name: Setup Bun (Windows)
        run: |
          unzip bun/bun-*.zip
          cd bun-*
          pwd >> $env:GITHUB_PATH
      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: 20
      - name: Checkout Tests
        shell: bash
        working-directory: test/node.js
        run: |
          node runner.mjs --pull
      - name: Install Dependencies
        timeout-minutes: 5
        shell: bash
        working-directory: test/node.js
        run: |
          bun install
      - name: Run Tests
        timeout-minutes: 10 # Increase when more tests are added
        shell: bash
        working-directory: test/node.js
        env:
          TMPDIR: ${{ runner.temp }}
          BUN_GARBAGE_COLLECTOR_LEVEL: "0"
          BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: "true"
        run: |
          node runner.mjs
      - name: Upload Results
        uses: actions/upload-artifact@v4
        with:
          name: bun-${{ inputs.tag }}-node-tests
          path: |
            test/node.js/summary/*.json
          if-no-files-found: error
          overwrite: true
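run-test.yml hangs its reporting on step outputs: the runner script presumably writes failing_tests and failing_tests_count to $GITHUB_OUTPUT, and the later steps combine always() with an output check so they run even after a failed step, but only when there is something to report. A minimal sketch of the pattern with hypothetical output names:

    # Sketch: publish a step output, then gate a follow-up step on it after failures.
    - name: Run checks
      id: checks
      shell: bash
      run: |
        # hypothetical: collect failures, then expose the count as a step output
        echo "failures=2" >> $GITHUB_OUTPUT
    - name: Report
      if: ${{ always() && steps.checks.outputs.failures != '' }}
      run: echo "there were ${{ steps.checks.outputs.failures }} failures"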
.github/workflows/stale.yaml (vendored, new file, 30 lines added)
@@ -0,0 +1,30 @@
name: Close inactive issues
on:
  # schedule:
  #   - cron: "15 * * * *"
  workflow_dispatch:

jobs:
  close-issues:
    runs-on: ubuntu-latest
    permissions:
      issues: write
      pull-requests: write
    steps:
      - uses: actions/stale@v5
        with:
          days-before-issue-close: 5
          any-of-issue-labels: "needs repro,waiting-for-author"
          exempt-issue-labels: "neverstale"
          exempt-pr-labels: "neverstale"
          remove-stale-when-updated: true
          stale-issue-label: "stale"
          stale-pr-label: "stale"
          stale-issue-message: "This issue is stale and may be closed due to inactivity. If you're still running into this, please leave a comment."
          close-issue-message: "This issue was closed because it has been inactive for 5 days since being marked as stale."
          days-before-pr-stale: 30
          days-before-pr-close: 14
          stale-pr-message: "This pull request is stale and may be closed due to inactivity."
          close-pr-message: "This pull request has been closed due to inactivity."
          repo-token: ${{ github.token }}
          operations-per-run: 1000
.github/workflows/upload.yml (vendored, 82 lines deleted)
@@ -1,82 +0,0 @@
name: Upload Artifacts
run-name: Canary release ${{github.sha}} upload

permissions:
  contents: write

on:
  workflow_run:
    workflows:
      - CI
    types:
      - completed
    branches:
      - main

jobs:
  upload:
    if: ${{ github.repository_owner == 'oven-sh' }}
    name: Upload Artifacts
    runs-on: ubuntu-latest
    steps:
      - name: Download Artifacts
        uses: actions/download-artifact@v4
        with:
          path: bun
          pattern: bun-*
          merge-multiple: true
          github-token: ${{ github.token }}
          run-id: ${{ github.event.workflow_run.id }}
      - name: Check for Artifacts
        run: |
          if [ ! -d "bun" ] || [ -z "$(ls -A bun)" ]; then
            echo "Error: No artifacts were downloaded or 'bun' directory does not exist."
            exit 1 # Fail the job if the condition is met
          else
            echo "Artifacts downloaded successfully."
          fi
      - name: Upload to GitHub Releases
        uses: ncipollo/release-action@v1
        with:
          tag: canary
          name: Canary (${{ github.sha }})
          prerelease: true
          body: This canary release of Bun corresponds to the commit [${{ github.sha }}]
          allowUpdates: true
          replacesArtifacts: true
          generateReleaseNotes: true
          artifactErrorsFailBuild: true
          artifacts: bun/**/bun-*.zip
          token: ${{ github.token }}
      - name: Upload to S3 (using SHA)
        uses: shallwefootball/s3-upload-action@4350529f410221787ccf424e50133cbc1b52704e
        with:
          endpoint: ${{ secrets.AWS_ENDPOINT }}
          aws_key_id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws_secret_access_key: ${{ secrets.AWS_SECRET_ACCESS_KEY}}
          aws_bucket: ${{ secrets.AWS_BUCKET }}
          source_dir: bun
          destination_dir: releases/${{ github.event.workflow_run.head_sha || github.sha }}-canary
      - name: Upload to S3 (using tag)
        uses: shallwefootball/s3-upload-action@4350529f410221787ccf424e50133cbc1b52704e
        with:
          endpoint: ${{ secrets.AWS_ENDPOINT }}
          aws_key_id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws_secret_access_key: ${{ secrets.AWS_SECRET_ACCESS_KEY}}
          aws_bucket: ${{ secrets.AWS_BUCKET }}
          source_dir: bun
          destination_dir: releases/canary
      - name: Announce on Discord
        uses: sarisia/actions-status-discord@v1
        with:
          webhook: ${{ secrets.BUN_DISCORD_GITHUB_CHANNEL_WEBHOOK }}
          nodetail: true
          color: "#1F6FEB"
          title: "New Bun Canary available"
          url: https://github.com/oven-sh/bun/commit/${{ github.sha }}
          description: |
            A new canary build of Bun has been automatically uploaded. To upgrade, run:
            ```sh
            bun upgrade --canary
            # bun upgrade --stable <- to downgrade
            ```
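upload.yml runs on workflow_run completion of CI, so the artifacts it needs were produced by a different run; actions/download-artifact@v4 can reach across runs when given a run-id and a github-token, which is how the Download Artifacts step above works. A minimal sketch of the cross-run download:

    # Sketch: downloading artifacts produced by the triggering workflow run.
    on:
      workflow_run:
        workflows: [CI]
        types: [completed]
    jobs:
      fetch:
        runs-on: ubuntu-latest
        steps:
          - uses: actions/download-artifact@v4
            with:
              pattern: bun-*
              path: artifacts
              github-token: ${{ github.token }}
              run-id: ${{ github.event.workflow_run.id }}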
.gitignore (vendored, 1 line changed)
@@ -145,3 +145,4 @@ zig-cache
 zig-out
 test/node.js/upstream
 .zig-cache
+scripts/env.local
.gitmodules (vendored, 4 lines changed)
@@ -82,3 +82,7 @@ url = https://github.com/oven-sh/zig
 depth = 1
 shallow = true
 fetchRecurseSubmodules = false
+[submodule "src/deps/libdeflate"]
+	path = src/deps/libdeflate
+	url = https://github.com/ebiggers/libdeflate
+	ignore = "dirty"
.lldbinit (new file, 4 lines added)
@@ -0,0 +1,4 @@
command script import src/deps/zig/tools/lldb_pretty_printers.py
command script import src/bun.js/WebKit/Tools/lldb/lldb_webkit.py

# type summary add --summary-string "${var} | inner=${var[0-30]}, source=${var[33-64]}, tag=${var[31-32]}" "unsigned long"
.vscode/launch.json (generated, vendored, 122 lines changed)
@@ -17,6 +17,8 @@
       "cwd": "${workspaceFolder}/test",
       "env": {
         "FORCE_COLOR": "1",
         "BUN_DEBUG_QUIET_LOGS": "1",
+        "BUN_DEBUG_jest": "1",
+        "BUN_GARBAGE_COLLECTOR_LEVEL": "1",
       },
       "console": "internalConsole",
@@ -32,7 +34,6 @@
         "FORCE_COLOR": "1",
         "BUN_DEBUG_QUIET_LOGS": "1",
         "BUN_GARBAGE_COLLECTOR_LEVEL": "1",
-        "BUN_DEBUG_FileReader": "1",
         "BUN_DEBUG_jest": "1",
       },
       "console": "internalConsole",
@@ -53,6 +54,7 @@
       "env": {
         "FORCE_COLOR": "1",
         "BUN_DEBUG_QUIET_LOGS": "1",
+        "BUN_DEBUG_jest": "1",
         "BUN_GARBAGE_COLLECTOR_LEVEL": "0",
       },
       "console": "internalConsole",
@@ -67,6 +69,7 @@
       "env": {
         "FORCE_COLOR": "1",
         "BUN_DEBUG_QUIET_LOGS": "0",
+        "BUN_DEBUG_jest": "1",
         "BUN_GARBAGE_COLLECTOR_LEVEL": "2",
       },
       "console": "internalConsole",
@@ -81,6 +84,7 @@
       "env": {
         "FORCE_COLOR": "1",
         "BUN_DEBUG_QUIET_LOGS": "1",
+        "BUN_DEBUG_jest": "1",
         "BUN_GARBAGE_COLLECTOR_LEVEL": "2",
       },
       "console": "internalConsole",
@@ -95,6 +99,7 @@
       "env": {
         "FORCE_COLOR": "1",
         "BUN_DEBUG_QUIET_LOGS": "1",
+        "BUN_DEBUG_jest": "1",
         "BUN_GARBAGE_COLLECTOR_LEVEL": "2",
       },
       "console": "internalConsole",
@@ -109,6 +114,7 @@
       "env": {
         "FORCE_COLOR": "1",
         "BUN_DEBUG_QUIET_LOGS": "1",
+        "BUN_DEBUG_jest": "1",
         "BUN_GARBAGE_COLLECTOR_LEVEL": "2",
         "BUN_INSPECT": "ws://localhost:0/?wait=1",
       },
@@ -129,6 +135,7 @@
       "env": {
         "FORCE_COLOR": "1",
         "BUN_DEBUG_QUIET_LOGS": "1",
+        "BUN_DEBUG_jest": "1",
         "BUN_GARBAGE_COLLECTOR_LEVEL": "2",
         "BUN_INSPECT": "ws://localhost:0/?break=1",
       },
@@ -150,7 +157,6 @@
       "env": {
         "FORCE_COLOR": "0",
         "BUN_DEBUG_QUIET_LOGS": "1",
-        "BUN_DEBUG_EventLoop": "1",
         "BUN_GARBAGE_COLLECTOR_LEVEL": "2",
       },
       "console": "internalConsole",
@@ -262,6 +268,7 @@
       "env": {
         "FORCE_COLOR": "1",
         "BUN_DEBUG_QUIET_LOGS": "1",
+        "BUN_DEBUG_jest": "1",
         "BUN_GARBAGE_COLLECTOR_LEVEL": "2",
       },
       "console": "internalConsole",
@@ -276,6 +283,7 @@
       "env": {
         "FORCE_COLOR": "1",
         "BUN_DEBUG_QUIET_LOGS": "1",
+        "BUN_DEBUG_jest": "1",
         "BUN_GARBAGE_COLLECTOR_LEVEL": "0",
       },
       "console": "internalConsole",
@@ -289,7 +297,8 @@
       "cwd": "${workspaceFolder}/test",
       "env": {
         "FORCE_COLOR": "1",
-        "BUN_DEBUG_QUIET_LOGS": "0",
+        "BUN_DEBUG_QUIET_LOGS": "1",
+        "BUN_DEBUG_jest": "1",
         "BUN_GARBAGE_COLLECTOR_LEVEL": "2",
       },
       "console": "internalConsole",
@@ -304,6 +313,7 @@
       "env": {
         "FORCE_COLOR": "1",
         "BUN_DEBUG_QUIET_LOGS": "1",
+        "BUN_DEBUG_jest": "1",
         "BUN_GARBAGE_COLLECTOR_LEVEL": "2",
       },
       "console": "internalConsole",
@@ -318,6 +328,7 @@
       "env": {
         "FORCE_COLOR": "1",
         "BUN_DEBUG_QUIET_LOGS": "1",
+        "BUN_DEBUG_jest": "1",
         "BUN_GARBAGE_COLLECTOR_LEVEL": "2",
       },
       "console": "internalConsole",
@@ -332,6 +343,7 @@
       "env": {
         "FORCE_COLOR": "1",
         "BUN_DEBUG_QUIET_LOGS": "1",
+        "BUN_DEBUG_jest": "1",
         "BUN_GARBAGE_COLLECTOR_LEVEL": "2",
         "BUN_INSPECT": "ws://localhost:0/?wait=1",
       },
@@ -352,6 +364,7 @@
       "env": {
         "FORCE_COLOR": "1",
         "BUN_DEBUG_QUIET_LOGS": "1",
+        "BUN_DEBUG_jest": "1",
         "BUN_GARBAGE_COLLECTOR_LEVEL": "2",
         "BUN_INSPECT": "ws://localhost:0/?break=1",
       },
@@ -447,6 +460,11 @@
       "program": "node",
       "args": ["test/runner.node.mjs"],
       "cwd": "${workspaceFolder}",
+      "env": {
+        "FORCE_COLOR": "1",
+        "BUN_DEBUG_QUIET_LOGS": "1",
+        "BUN_GARBAGE_COLLECTOR_LEVEL": "2",
+      },
       "console": "internalConsole",
     },
     // Windows: bun test [file]
@@ -473,7 +491,6 @@
           "name": "BUN_DEBUG_jest",
           "value": "1",
         },
-
        {
          "name": "BUN_GARBAGE_COLLECTOR_LEVEL",
          "value": "1",
@@ -500,19 +517,7 @@
          "value": "1",
        },
-       {
-         "name": "BUN_DEBUG_EventLoop",
-         "value": "1",
-       },
-       {
-         "name": "BUN_DEBUG_uv",
-         "value": "1",
-       },
-       {
-         "name": "BUN_DEBUG_SYS",
-         "value": "1",
-       },
        {
-         "name": "BUN_DEBUG_PipeWriter",
+         "name": "BUN_DEBUG_jest",
          "value": "1",
        },
        {
@@ -540,6 +545,10 @@
          "name": "BUN_DEBUG_QUIET_LOGS",
          "value": "1",
        },
+       {
+         "name": "BUN_DEBUG_jest",
+         "value": "1",
+       },
        {
          "name": "BUN_GARBAGE_COLLECTOR_LEVEL",
          "value": "0",
@@ -565,6 +574,10 @@
          "name": "BUN_DEBUG_QUIET_LOGS",
          "value": "0",
        },
+       {
+         "name": "BUN_DEBUG_jest",
+         "value": "1",
+       },
        {
          "name": "BUN_GARBAGE_COLLECTOR_LEVEL",
          "value": "2",
@@ -590,6 +603,10 @@
          "name": "BUN_DEBUG_QUIET_LOGS",
          "value": "1",
        },
+       {
+         "name": "BUN_DEBUG_jest",
+         "value": "1",
+       },
        {
          "name": "BUN_GARBAGE_COLLECTOR_LEVEL",
          "value": "2",
@@ -624,6 +641,10 @@
          "name": "BUN_DEBUG_QUIET_LOGS",
          "value": "1",
        },
+       {
+         "name": "BUN_DEBUG_jest",
+         "value": "1",
+       },
        {
          "name": "BUN_GARBAGE_COLLECTOR_LEVEL",
          "value": "2",
@@ -659,6 +680,10 @@
          "name": "BUN_DEBUG_QUIET_LOGS",
          "value": "1",
        },
+       {
+         "name": "BUN_DEBUG_jest",
+         "value": "1",
+       },
        {
          "name": "BUN_GARBAGE_COLLECTOR_LEVEL",
          "value": "2",
@@ -680,7 +705,10 @@
          "name": "FORCE_COLOR",
          "value": "1",
        },
-
+       {
+         "name": "BUN_DEBUG_QUIET_LOGS",
+         "value": "1",
+       },
        {
          "name": "BUN_GARBAGE_COLLECTOR_LEVEL",
          "value": "0",
@@ -704,7 +732,7 @@
        },
        {
          "name": "BUN_DEBUG_QUIET_LOGS",
-         "value": "0",
+         "value": "1",
        },
        {
          "name": "BUN_GARBAGE_COLLECTOR_LEVEL",
@@ -800,6 +828,10 @@
          "name": "BUN_DEBUG_QUIET_LOGS",
          "value": "1",
        },
+       {
+         "name": "BUN_DEBUG_jest",
+         "value": "1",
+       },
        {
          "name": "BUN_GARBAGE_COLLECTOR_LEVEL",
          "value": "2",
@@ -825,6 +857,10 @@
          "name": "BUN_DEBUG_QUIET_LOGS",
          "value": "1",
        },
+       {
+         "name": "BUN_DEBUG_jest",
+         "value": "1",
+       },
        {
          "name": "BUN_GARBAGE_COLLECTOR_LEVEL",
          "value": "0",
@@ -850,6 +886,10 @@
          "name": "BUN_DEBUG_QUIET_LOGS",
          "value": "0",
        },
+       {
+         "name": "BUN_DEBUG_jest",
+         "value": "1",
+       },
        {
          "name": "BUN_GARBAGE_COLLECTOR_LEVEL",
          "value": "2",
@@ -875,6 +915,10 @@
          "name": "BUN_DEBUG_QUIET_LOGS",
          "value": "1",
        },
+       {
+         "name": "BUN_DEBUG_jest",
+         "value": "1",
+       },
        {
          "name": "BUN_GARBAGE_COLLECTOR_LEVEL",
          "value": "2",
@@ -900,6 +944,10 @@
          "name": "BUN_DEBUG_QUIET_LOGS",
          "value": "1",
        },
+       {
+         "name": "BUN_DEBUG_jest",
+         "value": "1",
+       },
        {
          "name": "BUN_GARBAGE_COLLECTOR_LEVEL",
          "value": "2",
@@ -925,6 +973,10 @@
          "name": "BUN_DEBUG_QUIET_LOGS",
          "value": "1",
        },
+       {
+         "name": "BUN_DEBUG_jest",
+         "value": "1",
+       },
        {
          "name": "BUN_GARBAGE_COLLECTOR_LEVEL",
          "value": "2",
@@ -959,6 +1011,10 @@
          "name": "BUN_DEBUG_QUIET_LOGS",
          "value": "1",
        },
+       {
+         "name": "BUN_DEBUG_jest",
+         "value": "1",
+       },
        {
          "name": "BUN_GARBAGE_COLLECTOR_LEVEL",
          "value": "2",
@@ -1045,6 +1101,10 @@
          "name": "BUN_DEBUG_QUIET_LOGS",
          "value": "1",
        },
+       {
+         "name": "BUN_DEBUG_jest",
+         "value": "1",
+       },
        {
          "name": "BUN_GARBAGE_COLLECTOR_LEVEL",
          "value": "0",
@@ -1068,7 +1128,11 @@
        },
        {
          "name": "BUN_DEBUG_QUIET_LOGS",
-         "value": "0",
+         "value": "1",
        },
+       {
+         "name": "BUN_DEBUG_jest",
+         "value": "1",
+       },
        {
          "name": "BUN_GARBAGE_COLLECTOR_LEVEL",
@@ -1095,6 +1159,24 @@
       "program": "node",
       "args": ["test/runner.node.mjs"],
       "cwd": "${workspaceFolder}",
+      "environment": [
+        {
+          "name": "FORCE_COLOR",
+          "value": "1",
+        },
+        {
+          "name": "BUN_DEBUG_QUIET_LOGS",
+          "value": "1",
+        },
+        {
+          "name": "BUN_DEBUG_jest",
+          "value": "1",
+        },
+        {
+          "name": "BUN_GARBAGE_COLLECTOR_LEVEL",
+          "value": "2",
+        },
+      ],
       "console": "internalConsole",
     },
   ],
.vscode/settings.json (vendored, 9 lines changed)
@@ -15,6 +15,9 @@
     "src/bun.js/WebKit": true,
     "src/deps/*/**": true,
     "test/node.js/upstream": true,
+    // This will fill up your whole search history.
+    "test/js/node/test/fixtures": true,
+    "test/js/node/test/common": true,
   },
   "search.followSymlinks": false,
   "search.useIgnoreFiles": true,
@@ -42,8 +45,11 @@
     "editor.defaultFormatter": "ziglang.vscode-zig",
   },

-  // C++
+  // lldb
+  "lldb.launch.initCommands": ["command source ${workspaceFolder}/.lldbinit"],
+  "lldb.verboseLogging": false,
+
   // C++
   "cmake.configureOnOpen": false,
   "C_Cpp.errorSquiggles": "enabled",
   "[cpp]": {
@@ -132,6 +138,7 @@
   },
   "files.associations": {
     "*.idl": "cpp",
+    "array": "cpp",
   },
   "C_Cpp.files.exclude": {
     "**/.vscode": true,
284
CMakeLists.txt
@@ -3,8 +3,8 @@ cmake_policy(SET CMP0091 NEW)
cmake_policy(SET CMP0067 NEW)

set(CMAKE_POLICY_DEFAULT_CMP0069 NEW)
set(Bun_VERSION "1.1.18")
set(WEBKIT_TAG 615e8585f96aa718b0f5158210259b83fe8440ea)
set(Bun_VERSION "1.1.27")
set(WEBKIT_TAG 21fc366db3de8f30dbb7f5997b9b9f5cf422ff1e)

set(BUN_WORKDIR "${CMAKE_CURRENT_BINARY_DIR}")
message(STATUS "Configuring Bun ${Bun_VERSION} in ${BUN_WORKDIR}")
@@ -15,14 +15,30 @@ set(CMAKE_C_STANDARD 17)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
set(CMAKE_C_STANDARD_REQUIRED ON)

# Should not start with v
option(ZIG_CACHE_DIR "Path to the Zig cache directory" "")

if(NOT ZIG_CACHE_DIR)
SET(ZIG_CACHE_DIR "${BUN_WORKDIR}")
cmake_path(APPEND ZIG_CACHE_DIR "zig-cache")
endif()

set(LOCAL_ZIG_CACHE_DIR "${ZIG_CACHE_DIR}")
set(GLOBAL_ZIG_CACHE_DIR "${ZIG_CACHE_DIR}")

cmake_path(APPEND LOCAL_ZIG_CACHE_DIR "local")
cmake_path(APPEND GLOBAL_ZIG_CACHE_DIR "global")

# Used in process.version, process.versions.node, napi, and elsewhere
set(REPORTED_NODEJS_VERSION "22.3.0")
set(REPORTED_NODEJS_VERSION "22.6.0")

# Used in process.versions.modules and compared while loading V8 modules
set(REPORTED_NODEJS_ABI_VERSION "127")

# WebKit uses -std=gnu++20 on non-macOS non-Windows
# If we do not set this, it will crash at startup on the first memory allocation.
if(NOT WIN32 AND NOT APPLE)
set(CMAKE_CXX_EXTENSIONS ON)
set(CMAKE_POSITION_INDEPENDENT_CODE FALSE)
endif()

# --- Build Type ---
@@ -39,6 +55,13 @@ else()
message(STATUS "The CMake build type is: ${CMAKE_BUILD_TYPE}")
endif()

if(WIN32 AND NOT CMAKE_CL_SHOWINCLUDES_PREFIX)
# workaround until cmake fix is shipped https://github.com/ninja-build/ninja/issues/2280
# './build/.ninja_deps' may need to be deleted, the bug is "Note: including file: ..." is saved
# as part of some file paths
set(CMAKE_CL_SHOWINCLUDES_PREFIX "Note: including file:")
endif()

if(CMAKE_BUILD_TYPE STREQUAL "Debug")
set(DEBUG ON)
set(DEFAULT_ZIG_OPTIMIZE "Debug")
@@ -51,11 +74,8 @@ elseif(CMAKE_BUILD_TYPE STREQUAL "Release")
set(DEFAULT_ZIG_OPTIMIZE "ReleaseFast")

if(WIN32)
# lld-link will strip it for you, so we can build directly to bun.exe
# Debug symbols are in a separate file: bun.pdb
set(bun "bun")

# TODO(@paperdave): Remove this
# it is enabled for the time being to make sure to catch more bugs in the experimental windows builds
set(DEFAULT_ZIG_OPTIMIZE "ReleaseSafe")
else()
if(ZIG_OPTIMIZE STREQUAL "Debug")
@@ -68,7 +88,7 @@ endif()

# --- MacOS SDK ---
if(APPLE AND DEFINED ENV{CI})
set(CMAKE_OSX_DEPLOYMENT_TARGET "12.0")
set(CMAKE_OSX_DEPLOYMENT_TARGET "13.0")
endif()

if(APPLE AND NOT CMAKE_OSX_DEPLOYMENT_TARGET)
@@ -112,7 +132,11 @@ endif()
# we do some extra work afterwards to double-check, and we will rerun BUN_FIND_LLVM if the compiler did not match.
#
# If the user passes -DLLVM_PREFIX, most of this logic is skipped, but we still warn if invalid.
set(LLVM_VERSION 16)
if(WIN32 OR APPLE)
set(LLVM_VERSION 18)
else()
set(LLVM_VERSION 16)
endif()

macro(BUN_FIND_LLVM)
find_program(
@@ -146,11 +170,12 @@ macro(BUN_FIND_LLVM)
PATHS ENV PATH ${PLATFORM_LLVM_SEARCH_PATHS}
DOC "Path to LLVM ${LLVM_VERSION}'s llvm-strip binary"
)

find_program(
STRIP
NAMES strip
PATHS ENV PATH ${PLATFORM_LLVM_SEARCH_PATHS}
DOC "Path to LLVM ${LLVM_VERSION}'s llvm-strip binary"
DOC "Path to strip binary"
)
find_program(
DSYMUTIL
@@ -301,6 +326,7 @@ endif()
# -- Build Flags --
option(USE_STATIC_SQLITE "Statically link SQLite?" ${DEFAULT_ON_UNLESS_APPLE})
option(USE_CUSTOM_ZLIB "Use Bun's recommended version of zlib" ON)
option(USE_CUSTOM_LIBDEFLATE "Use Bun's recommended version of libdeflate" ON)
option(USE_CUSTOM_BORINGSSL "Use Bun's recommended version of BoringSSL" ON)
option(USE_CUSTOM_LIBARCHIVE "Use Bun's recommended version of libarchive" ON)
option(USE_CUSTOM_MIMALLOC "Use Bun's recommended version of Mimalloc" ON)
@@ -322,6 +348,11 @@ option(USE_STATIC_LIBATOMIC "Statically link libatomic, requires the presence of

option(USE_LTO "Enable Link-Time Optimization" ${DEFAULT_LTO})

if(APPLE AND USE_LTO)
set(USE_LTO OFF)
message(FATAL_ERROR "Link-Time Optimization is not supported on macOS because it requires -fuse-ld=lld and lld causes many segfaults on macOS (likely related to stack size)")
endif()

if(WIN32 AND USE_LTO)
set(CMAKE_LINKER_TYPE LLD)
set(CMAKE_INTERPROCEDURAL_OPTIMIZATION OFF)
@@ -448,6 +479,8 @@ elseif(NOT BUN_CPP_ONLY AND NOT BUN_LINK_ONLY AND NOT BUN_TIDY_ONLY AND NOT BUN_

message(STATUS "Installed Zig Compiler: ${ZIG_COMPILER}")
set_property(DIRECTORY APPEND PROPERTY CMAKE_CONFIGURE_DEPENDS "build.zig")

message(STATUS "Using zig cache directory: ${ZIG_CACHE_DIR}")
endif()

# Bun
@@ -472,7 +505,7 @@ if(USE_UNIFIED_SOURCES)
endif()

# CCache
find_program(CCACHE_PROGRAM sccache)
# find_program(CCACHE_PROGRAM sccache)
find_program(CCACHE_PROGRAM ccache)

if(CCACHE_PROGRAM)
@@ -634,20 +667,11 @@ file(GLOB BUN_CPP ${CONFIGURE_DEPENDS}
"${BUN_SRC}/bun.js/bindings/sqlite/*.cpp"
"${BUN_SRC}/bun.js/bindings/webcrypto/*.cpp"
"${BUN_SRC}/bun.js/bindings/webcrypto/*/*.cpp"
"${BUN_SRC}/bun.js/bindings/v8/*.cpp"
"${BUN_SRC}/deps/picohttpparser/picohttpparser.c"
)
list(APPEND BUN_RAW_SOURCES ${BUN_CPP})

# -- Brotli --
set(BROTLI_SRC "${CMAKE_CURRENT_SOURCE_DIR}/src/deps/brotli")
file(GLOB BROTLI_FILES ${CONFIGURE_DEPENDS}
"${BROTLI_SRC}/common/*.c"
"${BROTLI_SRC}/enc/*.c"
"${BROTLI_SRC}/dec/*.c"
)
list(APPEND BUN_RAW_SOURCES ${BROTLI_FILES})
include_directories("${BUN_DEPS_DIR}/brotli/include")

# -- uSockets --
set(USOCKETS_SRC "${CMAKE_CURRENT_SOURCE_DIR}/packages/bun-usockets/src")
file(GLOB USOCKETS_FILES ${CONFIGURE_DEPENDS}
@@ -684,6 +708,32 @@ add_custom_command(
)
list(APPEND BUN_RAW_SOURCES "${BUN_WORKDIR}/codegen/ZigGeneratedClasses.cpp")

if(NOT NO_CODEGEN)
# --- ErrorCode Generator ---
file(GLOB NODE_ERRORS_TS ${CONFIGURE_DEPENDS}
"${BUN_SRC}/bun.js/bindings/ErrorCode.ts"
)
add_custom_command(
OUTPUT "${BUN_WORKDIR}/codegen/ErrorCode+List.h" "${BUN_WORKDIR}/codegen/ErrorCode+Data.h" "${BUN_WORKDIR}/codegen/ErrorCode.zig"
COMMAND ${BUN_EXECUTABLE} run "${BUN_CODEGEN_SRC}/generate-node-errors.ts" "${BUN_WORKDIR}/codegen"
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
MAIN_DEPENDENCY "${BUN_CODEGEN_SRC}/generate-node-errors.ts"
DEPENDS ${NODE_ERRORS_TS}
VERBATIM
COMMENT "Generating ErrorCode.zig"
)

# This needs something to force it to be regenerated
WEBKIT_ADD_SOURCE_DEPENDENCIES(
"${BUN_SRC}/bun.js/bindings/ErrorCode.cpp"
"${BUN_WORKDIR}/codegen/ErrorCode+List.h"
)
WEBKIT_ADD_SOURCE_DEPENDENCIES(
"${BUN_SRC}/bun.js/bindings/ErrorCode.h"
"${BUN_WORKDIR}/codegen/ErrorCode+Data.h"
)
endif()

# --- JSSink Generator ---
add_custom_command(
OUTPUT "${BUN_WORKDIR}/codegen/JSSink.cpp"
@@ -757,7 +807,7 @@ if(NOT NO_CODEGEN)
OUTPUT ${BUN_IDENTIFIER_CACHE_OUT}
MAIN_DEPENDENCY "${BUN_SRC}/js_lexer/identifier_data.zig"
DEPENDS "${BUN_SRC}/js_lexer/identifier_cache.zig"
COMMAND ${ZIG_COMPILER} run "--zig-lib-dir" "${ZIG_LIB_DIR}" "${BUN_SRC}/js_lexer/identifier_data.zig"
COMMAND ${ZIG_COMPILER} run "--zig-lib-dir" "${ZIG_LIB_DIR}" "--cache-dir" "${LOCAL_ZIG_CACHE_DIR}" "--global-cache-dir" "${GLOBAL_ZIG_CACHE_DIR}" "${BUN_SRC}/js_lexer/identifier_data.zig"
VERBATIM
COMMENT "Building Identifier Cache"
)
@@ -778,6 +828,9 @@ if(NOT NO_CODEGEN)
"${BUN_SRC}/js/thirdparty/*.ts"
"${BUN_SRC}/js/internal/*.js"
"${BUN_SRC}/js/internal/*.ts"
"${BUN_SRC}/js/internal/cluster/*.ts"
"${BUN_SRC}/js/internal/util/*.js"
"${BUN_SRC}/js/internal/fs/*.ts"
"${BUN_SRC}/js/node/*.js"
"${BUN_SRC}/js/node/*.ts"
"${BUN_SRC}/js/thirdparty/*.js"
@@ -861,13 +914,24 @@ file(GLOB ZIG_FILES
"${BUN_SRC}/*/*/*/*/*.zig"
)

if(NOT BUN_ZIG_OBJ_FORMAT)
# To use LLVM bitcode from Zig, more work needs to be done. Currently, an install of
# LLVM 18.1.7 is not compatible with the bitcode that Zig 0.13 outputs (which itself uses LLVM 18.1.7).
# Change to "bc" to experiment; "Invalid record" means the output is not valid.
set(BUN_ZIG_OBJ_FORMAT "obj")
endif()

if(NOT BUN_ZIG_OBJ_DIR)
set(BUN_ZIG_OBJ_DIR "${BUN_WORKDIR}/CMakeFiles")
endif()

get_filename_component(BUN_ZIG_OBJ_DIR "${BUN_ZIG_OBJ_DIR}" REALPATH BASE_DIR "${CMAKE_BINARY_DIR}")

set(BUN_ZIG_OBJ "${BUN_ZIG_OBJ_DIR}/bun-zig.o")
if(WIN32)
set(BUN_ZIG_OBJ "${BUN_ZIG_OBJ_DIR}/bun-zig.o")
else()
set(BUN_ZIG_OBJ "${BUN_ZIG_OBJ_DIR}/bun-zig.o")
endif()

set(USES_TERMINAL_NOT_IN_CI "")

@@ -882,6 +946,7 @@ if(NOT BUN_LINK_ONLY AND NOT BUN_CPP_ONLY)
"${ZIG_COMPILER}" "build" "obj"
"--zig-lib-dir" "${ZIG_LIB_DIR}"
"--prefix" "${BUN_ZIG_OBJ_DIR}"
"--verbose"
"-Dgenerated-code=${BUN_WORKDIR}/codegen"
"-freference-trace=10"
"-Dversion=${Bun_VERSION}"
@@ -891,10 +956,14 @@ if(NOT BUN_LINK_ONLY AND NOT BUN_CPP_ONLY)
"-Dtarget=${ZIG_TARGET}"
"-Denable_logs=${ENABLE_LOGS}"
"-Dreported_nodejs_version=${REPORTED_NODEJS_VERSION}"
"-Dobj_format=${BUN_ZIG_OBJ_FORMAT}"
"--cache-dir" "${LOCAL_ZIG_CACHE_DIR}"
"--global-cache-dir" "${GLOBAL_ZIG_CACHE_DIR}"
DEPENDS
"${CMAKE_CURRENT_SOURCE_DIR}/build.zig"
"${ZIG_FILES}"
"${BUN_WORKDIR}/codegen/ZigGeneratedClasses.zig"
"${BUN_WORKDIR}/codegen/ErrorCode.zig"
"${BUN_WORKDIR}/codegen/ResolvedSourceTag.zig"
"${BUN_IDENTIFIER_CACHE_OUT}"
"${BUN_SRC}/api/schema.zig"
@@ -954,16 +1023,18 @@ set_target_properties(${bun} PROPERTIES
VISIBILITY_INLINES_HIDDEN YES
)

if(APPLE)
add_compile_definitions("__DARWIN_NON_CANCELABLE=1")
endif()

add_compile_definitions(

# TODO: are all of these variables strictly necessary?
"_HAS_EXCEPTIONS=0"
"LIBUS_USE_OPENSSL=1"
"UWS_HTTPRESPONSE_NO_WRITEMARK=1"
"LIBUS_USE_BORINGSSL=1"
"WITH_BORINGSSL=1"
"STATICALLY_LINKED_WITH_JavaScriptCore=1"
"STATICALLY_LINKED_WITH_WTF=1"
"STATICALLY_LINKED_WITH_BMALLOC=1"
"BUILDING_WITH_CMAKE=1"
"JSC_OBJC_API_ENABLED=0"
@@ -974,11 +1045,24 @@ add_compile_definitions(
"BUILDING_JSCONLY__"
"BUN_DYNAMIC_JS_LOAD_PATH=\"${BUN_WORKDIR}/js\""
"REPORTED_NODEJS_VERSION=\"${REPORTED_NODEJS_VERSION}\""
"REPORTED_NODEJS_ABI_VERSION=${REPORTED_NODEJS_ABI_VERSION}"
)

if(NOT ASSERT_ENABLED)
if(APPLE)
add_compile_definitions("_LIBCXX_ENABLE_ASSERTIONS=0")
add_compile_definitions("_LIBCPP_HARDENING_MODE=_LIBCPP_HARDENING_MODE_NONE")
endif()

add_compile_definitions("NDEBUG=1")
else()
if(APPLE)
add_compile_definitions("_LIBCXX_ENABLE_ASSERTIONS=1")
add_compile_definitions("_LIBCPP_HARDENING_MODE=_LIBCPP_HARDENING_MODE_DEBUG")
elseif(CMAKE_SYSTEM_NAME STREQUAL "Linux")
add_compile_definitions("_GLIBCXX_ASSERTIONS=1")
endif()

add_compile_definitions("ASSERT_ENABLED=1")
endif()

@@ -1048,12 +1132,25 @@ if(CMAKE_BUILD_TYPE STREQUAL "Debug")
-Werror=uninitialized
-Werror=conditional-uninitialized
-Werror=suspicious-memaccess
-Werror=int-conversion
-Werror=nonnull
-Werror=move
-Werror=sometimes-uninitialized
-Werror=unused
-Wno-unused-function
-Wno-nullability-completeness
-Werror
-fsanitize=null
-fsanitize-recover=all
-fsanitize=bounds
-fsanitize=return
-fsanitize=nullability-arg
-fsanitize=nullability-assign
-fsanitize=nullability-return
-fsanitize=returns-nonnull-attribute
-fsanitize=unreachable
)
target_link_libraries(${bun} PRIVATE -fsanitize=null)
else()
target_compile_options(${bun} PUBLIC /Od /Z7)
endif()
@@ -1064,7 +1161,7 @@ elseif(CMAKE_BUILD_TYPE STREQUAL "Release")

if(NOT WIN32)
if(USE_LTO)
list(APPEND LTO_FLAG "-flto=full" "-emit-llvm")
list(APPEND LTO_FLAG "-flto=full" "-emit-llvm" "-fwhole-program-vtables" "-fforce-emit-vtables")
endif()

# Leave -Werror=unused off in release builds so we avoid errors from variables that are only used in ASSERT
@@ -1075,8 +1172,11 @@ elseif(CMAKE_BUILD_TYPE STREQUAL "Release")
-Werror=uninitialized
-Werror=conditional-uninitialized
-Werror=suspicious-memaccess
-Werror=int-conversion
-Werror=nonnull
-Werror=move
-Werror=sometimes-uninitialized
-Wno-nullability-completeness
-Werror
)
else()
@@ -1085,13 +1185,36 @@ elseif(CMAKE_BUILD_TYPE STREQUAL "Release")
if(USE_LTO)
target_compile_options(${bun} PUBLIC -Xclang -emit-llvm-bc)

# -emit-llvm seems to not be supported or under a different name on Windows.
list(APPEND LTO_FLAG "-flto=full")
list(APPEND LTO_LINK_FLAG "-flto=full")
list(APPEND LTO_LINK_FLAG "/LTCG")
list(APPEND LTO_LINK_FLAG "/OPT:REF")
list(APPEND LTO_LINK_FLAG "/OPT:NOICF")
endif()

target_compile_options(${bun} PUBLIC /O2 ${LTO_FLAG})
target_link_options(${bun} PUBLIC ${LTO_LINK_FLAG} /DEBUG:FULL)
target_compile_options(${bun} PUBLIC
/O2
${LTO_FLAG}
/Gy
/Gw
/GF
/GA
)
target_link_options(${bun} PUBLIC
${LTO_LINK_FLAG}
/DEBUG:FULL

/delayload:ole32.dll
/delayload:WINMM.dll
/delayload:dbghelp.dll
/delayload:VCRUNTIME140_1.dll

# libuv loads these two immediately, but for some reason it seems to still be slightly faster to delayload them
/delayload:WS2_32.dll
/delayload:WSOCK32.dll
/delayload:ADVAPI32.dll
/delayload:IPHLPAPI.dll
)
endif()
endif()

@@ -1109,6 +1232,11 @@ else()
# On arm macOS, we can set it to a minimum of the M1 cpu set. this might be the default already.
target_compile_options(${bun} PUBLIC "-mcpu=apple-m1")
endif()

if(NOT WIN32 AND NOT APPLE AND ARCH STREQUAL "aarch64")
# on arm64 linux, we set a minimum of armv8
target_compile_options(${bun} PUBLIC -march=armv8-a+crc -mtune=ampere1)
endif()
endif()

target_compile_options(${bun} PUBLIC -ferror-limit=${ERROR_LIMIT})
@@ -1122,23 +1250,32 @@ if(WIN32)
"BORINGSSL_NO_CXX=1" # lol
)

# set_property(TARGET ${bun} PROPERTY MSVC_RUNTIME_LIBRARY "MultiThreaded")
set_property(TARGET ${bun} PROPERTY MSVC_RUNTIME_LIBRARY "MultiThreadedDLL")
# set_property(TARGET ${bun} PROPERTY MSVC_RUNTIME_LIBRARY "MultiThreaded$<$<CONFIG:Debug>:Debug>")
set_property(TARGET ${bun} PROPERTY MSVC_RUNTIME_LIBRARY "MultiThreaded")

target_compile_options(${bun} PUBLIC "/EHsc" "/GR-" -Xclang -fno-c++-static-destructors)

target_compile_options(${bun} PUBLIC "/EHsc" "/GR-")
target_link_options(${bun} PUBLIC "/STACK:0x1200000,0x100000" "/DEF:${BUN_SRC}/symbols.def" "/errorlimit:0")
else()
target_compile_options(${bun} PUBLIC
-fPIC
-mtune=${CPU_TARGET}
-fconstexpr-steps=2542484
-fconstexpr-depth=54
-fno-exceptions
-fno-asynchronous-unwind-tables
-fno-unwind-tables
-fno-c++-static-destructors
-fvisibility=hidden
-fvisibility-inlines-hidden
-fno-rtti
-fno-omit-frame-pointer
-mno-omit-leaf-frame-pointer
-fno-pic
-fno-pie
-faddrsig
-ffile-prefix-map="${CMAKE_CURRENT_SOURCE_DIR}"=.
-ffile-prefix-map="${BUN_DEPS_DIR}"=src/deps
-ffile-prefix-map="${BUN_DEPS_OUT_DIR}"=src/deps
)
endif()

@@ -1148,17 +1285,18 @@ if(APPLE)
target_link_options(${bun} PUBLIC "-Wl,-stack_size,0x1200000")
target_link_options(${bun} PUBLIC "-exported_symbols_list" "${BUN_SRC}/symbols.txt")
set_target_properties(${bun} PROPERTIES LINK_DEPENDS "${BUN_SRC}/symbols.txt")

target_link_options(${bun} PUBLIC "-fno-keep-static-consts")
target_link_libraries(${bun} PRIVATE "resolv")
endif()

if(UNIX AND NOT APPLE)
target_link_options(${bun} PUBLIC
"-fuse-ld=lld"
"-static-libstdc++"
"-static-libgcc"
"-Wl,-z,now"
-fuse-ld=lld-${LLVM_VERSION}
-fno-pic
-static-libstdc++
-static-libgcc
"-Wl,-no-pie"
"-Wl,-icf=safe"
"-Wl,--as-needed"
"-Wl,--gc-sections"
"-Wl,-z,stack-size=12800000"
@@ -1187,6 +1325,8 @@ if(UNIX AND NOT APPLE)
"-rdynamic"
"-Wl,--dynamic-list=${BUN_SRC}/symbols.dyn"
"-Wl,--version-script=${BUN_SRC}/linker.lds"
-Wl,-z,lazy
-Wl,-z,norelro
)

target_link_libraries(${bun} PRIVATE "c")
@@ -1220,12 +1360,16 @@ endif()

# --- Stripped Binary "bun"
if(CMAKE_BUILD_TYPE STREQUAL "Release" AND NOT WIN32 AND NOT ASSERT_ENABLED)
# add_custom_command(
# TARGET ${bun}
# POST_BUILD
# COMMAND ${DSYMUTIL} -o ${BUN_WORKDIR}/bun.dSYM ${BUN_WORKDIR}/${bun}
# COMMENT "Stripping Symbols"
# )
# if(CI AND APPLE)
if(APPLE)
add_custom_command(
TARGET ${bun}
POST_BUILD
COMMAND ${DSYMUTIL} -o ${BUN_WORKDIR}/${bun}.dSYM ${BUN_WORKDIR}/${bun}
COMMENT "Generating .dSYM"
)
endif()

add_custom_command(
TARGET ${bun}
POST_BUILD
@@ -1295,6 +1439,19 @@ else()
target_link_libraries(${bun} PRIVATE LibArchive::LibArchive)
endif()

if(USE_CUSTOM_LIBDEFLATE)
include_directories(${BUN_DEPS_DIR}/libdeflate)

if(WIN32)
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/deflate.lib")
else()
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/libdeflate.a")
endif()
else()
find_package(LibDeflate REQUIRED)
target_link_libraries(${bun} PRIVATE LibDeflate::LibDeflate)
endif()

if(USE_CUSTOM_MIMALLOC)
include_directories(${BUN_DEPS_DIR}/mimalloc/include)

@@ -1393,6 +1550,11 @@ if(USE_STATIC_SQLITE)
"SQLITE_ENABLE_JSON1=1"
"SQLITE_ENABLE_MATH_FUNCTIONS=1"
)

if(WIN32)
target_compile_options(sqlite3 PRIVATE /MT /U_DLL)
endif()

target_link_libraries(${bun} PRIVATE sqlite3)
message(STATUS "Using static sqlite3")
target_compile_definitions(${bun} PRIVATE "LAZY_LOAD_SQLITE=0")
@@ -1401,6 +1563,24 @@ else()
target_compile_definitions(${bun} PRIVATE "LAZY_LOAD_SQLITE=1")
endif()

# -- Brotli --
set(BROTLI_SRC "${CMAKE_CURRENT_SOURCE_DIR}/src/deps/brotli")
file(GLOB BROTLI_FILES ${CONFIGURE_DEPENDS}
"${BROTLI_SRC}/common/*.c"
"${BROTLI_SRC}/enc/*.c"
"${BROTLI_SRC}/dec/*.c"
)
add_library(brotli STATIC ${BROTLI_FILES})
target_include_directories(brotli PRIVATE "${BROTLI_SRC}/include")
target_compile_definitions(brotli PRIVATE "BROTLI_STATIC")

if(WIN32)
target_compile_options(brotli PRIVATE /MT /U_DLL)
endif()

target_link_libraries(${bun} PRIVATE brotli)
include_directories("${BUN_DEPS_DIR}/brotli/include")

if(USE_CUSTOM_LSHPACK)
include_directories(${BUN_DEPS_DIR}/ls-hpack)

@@ -1418,9 +1598,11 @@ endif()
if(NOT WIN32)
target_link_libraries(${bun} PRIVATE "${WEBKIT_LIB_DIR}/libWTF.a")
target_link_libraries(${bun} PRIVATE "${WEBKIT_LIB_DIR}/libJavaScriptCore.a")
target_link_libraries(${bun} PRIVATE "${WEBKIT_LIB_DIR}/libbmalloc.a")

if(NOT APPLE OR EXISTS "${WEBKIT_LIB_DIR}/libbmalloc.a")
target_link_libraries(${bun} PRIVATE "${WEBKIT_LIB_DIR}/libbmalloc.a")
endif()
else()
target_link_options(${bun} PRIVATE "-static")
target_link_libraries(${bun} PRIVATE
"${WEBKIT_LIB_DIR}/WTF.lib"
"${WEBKIT_LIB_DIR}/JavaScriptCore.lib"
@@ -1430,10 +1612,10 @@ else()
winmm
bcrypt
ntdll
ucrt
userenv
dbghelp
wsock32 # ws2_32 required by TransmitFile aka sendfile on windows
delayimp.lib
)
endif()

@@ -1451,12 +1633,14 @@ endif()

if(BUN_TIDY_ONLY)
find_program(CLANG_TIDY_EXE NAMES "clang-tidy")
set(CLANG_TIDY_COMMAND "${CLANG_TIDY_EXE}" "-checks=-*,clang-analyzer-*,-clang-analyzer-webkit.UncountedLambdaCapturesChecker" "--fix" "--fix-errors" "--format-style=webkit" "--warnings-as-errors=*")

# webkit ones are disabled because they're noisy, e.g. for JavaScriptCore/Lookup.h
set(CLANG_TIDY_COMMAND "${CLANG_TIDY_EXE}" "-checks=-*,clang-analyzer-*,-clang-analyzer-webkit.UncountedLambdaCapturesChecker,-clang-analyzer-optin.core.EnumCastOutOfRange,-clang-analyzer-webkit.RefCntblBaseVirtualDtor" "--fix" "--fix-errors" "--format-style=webkit" "--warnings-as-errors=*")
set_target_properties(${bun} PROPERTIES CXX_CLANG_TIDY "${CLANG_TIDY_COMMAND}")
endif()

if(BUN_TIDY_ONLY_EXTRA)
find_program(CLANG_TIDY_EXE NAMES "clang-tidy")
set(CLANG_TIDY_COMMAND "${CLANG_TIDY_EXE}" "-checks=-*,clang-analyzer-*,performance-*,-clang-analyzer-webkit.UncountedLambdaCapturesChecker" "--fix" "--fix-errors" "--format-style=webkit" "--warnings-as-errors=*")
set(CLANG_TIDY_COMMAND "${CLANG_TIDY_EXE}" "-checks=-*,clang-analyzer-*,performance-*,-clang-analyzer-webkit.UncountedLambdaCapturesChecker,-clang-analyzer-optin.core.EnumCastOutOfRange,-clang-analyzer-webkit.RefCntblBaseVirtualDtor" "--fix" "--fix-errors" "--format-style=webkit" "--warnings-as-errors=*")
set_target_properties(${bun} PROPERTIES CXX_CLANG_TIDY "${CLANG_TIDY_COMMAND}")
endif()

@@ -2,6 +2,11 @@ Configuring a development environment for Bun can take 10-30 minutes depending o

If you are using Windows, please refer to [this guide](/docs/project/building-windows)

{% details summary="For Ubuntu users" %}
TL;DR: Ubuntu 22.04 is recommended.
Bun currently requires `glibc >= 2.32` for development, so on Ubuntu 20.04 (glibc 2.31) you will likely hit `error: undefined symbol: __libc_single_threaded` and need extra configuration. Also, according to this [issue](https://github.com/llvm/llvm-project/issues/97314), LLVM 16 is no longer maintained for Ubuntu 24.04 (noble); on 24.04 you may want to install LLVM 16 via `brew` instead.
{% /details %}
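For reference, a minimal sketch of the checks described above (the Homebrew formula name `llvm@16` is an assumption; verify it against your setup):

```bash
# Check which glibc your distribution ships (building Bun needs >= 2.32)
ldd --version | head -n1

# On Ubuntu 24.04, where apt no longer provides LLVM 16, Homebrew on Linux
# is one alternative (formula name assumed):
brew install llvm@16
export PATH="$(brew --prefix llvm@16)/bin:$PATH"
```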

## Install Dependencies

Using your system's package manager, install Bun's dependencies:

@@ -107,7 +112,7 @@ $ export PATH="$PATH:/usr/lib/llvm16/bin"

{% /codetabs %}

> ⚠️ Ubuntu distributions may require installation of the C++ standard library independently. See the [troubleshooting section](#span-file-not-found-on-ubuntu) for more information.
> ⚠️ Ubuntu distributions (<= 20.04) may require installation of the C++ standard library independently. See the [troubleshooting section](#span-file-not-found-on-ubuntu) for more information.

## Building Bun

@@ -311,3 +316,12 @@ $ bun setup -DUSE_STATIC_LIBATOMIC=OFF
```

The built version of Bun may not work on other systems if compiled this way.
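As a quick sanity check, you can inspect the resulting binary for a dynamic libatomic dependency (the binary path below is an assumption; adjust it to your build output):

```bash
# A dynamic libatomic entry here means the binary depends on the host's libatomic:
ldd ./build/bun | grep -i atomic || echo "no dynamic libatomic dependency"
```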

## ccache conflicts with building TinyCC on macOS

If you run into issues with `ccache` when building TinyCC, try reinstalling ccache:

```bash
brew uninstall ccache
brew install ccache
```

72
Dockerfile
@@ -52,11 +52,8 @@ ENV CI 1
ENV CPU_TARGET=${CPU_TARGET}
ENV BUILDARCH=${BUILDARCH}
ENV BUN_DEPS_OUT_DIR=${BUN_DEPS_OUT_DIR}
ENV USE_LTO 1

ENV CXX=clang++-${LLVM_VERSION}
ENV CC=clang-${LLVM_VERSION}
ENV AR=/usr/bin/llvm-ar-${LLVM_VERSION}
ENV LD=lld-${LLVM_VERSION}
ENV LC_CTYPE=en_US.UTF-8
ENV LC_ALL=en_US.UTF-8

@@ -93,6 +90,8 @@ RUN install_packages \
clangd-${LLVM_VERSION} \
libc++-${LLVM_VERSION}-dev \
libc++abi-${LLVM_VERSION}-dev \
llvm-${LLVM_VERSION}-runtime \
llvm-${LLVM_VERSION}-dev \
make \
cmake \
ninja-build \
@@ -119,6 +118,15 @@ RUN install_packages \
&& ln -sf /usr/bin/lldb-${LLVM_VERSION} /usr/bin/lldb \
&& ln -sf /usr/bin/clangd-${LLVM_VERSION} /usr/bin/clangd \
&& ln -sf /usr/bin/llvm-ar-${LLVM_VERSION} /usr/bin/llvm-ar \
&& ln -sf /usr/bin/ld.lld /usr/bin/ld \
&& ln -sf /usr/bin/llvm-ranlib-${LLVM_VERSION} /usr/bin/ranlib \
&& ln -sf /usr/bin/clang /usr/bin/cc \
&& ln -sf /usr/bin/clang /usr/bin/c89 \
&& ln -sf /usr/bin/clang /usr/bin/c99 \
&& ln -sf /usr/bin/clang++ /usr/bin/c++ \
&& ln -sf /usr/bin/clang++ /usr/bin/g++ \
&& ln -sf /usr/bin/llvm-ar /usr/bin/ar \
&& ln -sf /usr/bin/clang /usr/bin/gcc \
&& arch="$(dpkg --print-architecture)" \
&& case "${arch##*-}" in \
amd64) variant="x64";; \
@@ -131,6 +139,7 @@ RUN install_packages \
&& ln -s /usr/bin/bun /usr/bin/bunx \
&& rm -rf bun-linux-${variant} bun-linux-${variant}.zip \
&& mkdir -p ${BUN_DIR} ${BUN_DEPS_OUT_DIR}

# && if [ -n "${SCCACHE_BUCKET}" ]; then \
# echo "Setting up sccache" \
# && wget https://github.com/mozilla/sccache/releases/download/v0.5.4/sccache-v0.5.4-${BUILD_MACHINE_ARCH}-unknown-linux-musl.tar.gz \
@@ -167,13 +176,14 @@ ENV CCACHE_DIR=${CCACHE_DIR}

COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/c-ares ${BUN_DIR}/src/deps/c-ares
COPY scripts ${BUN_DIR}/scripts

WORKDIR $BUN_DIR

RUN --mount=type=cache,target=${CCACHE_DIR} \
cd $BUN_DIR \
&& make c-ares \
&& rm -rf ${BUN_DIR}/src/deps/c-ares ${BUN_DIR}/Makefile
&& bash ./scripts/build-cares.sh \
&& rm -rf ${BUN_DIR}/src/deps/c-ares ${BUN_DIR}/Makefile ${BUN_DIR}/scripts

FROM bun-base as lolhtml

@@ -204,13 +214,14 @@ ENV CPU_TARGET=${CPU_TARGET}

COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/mimalloc ${BUN_DIR}/src/deps/mimalloc
COPY scripts ${BUN_DIR}/scripts

ARG CCACHE_DIR=/ccache
ENV CCACHE_DIR=${CCACHE_DIR}

RUN --mount=type=cache,target=${CCACHE_DIR} \
cd ${BUN_DIR} \
&& make mimalloc \
&& bash ./scripts/build-mimalloc.sh \
&& rm -rf src/deps/mimalloc Makefile

FROM bun-base as mimalloc-debug
@@ -240,14 +251,38 @@ ARG CCACHE_DIR=/ccache
ENV CCACHE_DIR=${CCACHE_DIR}

COPY Makefile ${BUN_DIR}/Makefile
COPY CMakeLists.txt ${BUN_DIR}/CMakeLists.txt
COPY scripts ${BUN_DIR}/scripts
COPY src/deps/zlib ${BUN_DIR}/src/deps/zlib
COPY package.json bun.lockb Makefile .gitmodules ${BUN_DIR}/

WORKDIR $BUN_DIR

RUN --mount=type=cache,target=${CCACHE_DIR} \
cd $BUN_DIR \
&& make zlib \
&& rm -rf src/deps/zlib Makefile
&& bash ./scripts/build-zlib.sh && rm -rf src/deps/zlib scripts


FROM bun-base as libdeflate

ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
ARG CCACHE_DIR=/ccache
ENV CCACHE_DIR=${CCACHE_DIR}

COPY Makefile ${BUN_DIR}/Makefile
COPY CMakeLists.txt ${BUN_DIR}/CMakeLists.txt
COPY scripts ${BUN_DIR}/scripts
COPY src/deps/libdeflate ${BUN_DIR}/src/deps/libdeflate
COPY package.json bun.lockb Makefile .gitmodules ${BUN_DIR}/

WORKDIR $BUN_DIR

RUN --mount=type=cache,target=${CCACHE_DIR} \
cd $BUN_DIR \
&& bash ./scripts/build-libdeflate.sh && rm -rf src/deps/libdeflate scripts


FROM bun-base as libarchive

@@ -286,6 +321,7 @@ ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}

COPY Makefile ${BUN_DIR}/Makefile
COPY scripts ${BUN_DIR}/scripts
COPY src/deps/boringssl ${BUN_DIR}/src/deps/boringssl

WORKDIR $BUN_DIR
@@ -295,7 +331,7 @@ ENV CCACHE_DIR=${CCACHE_DIR}

RUN --mount=type=cache,target=${CCACHE_DIR} \
cd ${BUN_DIR} \
&& make boringssl \
&& bash ./scripts/build-boringssl.sh \
&& rm -rf src/deps/boringssl Makefile


@@ -311,12 +347,14 @@ ENV CCACHE_DIR=${CCACHE_DIR}

COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/zstd ${BUN_DIR}/src/deps/zstd
COPY scripts ${BUN_DIR}/scripts

WORKDIR $BUN_DIR

RUN --mount=type=cache,target=${CCACHE_DIR} \
cd $BUN_DIR \
&& make zstd
&& bash ./scripts/build-zstd.sh \
&& rm -rf src/deps/zstd scripts

FROM bun-base as ls-hpack

@@ -330,12 +368,14 @@ ENV CCACHE_DIR=${CCACHE_DIR}

COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/ls-hpack ${BUN_DIR}/src/deps/ls-hpack
COPY scripts ${BUN_DIR}/scripts

WORKDIR $BUN_DIR

RUN --mount=type=cache,target=${CCACHE_DIR} \
cd $BUN_DIR \
&& make lshpack
&& bash ./scripts/build-lshpack.sh \
&& rm -rf src/deps/ls-hpack scripts

FROM bun-base-with-zig as bun-identifier-cache

@@ -393,6 +433,9 @@ COPY src ${BUN_DIR}/src
COPY CMakeLists.txt ${BUN_DIR}/CMakeLists.txt
COPY src/deps/boringssl/include ${BUN_DIR}/src/deps/boringssl/include

# for uWebSockets
COPY src/deps/libdeflate ${BUN_DIR}/src/deps/libdeflate

ARG CCACHE_DIR=/ccache
ENV CCACHE_DIR=${CCACHE_DIR}

@@ -491,11 +534,13 @@ RUN mkdir -p build bun-webkit

# lol
COPY src/bun.js/bindings/sqlite/sqlite3.c ${BUN_DIR}/src/bun.js/bindings/sqlite/sqlite3.c
COPY src/deps/brotli ${BUN_DIR}/src/deps/brotli

COPY src/symbols.dyn src/linker.lds ${BUN_DIR}/src/

COPY CMakeLists.txt ${BUN_DIR}/CMakeLists.txt
COPY --from=zlib ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
COPY --from=libdeflate ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
COPY --from=libarchive ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
COPY --from=boringssl ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
COPY --from=lolhtml ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
@@ -505,7 +550,8 @@ COPY --from=tinycc ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
COPY --from=c-ares ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
COPY --from=ls-hpack ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
COPY --from=bun-compile-zig-obj /tmp/bun-zig.o ${BUN_DIR}/build/bun-zig.o
COPY --from=bun-cpp-objects ${BUN_DIR}/build/bun-cpp-objects.a ${BUN_DIR}/build/bun-cpp-objects.a
COPY --from=bun-cpp-objects ${BUN_DIR}/build/*.a ${BUN_DIR}/build/
COPY --from=bun-cpp-objects ${BUN_DIR}/build/*.o ${BUN_DIR}/build/
COPY --from=bun-cpp-objects ${BUN_DIR}/bun-webkit/lib ${BUN_DIR}/bun-webkit/lib

WORKDIR $BUN_DIR/build

@@ -34,6 +34,8 @@ Bun statically links these libraries:
| [`c-ares`](https://github.com/c-ares/c-ares) | MIT licensed |
| [`libicu`](https://github.com/unicode-org/icu) 72 | [license here](https://github.com/unicode-org/icu/blob/main/icu4c/LICENSE) |
| [`libbase64`](https://github.com/aklomp/base64/blob/master/LICENSE) | BSD 2-Clause |
| [`libuv`](https://github.com/libuv/libuv) (on Windows) | MIT |
| [`libdeflate`](https://github.com/ebiggers/libdeflate) | MIT |
| A fork of [`uWebsockets`](https://github.com/jarred-sumner/uwebsockets) | Apache 2.0 licensed |
| Parts of [Tigerbeetle's IO code](https://github.com/tigerbeetle/tigerbeetle/blob/532c8b70b9142c17e07737ab6d3da68d7500cbca/src/io/windows.zig#L1) | Apache 2.0 licensed |

17
Makefile
@@ -26,8 +26,11 @@ ifeq ($(ARCH_NAME_RAW),arm64)
ARCH_NAME = aarch64
DOCKER_BUILDARCH = arm64
BREW_PREFIX_PATH = /opt/homebrew
DEFAULT_MIN_MACOS_VERSION = 11.0
DEFAULT_MIN_MACOS_VERSION = 13.0
MARCH_NATIVE = -mtune=$(CPU_TARGET)
ifeq ($(OS_NAME),linux)
MARCH_NATIVE = -march=armv8-a+crc -mtune=ampere1
endif
else
ARCH_NAME = x64
DOCKER_BUILDARCH = amd64
@@ -154,7 +157,12 @@ CMAKE_FLAGS_WITHOUT_RELEASE = -DCMAKE_C_COMPILER=$(CC) \
-DCMAKE_OSX_DEPLOYMENT_TARGET=$(MIN_MACOS_VERSION) \
$(CMAKE_CXX_COMPILER_LAUNCHER_FLAG) \
-DCMAKE_AR=$(AR) \
-DCMAKE_RANLIB=$(which llvm-16-ranlib 2>/dev/null || which llvm-ranlib 2>/dev/null)
-DCMAKE_RANLIB=$(which llvm-16-ranlib 2>/dev/null || which llvm-ranlib 2>/dev/null) \
-DCMAKE_CXX_STANDARD=20 \
-DCMAKE_C_STANDARD=17 \
-DCMAKE_CXX_STANDARD_REQUIRED=ON \
-DCMAKE_C_STANDARD_REQUIRED=ON \
-DCMAKE_CXX_EXTENSIONS=ON

@@ -181,8 +189,8 @@ endif

OPTIMIZATION_LEVEL=-O3 $(MARCH_NATIVE)
DEBUG_OPTIMIZATION_LEVEL= -O1 $(MARCH_NATIVE) -gdwarf-4
CFLAGS_WITHOUT_MARCH = $(MACOS_MIN_FLAG) $(OPTIMIZATION_LEVEL) -fno-exceptions -fvisibility=hidden -fvisibility-inlines-hidden
BUN_CFLAGS = $(MACOS_MIN_FLAG) $(MARCH_NATIVE) $(OPTIMIZATION_LEVEL) -fno-exceptions -fvisibility=hidden -fvisibility-inlines-hidden
CFLAGS_WITHOUT_MARCH = $(MACOS_MIN_FLAG) $(OPTIMIZATION_LEVEL) -fno-exceptions -fvisibility=hidden -fvisibility-inlines-hidden -mno-omit-leaf-frame-pointer -fno-omit-frame-pointer -fno-asynchronous-unwind-tables -fno-unwind-tables -fno-pie -fno-pic
BUN_CFLAGS = $(MACOS_MIN_FLAG) $(MARCH_NATIVE) $(OPTIMIZATION_LEVEL) -fno-exceptions -fvisibility=hidden -fvisibility-inlines-hidden -mno-omit-leaf-frame-pointer -fno-omit-frame-pointer -fno-asynchronous-unwind-tables -fno-unwind-tables -fno-pie -fno-pic
BUN_TMP_DIR := /tmp/make-bun
CFLAGS=$(CFLAGS_WITHOUT_MARCH) $(MARCH_NATIVE)

@@ -1301,6 +1309,7 @@ jsc-build-mac-compile-debug:
-DCMAKE_BUILD_TYPE=Debug \
-DUSE_THIN_ARCHIVES=OFF \
-DENABLE_FTL_JIT=ON \
-DENABLE_MALLOC_HEAP_BREAKDOWN=ON \
-DCMAKE_EXPORT_COMPILE_COMMANDS=ON \
-DUSE_BUN_JSC_ADDITIONS=ON \
-DENABLE_BUN_SKIP_FAILING_ASSERTIONS=ON \

BIN
bench/bun.lockb
Binary file not shown.
@@ -1,20 +1,43 @@
import { run, bench } from "mitata";
import { run, bench, group } from "mitata";
import { gzipSync, gunzipSync } from "bun";

const data = new TextEncoder().encode("Hello World!".repeat(9999));
const data = await Bun.file(require.resolve("@babel/standalone/babel.min.js")).arrayBuffer();

const compressed = gzipSync(data);

bench(`roundtrip - "Hello World!".repeat(9999))`, () => {
gunzipSync(gzipSync(data));
const libraries = ["zlib"];
if (Bun.semver.satisfies(Bun.version.replaceAll("-debug", ""), ">=1.1.21")) {
libraries.push("libdeflate");
}
const options = { library: undefined };
const benchFn = (name, fn) => {
if (libraries.length > 1) {
group(name, () => {
for (const library of libraries) {
bench(library, () => {
options.library = library;
fn();
});
}
});
} else {
options.library = libraries[0];
bench(name, () => {
fn();
});
}
};

benchFn(`roundtrip - @babel/standalone/babel.min.js`, () => {
gunzipSync(gzipSync(data, options), options);
});

bench(`gzipSync("Hello World!".repeat(9999)))`, () => {
gzipSync(data);
benchFn(`gzipSync(@babel/standalone/babel.min.js`, () => {
gzipSync(data, options);
});

bench(`gunzipSync("Hello World!".repeat(9999)))`, () => {
gunzipSync(compressed);
benchFn(`gunzipSync(@babel/standalone/babel.min.js`, () => {
gunzipSync(compressed, options);
});

await run();

BIN
bench/gzip/bun.lockb
Executable file
Binary file not shown.
@@ -1,19 +1,22 @@
import { run, bench } from "mitata";
import { gzipSync, gunzipSync } from "zlib";
import { createRequire } from "module";
import { readFileSync } from "fs";

const data = new TextEncoder().encode("Hello World!".repeat(9999));
const require = createRequire(import.meta.url);
const data = readFileSync(require.resolve("@babel/standalone/babel.min.js"));

const compressed = gzipSync(data);

bench(`roundtrip - "Hello World!".repeat(9999))`, () => {
bench(`roundtrip - @babel/standalone/babel.min.js)`, () => {
gunzipSync(gzipSync(data));
});

bench(`gzipSync("Hello World!".repeat(9999)))`, () => {
bench(`gzipSync(@babel/standalone/babel.min.js))`, () => {
gzipSync(data);
});

bench(`gunzipSync("Hello World!".repeat(9999)))`, () => {
bench(`gunzipSync(@babel/standalone/babel.min.js))`, () => {
gunzipSync(compressed);
});

@@ -7,5 +7,8 @@
"bench:node": "$NODE node.mjs",
"bench:deno": "$DENO run -A --unstable deno.js",
"bench": "bun run bench:bun && bun run bench:node && bun run bench:deno"
},
"dependencies": {
"@babel/standalone": "7.24.10"
}
}

@@ -3,6 +3,7 @@
"dependencies": {
"@babel/core": "^7.16.10",
"@babel/preset-react": "^7.16.7",
"@babel/standalone": "^7.24.7",
"@swc/core": "^1.2.133",
"benchmark": "^2.1.4",
"braces": "^3.0.2",

@@ -6,6 +6,7 @@ const App = () => (
<html>
<body>
<h1>Hello World</h1>
<p>This is an example.</p>
</body>
</html>
);

55
bench/snippets/text-decoder-stream.mjs
Normal file
@@ -0,0 +1,55 @@
import { bench, run } from "./runner.mjs";

const latin1 = `hello hello hello!!!! `.repeat(10240);

function create(src) {
function split(str, chunkSize) {
let chunkedHTML = [];
let html = str;
const encoder = new TextEncoder();
while (html.length > 0) {
chunkedHTML.push(encoder.encode(html.slice(0, chunkSize)));
html = html.slice(chunkSize);
}
return chunkedHTML;
}

async function runBench(chunks) {
const decoder = new TextDecoderStream();
const stream = new ReadableStream({
pull(controller) {
for (let chunk of chunks) {
controller.enqueue(chunk);
}
controller.close();
},
}).pipeThrough(decoder);
for (let reader = stream.getReader(); ; ) {
const { done, value } = await reader.read();
if (done) {
break;
}
}
}

// if (new TextDecoder().decode(await runBench(oneKB)) !== src) {
// throw new Error("Benchmark failed");
// }
const sizes = [16 * 1024, 64 * 1024, 256 * 1024];
for (const chunkSize of sizes) {
const text = split(src, chunkSize);
bench(
`${Math.round(src.length / 1024)} KB of text in ${Math.round(chunkSize / 1024) > 0 ? Math.round(chunkSize / 1024) : (chunkSize / 1024).toFixed(2)} KB chunks`,
async () => {
await runBench(text);
},
);
}
}
create(latin1);
create(
// bun's old readme was extremely long
await fetch("https://web.archive.org/web/20230119110956/https://github.com/oven-sh/bun").then(res => res.text()),
);

await run();
49
bench/snippets/text-encoder-stream.mjs
Normal file
@@ -0,0 +1,49 @@
import { bench, run } from "./runner.mjs";

const latin1 = `hello hello hello!!!! `.repeat(10240);

function create(src) {
function split(str, chunkSize) {
let chunkedHTML = [];
let html = str;
while (html.length > 0) {
chunkedHTML.push(html.slice(0, chunkSize));
html = html.slice(chunkSize);
}
return chunkedHTML;
}

async function runBench(chunks) {
const encoderStream = new TextEncoderStream();
const stream = new ReadableStream({
pull(controller) {
for (let chunk of chunks) {
controller.enqueue(chunk);
}
controller.close();
},
}).pipeThrough(encoderStream);
return await new Response(stream).bytes();
}

// if (new TextDecoder().decode(await runBench(oneKB)) !== src) {
// throw new Error("Benchmark failed");
// }
const sizes = [1024, 16 * 1024, 64 * 1024, 256 * 1024];
for (const chunkSize of sizes) {
const text = split(src, chunkSize);
bench(
`${Math.round(src.length / 1024)} KB of text in ${Math.round(chunkSize / 1024) > 0 ? Math.round(chunkSize / 1024) : (chunkSize / 1024).toFixed(2)} KB chunks`,
async () => {
await runBench(text);
},
);
}
}
create(latin1);
create(
// bun's old readme was extremely long
await fetch("https://web.archive.org/web/20230119110956/https://github.com/oven-sh/bun").then(res => res.text()),
);

await run();
14
bench/snippets/transpiler-2.mjs
Normal file
@@ -0,0 +1,14 @@
import { bench, run } from "mitata";
import { join } from "path";

const code = require("fs").readFileSync(
process.argv[2] || join(import.meta.dir, "../node_modules/@babel/standalone/babel.min.js"),
);

const transpiler = new Bun.Transpiler({ minify: true });

bench("transformSync", () => {
transpiler.transformSync(code);
});

await run();
31
bench/sqlite/better-sqlite3.mjs
Normal file
@@ -0,0 +1,31 @@
import { run, bench } from "mitata";
import { createRequire } from "module";

const require = createRequire(import.meta.url);
const db = require("better-sqlite3")("./src/northwind.sqlite");

{
const sql = db.prepare(`SELECT * FROM "Order"`);

bench('SELECT * FROM "Order"', () => {
sql.all();
});
}

{
const sql = db.prepare(`SELECT * FROM "Product"`);

bench('SELECT * FROM "Product"', () => {
sql.all();
});
}

{
const sql = db.prepare(`SELECT * FROM "OrderDetail"`);

bench('SELECT * FROM "OrderDetail"', () => {
sql.all();
});
}

await run();
@@ -1,8 +1,9 @@
// Run `node --experimental-sqlite bench/sqlite/node.mjs` to run the script.
// You will need `--experimental-sqlite` flag to run this script and node v22.5.0 or higher.
import { run, bench } from "mitata";
import { createRequire } from "module";
import { DatabaseSync as Database } from "node:sqlite";

const require = createRequire(import.meta.url);
const db = require("better-sqlite3")("./src/northwind.sqlite");
const db = new Database("./src/northwind.sqlite");

{
const sql = db.prepare(`SELECT * FROM "Order"`);

211
build.zig
@@ -33,8 +33,6 @@ comptime {
}
}

const default_reported_nodejs_version = "22.3.0";

const zero_sha = "0000000000000000000000000000000000000000";

const BunBuildOptions = struct {
@@ -48,9 +46,10 @@ const BunBuildOptions = struct {
sha: []const u8,
enable_logs: bool = false,
tracy_callstack_depth: u16,
reported_nodejs_version: []const u8 = default_reported_nodejs_version,
reported_nodejs_version: Version,

generated_code_dir: []const u8,
no_llvm: bool,

cached_options_module: ?*Module = null,
windows_shim: ?WindowsShim = null,
@@ -73,14 +72,7 @@ const BunBuildOptions = struct {
opts.addOption([:0]const u8, "sha", b.allocator.dupeZ(u8, this.sha) catch @panic("OOM"));
opts.addOption(bool, "baseline", this.isBaseline());
opts.addOption(bool, "enable_logs", this.enable_logs);
opts.addOption([:0]const u8, "reported_nodejs_version", b.allocator.dupeZ(u8, this.reported_nodejs_version) catch @panic("OOM"));
if (this.reported_nodejs_version.len > 0 and this.reported_nodejs_version[0] == 'v') {
@panic("Node.js version should not start with 'v'");
}

if (this.reported_nodejs_version.len == 0) {
@panic("Node.js version should not be empty");
}
opts.addOption([]const u8, "reported_nodejs_version", b.fmt("{}", .{this.reported_nodejs_version}));

const mod = opts.createModule();
this.cached_options_module = mod;
@@ -122,6 +114,23 @@ pub fn getOSGlibCVersion(os: OperatingSystem) ?Version {
};
}

pub fn getCpuModel(os: OperatingSystem, arch: Arch) ?Target.Query.CpuModel {
// https://github.com/oven-sh/bun/issues/12076
if (os == .linux and arch == .aarch64) {
return .{ .explicit = &Target.aarch64.cpu.cortex_a35 };
}

// Be explicit and ensure we do not accidentally target a newer M-series chip
if (os == .mac and arch == .aarch64) {
return .{ .explicit = &Target.aarch64.cpu.apple_m1 };
}

// note: x86_64 is dealt with in the CMake config and passed in.
// the reason for the explicit handling on aarch64 is due to troubles
// passing the exact target in via flags.
return null;
}

pub fn build(b: *Build) !void {
std.log.info("zig compiler v{s}", .{builtin.zig_version_string});

@@ -147,6 +156,14 @@ pub fn build(b: *Build) !void {
break :brk .{ os, arch };
};

// target must be refined to support older but very popular devices on
// aarch64, this means moving the minimum supported CPU to support certain
// raspberry PIs. there are also a number of cloud hosts that use virtual
// machines with surprisingly out of date versions of glibc.
if (getCpuModel(os, arch)) |cpu_model| {
target_query.cpu_model = cpu_model;
}

target_query.os_version_min = getOSVersionMin(os);
target_query.glibc_version = getOSGlibCVersion(os);

@@ -163,6 +180,10 @@ pub fn build(b: *Build) !void {
break :ref_trace if (trace == 0) null else trace;
};

const obj_format = b.option(ObjectFormat, "obj_format", "Output file for object files") orelse .obj;

const no_llvm = b.option(bool, "no_llvm", "Experiment with Zig self hosted backends. No stability guaranteed") orelse false;

var build_options = BunBuildOptions{
.target = target,
.optimize = optimize,
@@ -171,6 +192,7 @@ pub fn build(b: *Build) !void {
.arch = arch,

.generated_code_dir = generated_code_dir,
.no_llvm = no_llvm,

.version = try Version.parse(bun_version),
.canary_revision = canary: {
@@ -178,7 +200,10 @@ pub fn build(b: *Build) !void {
break :canary if (rev == 0) null else rev;
},

.reported_nodejs_version = b.option([]const u8, "reported_nodejs_version", "Reported Node.js version") orelse default_reported_nodejs_version,
.reported_nodejs_version = try Version.parse(
b.option([]const u8, "reported_nodejs_version", "Reported Node.js version") orelse
"0.0.0-unset",
),

.sha = sha: {
const sha = b.option([]const u8, "sha", "Force the git sha") orelse
@@ -224,7 +249,7 @@ pub fn build(b: *Build) !void {
var step = b.step("obj", "Build Bun's Zig code as a .o file");
var bun_obj = addBunObject(b, &build_options);
step.dependOn(&bun_obj.step);
step.dependOn(&b.addInstallFile(bun_obj.getEmittedBin(), "bun-zig.o").step);
step.dependOn(addInstallObjectFile(b, bun_obj, "bun-zig", obj_format));
}

// zig build windows-shim
@@ -252,95 +277,60 @@ pub fn build(b: *Build) !void {

// zig build check-all
{
var step = b.step("check-all", "Check for semantic analysis errors on all supported platforms");
inline for (.{
const step = b.step("check-all", "Check for semantic analysis errors on all supported platforms");
addMultiCheck(b, step, build_options, &.{
.{ .os = .windows, .arch = .x86_64 },
.{ .os = .mac, .arch = .x86_64 },
.{ .os = .mac, .arch = .aarch64 },
.{ .os = .linux, .arch = .x86_64 },
.{ .os = .linux, .arch = .aarch64 },
}) |check| {
inline for (.{ .Debug, .ReleaseFast }) |mode| {
const check_target = b.resolveTargetQuery(.{
.os_tag = OperatingSystem.stdOSTag(check.os),
.cpu_arch = check.arch,
.os_version_min = getOSVersionMin(check.os),
.glibc_version = getOSGlibCVersion(check.os),
});

var options = BunBuildOptions{
.target = check_target,
.os = check.os,
.arch = check_target.result.cpu.arch,
.optimize = mode,

.canary_revision = build_options.canary_revision,
.sha = build_options.sha,
.tracy_callstack_depth = build_options.tracy_callstack_depth,
.version = build_options.version,
.reported_nodejs_version = build_options.reported_nodejs_version,
.generated_code_dir = build_options.generated_code_dir,
};
var obj = addBunObject(b, &options);
obj.generated_bin = null;
step.dependOn(&obj.step);
}
}
});
}

// zig build check-windows
{
var step = b.step("check-windows", "Check for semantic analysis errors on Windows x64");
inline for (.{
const step = b.step("check-windows", "Check for semantic analysis errors on Windows");
addMultiCheck(b, step, build_options, &.{
.{ .os = .windows, .arch = .x86_64 },
}) |check| {
inline for (.{ .Debug, .ReleaseFast }) |mode| {
const check_target = b.resolveTargetQuery(.{
.os_tag = OperatingSystem.stdOSTag(check.os),
.cpu_arch = check.arch,
.os_version_min = getOSVersionMin(check.os),
.glibc_version = getOSGlibCVersion(check.os),
});

var options = BunBuildOptions{
.target = check_target,
.os = check.os,
.arch = check_target.result.cpu.arch,
.optimize = mode,

.canary_revision = build_options.canary_revision,
.sha = build_options.sha,
.tracy_callstack_depth = build_options.tracy_callstack_depth,
.version = build_options.version,
.reported_nodejs_version = build_options.reported_nodejs_version,
.generated_code_dir = build_options.generated_code_dir,
};
var obj = addBunObject(b, &options);
obj.generated_bin = null;
step.dependOn(&obj.step);
}
}
});
}
}

// Running `zig build` with no arguments is almost always a mistake.
// TODO: revive this error. cannot right now since ZLS runs zig build without arguments
{
// const mistake_message = b.addSystemCommand(&.{
// "echo",
// \\
// \\To build Bun from source, please use `bun run setup` instead of `zig build`"
// \\For more info, see https://bun.sh/docs/project/contributing
// \\
// \\If you want to build the zig code in isolation, run:
// \\  'zig build obj -Dgenerated-code=./build/codegen [...opts]'
// \\
// \\If you want to test a compile without emitting an object:
// \\  'zig build check'
// \\  'zig build check-all' (run linux+mac+windows)
// \\
// });
pub inline fn addMultiCheck(
b: *Build,
parent_step: *Step,
root_build_options: BunBuildOptions,
to_check: []const struct { os: OperatingSystem, arch: Arch },
) void {
inline for (to_check) |check| {
inline for (.{ .Debug, .ReleaseFast }) |mode| {
const check_target = b.resolveTargetQuery(.{
.os_tag = OperatingSystem.stdOSTag(check.os),
.cpu_arch = check.arch,
.cpu_model = getCpuModel(check.os, check.arch) orelse .determined_by_cpu_arch,
.os_version_min = getOSVersionMin(check.os),
.glibc_version = getOSGlibCVersion(check.os),
});

// b.default_step.dependOn(&mistake_message.step);
var options: BunBuildOptions = .{
.target = check_target,
.os = check.os,
.arch = check_target.result.cpu.arch,
.optimize = mode,

.canary_revision = root_build_options.canary_revision,
.sha = root_build_options.sha,
.tracy_callstack_depth = root_build_options.tracy_callstack_depth,
.version = root_build_options.version,
.reported_nodejs_version = root_build_options.reported_nodejs_version,
.generated_code_dir = root_build_options.generated_code_dir,
.no_llvm = root_build_options.no_llvm,
};

var obj = addBunObject(b, &options);
obj.generated_bin = null;
parent_step.dependOn(&obj.step);
}
}
}

@@ -353,10 +343,15 @@ pub fn addBunObject(b: *Build, opts: *BunBuildOptions) *Compile {
},
.target = opts.target,
.optimize = opts.optimize,
.use_llvm = !opts.no_llvm,
.use_lld = if (opts.os == .mac) false else !opts.no_llvm,

// https://github.com/ziglang/zig/issues/17430
.pic = true,

.omit_frame_pointer = false,
.strip = false, // stripped at the end
});

obj.bundle_compiler_rt = false;
obj.formatted_panics = true;
obj.root_module.omit_frame_pointer = false;
@@ -374,9 +369,10 @@ pub fn addBunObject(b: *Build, opts: *BunBuildOptions) *Compile {
}

if (opts.os == .linux) {
obj.link_emit_relocs = true;
obj.link_eh_frame_hdr = true;
obj.link_emit_relocs = false;
obj.link_eh_frame_hdr = false;
obj.link_function_sections = true;
obj.link_data_sections = true;

if (opts.optimize == .Debug) {
obj.root_module.valgrind = true;
@@ -387,6 +383,25 @@ pub fn addBunObject(b: *Build, opts: *BunBuildOptions) *Compile {
return obj;
}

const ObjectFormat = enum {
bc,
obj,
};

pub fn addInstallObjectFile(
b: *Build,
compile: *Compile,
name: []const u8,
out_mode: ObjectFormat,
) *Step {
// bin always needed to be computed or else the compilation will do nothing. zig build system bug?
const bin = compile.getEmittedBin();
return &b.addInstallFile(switch (out_mode) {
.obj => bin,
.bc => compile.getEmittedLlvmBc(),
}, b.fmt("{s}.o", .{name})).step;
}

fn exists(path: []const u8) bool {
const file = std.fs.openFileAbsolute(path, .{ .mode = .read_only }) catch return false;
file.close();
@@ -438,6 +453,12 @@ fn addInternalPackages(b: *Build, obj: *Compile, opts: *BunBuildOptions) void {
.root_source_file = .{ .cwd_relative = resolved_source_tag_path },
});

const error_code_path = b.pathJoin(&.{ opts.generated_code_dir, "ErrorCode.zig" });
validateGeneratedPath(error_code_path);
obj.root_module.addAnonymousImport("ErrorCode", .{
.root_source_file = .{ .cwd_relative = error_code_path },
});

if (os == .windows) {
obj.root_module.addAnonymousImport("bun_shim_impl.exe", .{
.root_source_file = opts.windowsShim(b).exe.getEmittedBin(),
@@ -447,7 +468,11 @@ fn addInternalPackages(b: *Build, obj: *Compile, opts: *BunBuildOptions) void {

fn validateGeneratedPath(path: []const u8) void {
if (!exists(path)) {
std.debug.panic("{s} does not exist in generated code directory!", .{std.fs.path.basename(path)});
std.debug.panic(
\\Generated file '{s}' is missing!
\\
\\Make sure to use CMake and Ninja, or pass a manual codegen folder with '-Dgenerated-code=...'
, .{path});
}
}

|
||||
@@ -82,7 +82,7 @@ _bun_completions() {
    declare -A PACKAGE_OPTIONS;
    declare -A PM_OPTIONS;

    local SUBCOMMANDS="dev bun create run install add remove upgrade completions discord help init pm x";
    local SUBCOMMANDS="dev bun create run install add remove upgrade completions discord help init pm x test repl update outdated link unlink build";

    GLOBAL_OPTIONS[LONG_OPTIONS]="--use --cwd --bunfile --server-bunfile --config --disable-react-fast-refresh --disable-hmr --env-file --extension-order --jsx-factory --jsx-fragment --extension-order --jsx-factory --jsx-fragment --jsx-import-source --jsx-production --jsx-runtime --main-fields --no-summary --version --platform --public-dir --tsconfig-override --define --external --help --inject --loader --origin --port --dump-environment-variables --dump-limits --disable-bun-js";
    GLOBAL_OPTIONS[SHORT_OPTIONS]="-c -v -d -e -h -i -l -u -p";

@@ -179,6 +179,7 @@ complete -c bun -n "__fish_use_subcommand" -a "remove" -d "Remove a dependency f
complete -c bun -n "__fish_use_subcommand" -a "add" -d "Add a dependency to package.json" -f
complete -c bun -n "__fish_use_subcommand" -a "init" -d "Initialize a Bun project in this directory" -f
complete -c bun -n "__fish_use_subcommand" -a "link" -d "Register or link a local npm package" -f
complete -c bun -n "__fish_use_subcommand" -a "link" -d "Unregister a local npm package" -f
complete -c bun -n "__fish_use_subcommand" -a "unlink" -d "Unregister a local npm package" -f
complete -c bun -n "__fish_use_subcommand" -a "pm" -d "Additional package management utilities" -f
complete -c bun -n "__fish_use_subcommand" -a "x" -d "Execute a package binary, installing if needed" -f
complete -c bun -n "__fish_use_subcommand" -a "outdated" -d "Display the latest versions of outdated dependencies" -f

@@ -425,6 +425,7 @@ _bun_run_completion() {
    '--external[Exclude module from transpilation (can use * wildcards). ex: -e react]:external' \
    '-e[Exclude module from transpilation (can use * wildcards). ex: -e react]:external' \
    '--loader[Parse files with .ext:loader, e.g. --loader .js:jsx. Valid loaders: js, jsx, ts, tsx, json, toml, text, file, wasm, napi]:loader' \
    '--packages[Exclude dependencies from bundle, e.g. --packages external. Valid options: bundle, external]:packages' \
    '-l[Parse files with .ext:loader, e.g. --loader .js:jsx. Valid loaders: js, jsx, ts, tsx, json, toml, text, file, wasm, napi]:loader' \
    '--origin[Rewrite import URLs to start with --origin. Default: ""]:origin' \
    '-u[Rewrite import URLs to start with --origin. Default: ""]:origin' \
@@ -562,6 +563,22 @@ _bun_update_completion() {
    esac
}

_bun_outdated_completion() {
    _arguments -s -C \
        '--cwd[Set a specific cwd]:cwd' \
        '--verbose[Excessively verbose logging]' \
        '--no-progress[Disable the progress bar]' \
        '--help[Print this help menu]' &&
        ret=0

    case $state in
        config)
            _bun_list_bunfig_toml

            ;;
    esac
}

_bun_test_completion() {
    _arguments -s -C \
        '1: :->cmd1' \
@@ -668,6 +685,7 @@ _bun() {
        'add\:"Add a dependency to package.json (bun a)" '
        'remove\:"Remove a dependency from package.json (bun rm)" '
        'update\:"Update outdated dependencies & save to package.json" '
        'outdated\:"Display the latest versions of outdated dependencies" '
        'link\:"Link an npm package globally" '
        'unlink\:"Globally unlink an npm package" '
        'pm\:"More commands for managing packages" '
@@ -739,6 +757,10 @@ _bun() {
    update)
        _bun_update_completion

        ;;
    outdated)
        _bun_outdated_completion

        ;;
    'test')
        _bun_test_completion
@@ -818,6 +840,10 @@ _bun() {
    update)
        _bun_update_completion

        ;;
    outdated)
        _bun_outdated_completion

        ;;
    'test')
        _bun_test_completion

@@ -61,7 +61,7 @@ To do anything interesting we need a construct known as a "view". A view is a cl

The `DataView` class is a lower-level interface for reading and manipulating the data in an `ArrayBuffer`.

Below we create a new `DataView` and set the first byte to 5.
Below we create a new `DataView` and set the first byte to 3.

```ts
const buf = new ArrayBuffer(4);
@@ -219,6 +219,11 @@ The following classes are typed arrays, along with a description of how they int

---

- [`Float16Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Float16Array)
  - Every two (2) bytes are interpreted as a 16-bit floating point number. Range -6.55e4 to 6.55e4.

---

- [`Float32Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Float32Array)
  - Every four (4) bytes are interpreted as a 32-bit floating point number. Range -3.4e38 to 3.4e38.

@@ -395,7 +400,7 @@ Bun implements `Buffer`, a Node.js API for working with binary data that pre-dat

```ts
const buf = Buffer.from("hello world");
// => Buffer(16) [ 116, 104, 105, 115, 32, 105, 115, 32, 97, 32, 115, 116, 114, 105, 110, 103 ]
// => Buffer(11) [ 104, 101, 108, 108, 111, 32, 119, 111, 114, 108, 100 ]

buf.length; // => 11
buf[0]; // => 104, ascii for 'h'

docs/api/fetch.md (new file, 308 lines)
@@ -0,0 +1,308 @@
Bun implements the WHATWG `fetch` standard, with some extensions to meet the needs of server-side JavaScript.

Bun also implements `node:http`, but `fetch` is generally recommended instead.

## Sending an HTTP request

To send an HTTP request, use `fetch`

```ts
const response = await fetch("http://example.com");

console.log(response.status); // => 200

const text = await response.text(); // or response.json(), response.formData(), etc.
```

`fetch` also works with HTTPS URLs.

```ts
const response = await fetch("https://example.com");
```

You can also pass `fetch` a [`Request`](https://developer.mozilla.org/en-US/docs/Web/API/Request) object.

```ts
const request = new Request("http://example.com", {
  method: "POST",
  body: "Hello, world!",
});

const response = await fetch(request);
```

### Sending a POST request

To send a POST request, pass an object with the `method` property set to `"POST"`.

```ts
const response = await fetch("http://example.com", {
  method: "POST",
  body: "Hello, world!",
});
```

`body` can be a string, a `FormData` object, an `ArrayBuffer`, a `Blob`, and more. See the [MDN documentation](https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API/Using_Fetch#setting_a_body) for more information.

### Proxying requests

To proxy a request, pass an object with the `proxy` property set to a URL.

```ts
const response = await fetch("http://example.com", {
  proxy: "http://proxy.com",
});
```

### Custom headers

To set custom headers, pass an object with the `headers` property set to an object.

```ts
const response = await fetch("http://example.com", {
  headers: {
    "X-Custom-Header": "value",
  },
});
```

You can also set headers using the [Headers](https://developer.mozilla.org/en-US/docs/Web/API/Headers) object.

```ts
const headers = new Headers();
headers.append("X-Custom-Header", "value");

const response = await fetch("http://example.com", {
  headers,
});
```

### Response bodies

To read the response body, use one of the following methods:

- `response.text(): Promise<string>`: Returns a promise that resolves with the response body as a string.
- `response.json(): Promise<any>`: Returns a promise that resolves with the response body as a JSON object.
- `response.formData(): Promise<FormData>`: Returns a promise that resolves with the response body as a `FormData` object.
- `response.bytes(): Promise<Uint8Array>`: Returns a promise that resolves with the response body as a `Uint8Array`.
- `response.arrayBuffer(): Promise<ArrayBuffer>`: Returns a promise that resolves with the response body as an `ArrayBuffer`.
- `response.blob(): Promise<Blob>`: Returns a promise that resolves with the response body as a `Blob`.

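For example, a minimal sketch that reads a JSON body (the URL and response shape here are illustrative):

```ts
const response = await fetch("https://example.com/api/user");

// A response body can only be consumed once, so pick the method that
// matches the content type.
const user = (await response.json()) as { name: string };
console.log(user.name);
```
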
#### Streaming response bodies

You can use async iterators to stream the response body.

```ts
const response = await fetch("http://example.com");

for await (const chunk of response.body) {
  console.log(chunk);
}
```

You can also more directly access the `ReadableStream` object.

```ts
const response = await fetch("http://example.com");

const stream = response.body;

const reader = stream.getReader();
const { value, done } = await reader.read();
```

### Fetching a URL with a timeout

To fetch a URL with a timeout, use `AbortSignal.timeout`:

```ts
const response = await fetch("http://example.com", {
  signal: AbortSignal.timeout(1000),
});
```

#### Canceling a request

To cancel a request, use an `AbortController`:

```ts
const controller = new AbortController();

const response = await fetch("http://example.com", {
  signal: controller.signal,
});

controller.abort();
```

### Unix domain sockets

To fetch a URL using a Unix domain socket, use the `unix: string` option:

```ts
const response = await fetch("https://hostname/a/path", {
  unix: "/var/run/path/to/unix.sock",
  method: "POST",
  body: JSON.stringify({ message: "Hello from Bun!" }),
  headers: {
    "Content-Type": "application/json",
  },
});
```

### TLS

To use a client certificate, use the `tls` option:

```ts
await fetch("https://example.com", {
  tls: {
    key: Bun.file("/path/to/key.pem"),
    cert: Bun.file("/path/to/cert.pem"),
    // ca: [Bun.file("/path/to/ca.pem")],
  },
});
```

#### Custom TLS Validation

To customize the TLS validation, use the `checkServerIdentity` option in `tls`

```ts
await fetch("https://example.com", {
  tls: {
    checkServerIdentity: (hostname, peerCertificate) => {
      // Return an error if the certificate is invalid
    },
  },
});
```

This is similar to how it works in Node's `net` module.

## Debugging

To help with debugging, you can pass `verbose: true` to `fetch`:

```ts
const response = await fetch("http://example.com", {
  verbose: true,
});
```

This will print the request and response headers to your terminal:

```sh
[fetch] > HTTP/1.1 GET http://example.com/
[fetch] > Connection: keep-alive
[fetch] > User-Agent: Bun/1.1.21
[fetch] > Accept: */*
[fetch] > Host: example.com
[fetch] > Accept-Encoding: gzip, deflate, br

[fetch] < 200 OK
[fetch] < Content-Encoding: gzip
[fetch] < Age: 201555
[fetch] < Cache-Control: max-age=604800
[fetch] < Content-Type: text/html; charset=UTF-8
[fetch] < Date: Sun, 21 Jul 2024 02:41:14 GMT
[fetch] < Etag: "3147526947+gzip"
[fetch] < Expires: Sun, 28 Jul 2024 02:41:14 GMT
[fetch] < Last-Modified: Thu, 17 Oct 2019 07:18:26 GMT
[fetch] < Server: ECAcc (sac/254F)
[fetch] < Vary: Accept-Encoding
[fetch] < X-Cache: HIT
[fetch] < Content-Length: 648
```

Note: `verbose: boolean` is not part of the Web standard `fetch` API and is specific to Bun.

## Performance

Before an HTTP request can be sent, the DNS lookup must be performed. This can take a significant amount of time, especially if the DNS server is slow or the network connection is poor.

After the DNS lookup, the TCP socket must be connected and the TLS handshake might need to be performed. This can also take a significant amount of time.

After the request completes, consuming the response body can also take a significant amount of time and memory.

At every step of the way, Bun provides APIs to help you optimize the performance of your application.

### DNS prefetching

To prefetch a DNS entry, you can use the `dns.prefetch` API. This API is useful when you know you'll need to connect to a host soon and want to avoid the initial DNS lookup.

```ts
import { dns } from "bun";

dns.prefetch("bun.sh", 443);
```

#### DNS caching

By default, Bun caches and deduplicates DNS queries in-memory for up to 30 seconds. You can see the cache stats by calling `dns.getCacheStats()`:

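(A minimal sketch of what that looks like; treat the exact fields of the returned stats object as an implementation detail that may change between Bun versions.)

```ts
import { dns } from "bun";

// Returns an object of counters, e.g. cache hits, misses, and the
// current cache size.
const stats = dns.getCacheStats();
console.log(stats);
```
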
To learn more about DNS caching in Bun, see the [DNS caching](/docs/api/dns) documentation.

### Preconnect to a host

To preconnect to a host, you can use the `fetch.preconnect` API. This API is useful when you know you'll need to connect to a host soon and want to start the initial DNS lookup, TCP socket connection, and TLS handshake early.

```ts
import { fetch } from "bun";

fetch.preconnect("https://bun.sh");
```

Note: calling `fetch` immediately after `fetch.preconnect` will not make your request faster. Preconnecting only helps if you know you'll need to connect to a host soon, but you're not ready to make the request yet.

#### Preconnect at startup

To preconnect to a host at startup, you can pass `--fetch-preconnect`:

```sh
$ bun --fetch-preconnect https://bun.sh ./my-script.ts
```

This is sort of like `<link rel="preconnect">` in HTML.

This feature is not implemented on Windows yet. If you're interested in using this feature on Windows, please file an issue and we can implement support for it on Windows.

### Connection pooling & HTTP keep-alive

Bun automatically reuses connections to the same host. This is known as connection pooling. This can significantly reduce the time it takes to establish a connection. You don't need to do anything to enable this; it's automatic.

#### Simultaneous connection limit

By default, Bun limits the maximum number of simultaneous `fetch` requests to 256. We do this for several reasons:

- It improves overall system stability. Operating systems have an upper limit on the number of simultaneous open TCP sockets, usually in the low thousands. Nearing this limit causes your entire computer to behave strangely. Applications hang and crash.
- It encourages HTTP Keep-Alive connection reuse. For short-lived HTTP requests, the slowest step is often the initial connection setup. Reusing connections can save a lot of time.

When the limit is exceeded, the requests are queued and sent as soon as the next request ends.

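As an illustration, a sketch that starts more requests than the default limit; the overflow simply waits in Bun's queue:

```ts
// With the default limit of 256, the extra requests are queued and
// dispatched as earlier ones finish; no manual batching is required.
const responses = await Promise.all(
  Array.from({ length: 300 }, () => fetch("http://example.com")),
);
console.log(responses.length); // 300
```
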
You can increase the maximum number of simultaneous connections via the `BUN_CONFIG_MAX_HTTP_REQUESTS` environment variable:

```sh
$ BUN_CONFIG_MAX_HTTP_REQUESTS=512 bun ./my-script.ts
```

The max value for this limit is currently set to 65,536. The maximum port number is 65,535, so it's quite difficult for any one computer to exceed this limit.

### Response buffering

Bun goes to great lengths to optimize the performance of reading the response body. The fastest way to read the response body is to use one of these methods:

- `response.text(): Promise<string>`
- `response.json(): Promise<any>`
- `response.formData(): Promise<FormData>`
- `response.bytes(): Promise<Uint8Array>`
- `response.arrayBuffer(): Promise<ArrayBuffer>`
- `response.blob(): Promise<Blob>`

You can also use `Bun.write` to write the response body to a file on disk:

```ts
import { write } from "bun";

await write("output.txt", response);
```
@@ -106,6 +106,31 @@ The `--minify` argument optimizes the size of the transpiled output code. If you

The `--sourcemap` argument embeds a sourcemap compressed with zstd, so that errors & stacktraces point to their original locations instead of the transpiled location. Bun will automatically decompress & resolve the sourcemap when an error occurs.

## Worker

To use workers in a standalone executable, add the worker's entrypoint to the CLI arguments:

```sh
$ bun build --compile ./index.ts ./my-worker.ts --outfile myapp
```

Then, reference the worker in your code:

```ts
console.log("Hello from Bun!");

// Any of these will work:
new Worker("./my-worker.ts");
new Worker(new URL("./my-worker.ts", import.meta.url));
new Worker(new URL("./my-worker.ts", import.meta.url).href);
```

As of Bun v1.1.25, when you add multiple entrypoints to a standalone executable, they will be bundled separately into the executable.

In the future, we may automatically detect usages of statically-known paths in `new Worker(path)` and then bundle those into the executable, but for now, you'll need to add it to the shell command manually like the above example.

If you use a relative path to a file not included in the standalone executable, it will attempt to load that path from disk relative to the current working directory of the process (and then error if it doesn't exist).

## SQLite

You can use `bun:sqlite` imports with `bun build --compile`.
@@ -179,6 +204,59 @@ console.log(addon.hello());

Unfortunately, if you're using `@mapbox/node-pre-gyp` or other similar tools, you'll need to make sure the `.node` file is directly required or it won't bundle correctly.

### Embed directories

To embed a directory with `bun build --compile`, use a shell glob in your `bun build` command:

```sh
$ bun build --compile ./index.ts ./public/**/*.png
```

Then, you can reference the files in your code:

```ts
import icon from "./public/assets/icon.png" with { type: "file" };
import { file } from "bun";

export default {
  fetch(req) {
    // Embedded files can be streamed from Response objects
    return new Response(file(icon));
  },
};
```

This is honestly a workaround, and we expect to improve this in the future with a more direct API.

### Listing embedded files

To get a list of all embedded files, use `Bun.embeddedFiles`:

```js
import "./icon.png" with { type: "file" };
import { embeddedFiles } from "bun";

console.log(embeddedFiles[0].name); // `icon-${hash}.png`
```

`Bun.embeddedFiles` returns an array of `Blob` objects which you can use to get the size, contents, and other properties of the files.

```ts
embeddedFiles: Blob[]
```

The list of embedded files excludes bundled source code like `.ts` and `.js` files.

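For example, a short sketch that lists each embedded file's name and size (relying on the `name` property shown above, which Bun adds to these blobs):

```ts
import { embeddedFiles } from "bun";

// Each entry is a Blob; Bun also gives it a `name`.
for (const blob of embeddedFiles) {
  console.log(blob.name, blob.size);
}
```
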
#### Content hash

By default, embedded files have a content hash appended to their name. This is useful for situations where you want to serve the file from a URL or CDN and have fewer cache invalidation issues. But sometimes, this is unexpected and you might want the original name instead.

To disable the content hash, pass `--asset-naming` to `bun build --compile` like this:

```sh
$ bun build --compile --asset-naming="[name].[ext]" ./index.ts
```

## Minification

To trim down the size of the executable a little, pass `--minify` to `bun build --compile`. This uses Bun's minifier to reduce the code size. Overall though, Bun's binary is still way too big and we need to make it smaller.

@@ -756,6 +756,25 @@ $ bun build ./index.tsx --outdir ./out --external '*'

{% /codetabs %}

### `packages`

Control whether package dependencies are included in the bundle or not. Possible values: `bundle` (default), `external`. Bun treats any import whose path does not start with `.`, `..`, or `/` as a package.

{% codetabs group="a" %}

```ts#JavaScript
await Bun.build({
  entrypoints: ['./index.ts'],
  packages: 'external',
})
```

```bash#CLI
$ bun build ./index.ts --packages external
```

{% /codetabs %}

### `naming`

Customizes the generated file names. Defaults to `./[dir]/[name].[ext]`.
@@ -1257,7 +1276,7 @@ interface BuildOptions {
  loader?: { [k in string]: Loader }; // See https://bun.sh/docs/bundler/loaders
  manifest?: boolean; // false
  external?: string[]; // []
  sourcemap?: "none" | "inline" | "external" | boolean; // "none"
  sourcemap?: "none" | "inline" | "linked" | "external" | boolean; // "none"
  root?: string; // computed from entrypoints
  naming?:
    | string

@@ -94,8 +94,8 @@ In Bun's CLI, simple boolean flags like `--minify` do not accept an argument. Ot
---

- `--packages`
  - n/a
  - Not supported
- `--packages`
  - No differences

---

@@ -208,8 +208,7 @@ In Bun's CLI, simple boolean flags like `--minify` do not accept an argument. Ot
---

- `--ignore-annotations`
  - n/a
  - Not supported
- `--ignore-dce-annotations`

---


@@ -35,6 +35,10 @@ $ bun add --optional lodash

## `--exact`

{% callout %}
**Alias** — `-E`
{% /callout %}

To add a package and pin to the resolved version, use `--exact`. This will resolve the version of the package and add it to your `package.json` with an exact version number instead of a version range.

```bash

docs/cli/outdated.md (new file, 61 lines)
@@ -0,0 +1,61 @@
Use `bun outdated` to display a table of outdated dependencies with their latest versions for the current workspace:

```sh
$ bun outdated

|--------------------------------------------------------------------|
| Package                                | Current | Update | Latest |
|----------------------------------------|---------|--------|--------|
| @types/bun (dev)                       | 1.1.6   | 1.1.7  | 1.1.7  |
|----------------------------------------|---------|--------|--------|
| @types/react (dev)                     | 18.3.3  | 18.3.4 | 18.3.4 |
|----------------------------------------|---------|--------|--------|
| @typescript-eslint/eslint-plugin (dev) | 7.16.1  | 7.18.0 | 8.2.0  |
|----------------------------------------|---------|--------|--------|
| @typescript-eslint/parser (dev)        | 7.16.1  | 7.18.0 | 8.2.0  |
|----------------------------------------|---------|--------|--------|
| esbuild (dev)                          | 0.21.5  | 0.21.5 | 0.23.1 |
|----------------------------------------|---------|--------|--------|
| eslint (dev)                           | 9.7.0   | 9.9.1  | 9.9.1  |
|----------------------------------------|---------|--------|--------|
| typescript (dev)                       | 5.5.3   | 5.5.4  | 5.5.4  |
|--------------------------------------------------------------------|
```

The `Update` column shows the version that would be installed if you ran `bun update [package]`. This version is the latest version that satisfies the version range specified in your `package.json`.

The `Latest` column shows the latest version available from the registry. `bun update --latest [package]` will update to this version.

Dependency names can be provided to filter the output (pattern matching is supported):

```sh
$ bun outdated "@types/*"

|------------------------------------------------|
| Package            | Current | Update | Latest |
|--------------------|---------|--------|--------|
| @types/bun (dev)   | 1.1.6   | 1.1.8  | 1.1.8  |
|--------------------|---------|--------|--------|
| @types/react (dev) | 18.3.3  | 18.3.4 | 18.3.4 |
|------------------------------------------------|
```

## `--filter`

The `--filter` flag can be used to select workspaces to include in the output. Workspace names or paths can be used as patterns.

```sh
$ bun outdated --filter <pattern>
```

For example, to only show outdated dependencies for workspaces in the `./apps` directory:

```sh
$ bun outdated --filter './apps/*'
```

If you want to do the same, but exclude the `./apps/api` workspace:

```sh
$ bun outdated --filter './apps/*' --filter '!./apps/api'
```
@@ -69,7 +69,7 @@ export const movies = sqliteTable("movies", {
We can use the `drizzle-kit` CLI to generate an initial SQL migration.

```sh
$ bunx drizzle-kit generate:sqlite --schema ./schema.ts
$ bunx drizzle-kit generate --dialect sqlite --schema ./schema.ts
```

---

@@ -10,7 +10,7 @@ This automatically load balances incoming requests across multiple instances of
```ts#server.ts
import { serve } from "bun";

const id = = Math.random().toString(36).slice(2);
const id = Math.random().toString(36).slice(2);

serve({
  port: process.env.PORT || 8080,

@@ -13,7 +13,7 @@ console.log(Bun.argv);
Running this file with arguments results in the following:

```sh
$ bun run cli.tsx --flag1 --flag2 value
$ bun run cli.ts --flag1 --flag2 value
[ '/path/to/bun', '/path/to/cli.ts', '--flag1', '--flag2', 'value' ]
```

@@ -47,7 +47,7 @@ console.log(positionals);
then it outputs

```
$ bun run cli.tsx --flag1 --flag2 value
$ bun run cli.ts --flag1 --flag2 value
{
  flag1: true,
  flag2: "value",

@@ -13,7 +13,7 @@ jobs:
  steps:
    # ...
    - uses: actions/checkout@v4
+   - uses: oven-sh/setup-bun@v1
+   - uses: oven-sh/setup-bun@v2

    # run any `bun` or `bunx` command
+   - run: bun install
@@ -33,7 +33,7 @@ jobs:
  runs-on: ubuntu-latest
  steps:
    # ...
    - uses: oven-sh/setup-bun@v1
    - uses: oven-sh/setup-bun@v2
+     with:
+       bun-version: 1.0.11 # or "latest", "canary", <sha>
```

@@ -5,8 +5,8 @@ name: Define and replace static globals & constants
The `--define` flag lets you declare statically-analyzable constants and globals. It replaces all usages of an identifier or property in a JavaScript or TypeScript file with a constant value. This feature is supported at runtime and also in `bun build`. This is sort of similar to `#define` in C/C++, except for JavaScript.

```ts
bun --define:process.env.NODE_ENV="'production'" src/index.ts # Runtime
bun build --define:process.env.NODE_ENV="'production'" src/index.ts # Build
bun --define process.env.NODE_ENV="'production'" src/index.ts # Runtime
bun build --define process.env.NODE_ENV="'production'" src/index.ts # Build
```

---
@@ -95,7 +95,7 @@ To replace all usages of `AWS` with the JSON object `{"ACCESS_KEY":"abc","SECRET

```sh
# JSON
bun --define:AWS='{"ACCESS_KEY":"abc","SECRET_KEY":"def"}' src/index.ts
bun --define AWS='{"ACCESS_KEY":"abc","SECRET_KEY":"def"}' src/index.ts
```

Those will be transformed into the equivalent JavaScript code.
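
Conceptually, the replacement looks like this (a hypothetical `src/index.ts`; Bun may additionally constant-fold the property access):

```ts
// Before
console.log(AWS.ACCESS_KEY);

// After the --define above, the identifier is replaced with the object literal:
console.log({ ACCESS_KEY: "abc", SECRET_KEY: "def" }.ACCESS_KEY); // "abc"
```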
@@ -119,7 +119,7 @@ You can also pass properties to the `--define` flag.
For example, to replace all usages of `console.write` with `console.log`, you can use the following command (requires Bun v1.1.5 or later)

```sh
bun --define:console.write=console.log src/index.ts
bun --define console.write=console.log src/index.ts
```

That transforms the following input:

@@ -27,6 +27,28 @@ data.version; // => "1.0.0"
data.author.name; // => "John Dough"
```

Bun also supports [Import Attributes](https://github.com/tc39/proposal-import-attributes/) and [JSON modules](https://github.com/tc39/proposal-json-modules) syntax.

```ts
import data from "./package.json" with { type: "json" };

data.name; // => "bun"
data.version; // => "1.0.0"
data.author.name; // => "John Dough"
```

---

See [Docs > Runtime > TypeScript](/docs/runtime/typescript) for more information on using TypeScript with Bun.

@@ -1,6 +1,6 @@
All packages downloaded from the registry are stored in a global cache at `~/.bun/install/cache`. They are stored in subdirectories named like `${name}@${version}`, so multiple versions of a package can be cached.

{% details summary="Configuring cache behavior" (bunfig.toml) %}
{% details summary="Configuring cache behavior (bunfig.toml)" %}

```toml
[install.cache]

@@ -2,7 +2,7 @@ Bun supports loading configuration options from [`.npmrc`](https://docs.npmjs.co

{% callout %}

**NOTE**: We recommend migrating your `.npmrc` file to Bun's [`bunfig.toml`](/docs/runtime/bunfig) format, as it provides more flexible options and can let you configure Bun-specific configuration options.
**NOTE**: We recommend migrating your `.npmrc` file to Bun's [`bunfig.toml`](/docs/runtime/bunfig) format, as it provides more flexible options and can let you configure Bun-specific options.

{% /callout %}

@@ -50,16 +50,22 @@ Allows you to set options for a specific registry:


# or you could set a username and password
# note that the password is base64 encoded
//http://localhost:4873/:username=myusername

//http://localhost:4873/:_password=${NPM_PASSWORD}

# or use _auth, which is your username and password
# combined into a single string, which is then base 64 encoded
//http://localhost:4873/:_auth=${NPM_AUTH}
```

The following options are supported:

- `_authToken`
- `username`
- `_password`
- `_password` (base64 encoded password)
- `_auth` (base64 encoded username:password, e.g. `btoa(username + ":" + password)`)
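
For instance, a quick sketch of how that `_auth` value can be produced (the credentials here are placeholders):

```ts
const username = "myusername"; // placeholder
const password = "hunter2"; // placeholder

// _auth is the base64 encoding of "username:password".
const auth = btoa(`${username}:${password}`);
console.log(auth); // base64-encoded "myusername:hunter2"
```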
The equivalent `bunfig.toml` option is to add a key in [`install.scopes`](/docs/runtime/bunfig#install-registry):


@@ -164,6 +164,9 @@ export default {
  page("cli/update", "`bun update`", {
    description: "Update your project's dependencies.",
  }),
  page("cli/outdated", "`bun outdated`", {
    description: "Check for outdated dependencies.",
  }),
  page("cli/link", "`bun link`", {
    description: "Install local packages as dependencies in your project.",
  }),
@@ -287,8 +290,11 @@ export default {

  divider("API"),
  page("api/http", "HTTP server", {
    description: `Bun implements Web-standard fetch, plus a Bun-native API for building fast HTTP servers.`,
    description: `Bun implements a fast HTTP server built on Request/Response objects, along with supporting node:http APIs.`,
  }), // "`Bun.serve`"),
  page("api/fetch", "HTTP client", {
    description: `Bun implements Web-standard fetch with some Bun-native extensions.`,
  }), // "fetch"),
  page("api/websockets", "WebSockets", {
    description: `Bun supports server-side WebSockets with on-the-fly compression, TLS support, and a Bun-native pubsub API.`,
  }), // "`Bun.serve`"),

@@ -171,6 +171,8 @@ Once imported, you should see something like this:

{% image alt="Viewing heap snapshot in Safari" src="https://user-images.githubusercontent.com/709451/204429337-b0d8935f-3509-4071-b991-217794d1fb27.png" caption="Viewing heap snapshot in Safari Dev Tools" /%}

> The [web debugger](https://bun.sh/docs/runtime/debugger#inspect) also offers the timeline feature, which allows you to track and examine the memory usage of the running debug session.

### Native heap stats

Bun uses mimalloc for the non-JavaScript heap. To report a summary of non-JavaScript memory usage, set the `MIMALLOC_SHOW_STATS=1` environment variable, and stats will print on exit.

@@ -60,7 +60,7 @@ Visual Studio can be installed graphically using the wizard or through WinGet:

After Visual Studio, you need the following:

- LLVM 16
- LLVM 18.1.8
- Go
- Rust
- NASM
@@ -78,14 +78,14 @@ After Visual Studio, you need the following:

```ps1#WinGet
## Select "Add LLVM to the system PATH for all users" in the LLVM installer
> winget install -i LLVM.LLVM -v 16.0.6 && winget install GoLang.Go Rustlang.Rustup NASM.NASM StrawberryPerl.StrawberryPerl RubyInstallerTeam.Ruby.3.2 OpenJS.NodeJS.LTS
> winget install -i LLVM.LLVM -v 18.1.8 && winget install GoLang.Go Rustlang.Rustup NASM.NASM StrawberryPerl.StrawberryPerl RubyInstallerTeam.Ruby.3.2 OpenJS.NodeJS.LTS
```

```ps1#Scoop
> irm https://get.scoop.sh | iex
> scoop install nodejs-lts go rust nasm ruby perl
# scoop seems to be buggy if you install llvm and the rest at the same time
> scoop install llvm@16.0.6
> scoop install llvm@18.1.8
```

{% /codetabs %}

@@ -179,7 +179,7 @@ These environment variables are read by Bun and configure aspects of its behavio
---

- `BUN_CONFIG_NO_CLEAR_TERMINAL_ON_RELOAD`
  - If `BUN_CONFIG_NO_CLEAR_TERMINAL_ON_RELOAD=1`, then `bun --watch` will not clear the console on reload
  - If `BUN_CONFIG_NO_CLEAR_TERMINAL_ON_RELOAD=true`, then `bun --watch` will not clear the console on reload

---


@@ -48,14 +48,6 @@ In this case, we are importing from `./hello`, a relative path with no extension
- `./hello/index.cjs`
- `./hello/index.json`

Import paths are case-insensitive, meaning these are all valid imports:

```ts#index.ts
import { hello } from "./hello";
import { hello } from "./HELLO";
import { hello } from "./hElLo";
```

Import paths can optionally include extensions. If an extension is present, Bun will only check for a file with that exact extension.

```ts#index.ts

@@ -22,7 +22,7 @@ This page is updated regularly to reflect compatibility status of the latest ver

### [`node:cluster`](https://nodejs.org/api/cluster.html)

🔴 Not implemented.
🟡 Handles and file descriptors cannot be passed between workers, which means load-balancing HTTP requests across processes is only supported on Linux at this time (via `SO_REUSEPORT`). Otherwise, implemented but not battle-tested.

### [`node:console`](https://nodejs.org/api/console.html)

@@ -153,7 +153,7 @@ Some methods are not optimized yet.

### [`node:util`](https://nodejs.org/api/util.html)

🟡 Missing `MIMEParams` `MIMEType` `aborted` `debug` `getSystemErrorMap` `getSystemErrorName` `transferableAbortController` `transferableAbortSignal` `stripVTControlCharacters`
🟡 Missing `MIMEParams` `MIMEType` `aborted` `debug` `getSystemErrorMap` `transferableAbortController` `transferableAbortSignal` `stripVTControlCharacters`

### [`node:v8`](https://nodejs.org/api/v8.html)

@@ -341,7 +341,7 @@ The table below lists all globals implemented by Node.js and Bun's current compa

### [`process`](https://nodejs.org/api/process.html)

🟡 Missing `domain` `initgroups` `setegid` `seteuid` `setgid` `setgroups` `setuid` `allowedNodeEnvironmentFlags` `getActiveResourcesInfo` `setActiveResourcesInfo` `moduleLoadList` `setSourceMapsEnabled` `channel`. `process.binding` is partially implemented.
🟡 Missing `domain` `initgroups` `setegid` `seteuid` `setgid` `setgroups` `setuid` `allowedNodeEnvironmentFlags` `getActiveResourcesInfo` `setActiveResourcesInfo` `moduleLoadList` `setSourceMapsEnabled`. `process.binding` is partially implemented.

### [`queueMicrotask()`](https://developer.mozilla.org/en-US/docs/Web/API/queueMicrotask)

@@ -413,7 +413,7 @@ The table below lists all globals implemented by Node.js and Bun's current compa

### [`TextDecoderStream`](https://developer.mozilla.org/en-US/docs/Web/API/TextDecoderStream)

🔴 Not implemented.
🟢 Fully implemented.

### [`TextEncoder`](https://developer.mozilla.org/en-US/docs/Web/API/TextEncoder)

@@ -421,7 +421,7 @@ The table below lists all globals implemented by Node.js and Bun's current compa

### [`TextEncoderStream`](https://developer.mozilla.org/en-US/docs/Web/API/TextEncoderStream)

🔴 Not implemented.
🟢 Fully implemented.

### [`TransformStream`](https://developer.mozilla.org/en-US/docs/Web/API/TransformStream)


@@ -235,6 +235,55 @@ const result = await $`cat < ${response} | wc -w`.text();
console.log(result); // 6\n
```

## Command substitution (`$(...)`)

Command substitution allows you to substitute the output of another script into the current script:

```js
import { $ } from "bun";

// Prints out the hash of the current commit
await $`echo Hash of current commit: $(git rev-parse HEAD)`;
```

This is a textual insertion of the command's output and can be used to, for example, declare a shell variable:

```js
import { $ } from "bun";

await $`
REV=$(git rev-parse HEAD)
docker build -t myapp:$REV
echo Done building docker image "myapp:$REV"
`;
```

{% callout %}

**NOTE**: Because Bun internally uses the special [`raw`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Template_literals#raw_strings) property on the input template literal, using the backtick syntax for command substitution won't work:

```js
import { $ } from "bun";

await $`echo \`echo hi\``;
```

Instead of printing:

```
hi
```

The above will print out:

```
echo hi
```

We instead recommend sticking to the `$(...)` syntax.

{% /callout %}

## Environment variables

Environment variables can be set like in bash:

@@ -7,22 +7,37 @@ The following Web APIs are partially or completely supported.
---

- HTTP
  - [`fetch`](https://developer.mozilla.org/en-US/docs/Web/API/fetch) [`Response`](https://developer.mozilla.org/en-US/docs/Web/API/Response) [`Request`](https://developer.mozilla.org/en-US/docs/Web/API/Request) [`Headers`](https://developer.mozilla.org/en-US/docs/Web/API/Headers) [`AbortController`](https://developer.mozilla.org/en-US/docs/Web/API/AbortController) [`AbortSignal`](https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal)
  - [`fetch`](https://developer.mozilla.org/en-US/docs/Web/API/fetch)
    [`Response`](https://developer.mozilla.org/en-US/docs/Web/API/Response)
    [`Request`](https://developer.mozilla.org/en-US/docs/Web/API/Request)
    [`Headers`](https://developer.mozilla.org/en-US/docs/Web/API/Headers)
    [`AbortController`](https://developer.mozilla.org/en-US/docs/Web/API/AbortController)
    [`AbortSignal`](https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal)

---

- URLs
  - [`URL`](https://developer.mozilla.org/en-US/docs/Web/API/URL) [`URLSearchParams`](https://developer.mozilla.org/en-US/docs/Web/API/URLSearchParams)
  - [`URL`](https://developer.mozilla.org/en-US/docs/Web/API/URL)
    [`URLSearchParams`](https://developer.mozilla.org/en-US/docs/Web/API/URLSearchParams)

---

- Web Workers
  - [`Worker`](https://developer.mozilla.org/en-US/docs/Web/API/Worker) [`self.postMessage`](https://developer.mozilla.org/en-US/docs/Web/API/DedicatedWorkerGlobalScope/postMessage) [`structuredClone`](https://developer.mozilla.org/en-US/docs/Web/API/structuredClone) [`MessagePort`](https://developer.mozilla.org/en-US/docs/Web/API/MessagePort) [`MessageChannel`](https://developer.mozilla.org/en-US/docs/Web/API/MessageChannel), [`BroadcastChannel`](https://developer.mozilla.org/en-US/docs/Web/API/BroadcastChannel).
  - [`Worker`](https://developer.mozilla.org/en-US/docs/Web/API/Worker)
    [`self.postMessage`](https://developer.mozilla.org/en-US/docs/Web/API/DedicatedWorkerGlobalScope/postMessage)
    [`structuredClone`](https://developer.mozilla.org/en-US/docs/Web/API/structuredClone)
    [`MessagePort`](https://developer.mozilla.org/en-US/docs/Web/API/MessagePort)
    [`MessageChannel`](https://developer.mozilla.org/en-US/docs/Web/API/MessageChannel)
    [`BroadcastChannel`](https://developer.mozilla.org/en-US/docs/Web/API/BroadcastChannel).

---

- Streams
  - [`ReadableStream`](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream) [`WritableStream`](https://developer.mozilla.org/en-US/docs/Web/API/WritableStream) [`TransformStream`](https://developer.mozilla.org/en-US/docs/Web/API/TransformStream) [`ByteLengthQueuingStrategy`](https://developer.mozilla.org/en-US/docs/Web/API/ByteLengthQueuingStrategy) [`CountQueuingStrategy`](https://developer.mozilla.org/en-US/docs/Web/API/CountQueuingStrategy) and associated classes
  - [`ReadableStream`](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream)
    [`WritableStream`](https://developer.mozilla.org/en-US/docs/Web/API/WritableStream)
    [`TransformStream`](https://developer.mozilla.org/en-US/docs/Web/API/TransformStream)
    [`ByteLengthQueuingStrategy`](https://developer.mozilla.org/en-US/docs/Web/API/ByteLengthQueuingStrategy)
    [`CountQueuingStrategy`](https://developer.mozilla.org/en-US/docs/Web/API/CountQueuingStrategy) and associated classes

---

@@ -37,7 +52,10 @@ The following Web APIs are partially or completely supported.
---

- Encoding and decoding
  - [`atob`](https://developer.mozilla.org/en-US/docs/Web/API/atob) [`btoa`](https://developer.mozilla.org/en-US/docs/Web/API/btoa) [`TextEncoder`](https://developer.mozilla.org/en-US/docs/Web/API/TextEncoder) [`TextDecoder`](https://developer.mozilla.org/en-US/docs/Web/API/TextDecoder)
  - [`atob`](https://developer.mozilla.org/en-US/docs/Web/API/atob)
    [`btoa`](https://developer.mozilla.org/en-US/docs/Web/API/btoa)
    [`TextEncoder`](https://developer.mozilla.org/en-US/docs/Web/API/TextEncoder)
    [`TextDecoder`](https://developer.mozilla.org/en-US/docs/Web/API/TextDecoder)

---

@@ -47,24 +65,28 @@ The following Web APIs are partially or completely supported.
---

- Timeouts
  - [`setTimeout`](https://developer.mozilla.org/en-US/docs/Web/API/setTimeout) [`clearTimeout`](https://developer.mozilla.org/en-US/docs/Web/API/clearTimeout)
  - [`setTimeout`](https://developer.mozilla.org/en-US/docs/Web/API/setTimeout)
    [`clearTimeout`](https://developer.mozilla.org/en-US/docs/Web/API/clearTimeout)

---

- Intervals
  - [`setInterval`](https://developer.mozilla.org/en-US/docs/Web/API/setInterval)[`clearInterval`](https://developer.mozilla.org/en-US/docs/Web/API/clearInterval)
  - [`setInterval`](https://developer.mozilla.org/en-US/docs/Web/API/setInterval)
    [`clearInterval`](https://developer.mozilla.org/en-US/docs/Web/API/clearInterval)

---

- Crypto
  - [`crypto`](https://developer.mozilla.org/en-US/docs/Web/API/Crypto) [`SubtleCrypto`](https://developer.mozilla.org/en-US/docs/Web/API/SubtleCrypto)
  - [`crypto`](https://developer.mozilla.org/en-US/docs/Web/API/Crypto)
    [`SubtleCrypto`](https://developer.mozilla.org/en-US/docs/Web/API/SubtleCrypto)
    [`CryptoKey`](https://developer.mozilla.org/en-US/docs/Web/API/CryptoKey)

---

- Debugging

  - [`console`](https://developer.mozilla.org/en-US/docs/Web/API/console) [`performance`](https://developer.mozilla.org/en-US/docs/Web/API/Performance)
  - [`console`](https://developer.mozilla.org/en-US/docs/Web/API/console)
    [`performance`](https://developer.mozilla.org/en-US/docs/Web/API/Performance)

---

@@ -79,7 +101,9 @@ The following Web APIs are partially or completely supported.
---

- User interaction
  - [`alert`](https://developer.mozilla.org/en-US/docs/Web/API/Window/alert) [`confirm`](https://developer.mozilla.org/en-US/docs/Web/API/Window/confirm) [`prompt`](https://developer.mozilla.org/en-US/docs/Web/API/Window/prompt) (intended for interactive CLIs)
  - [`alert`](https://developer.mozilla.org/en-US/docs/Web/API/Window/alert)
    [`confirm`](https://developer.mozilla.org/en-US/docs/Web/API/Window/confirm)
    [`prompt`](https://developer.mozilla.org/en-US/docs/Web/API/Window/prompt) (intended for interactive CLIs)

<!-- - Blocking. Prints the alert message to terminal and awaits `[ENTER]` before proceeding. -->
<!-- - Blocking. Prints confirmation message and awaits `[y/N]` input from user. Returns `true` if user entered `y` or `Y`, `false` otherwise.
@@ -94,7 +118,10 @@ The following Web APIs are partially or completely supported.

- Events
  - [`EventTarget`](https://developer.mozilla.org/en-US/docs/Web/API/EventTarget)
    [`Event`](https://developer.mozilla.org/en-US/docs/Web/API/Event) [`ErrorEvent`](https://developer.mozilla.org/en-US/docs/Web/API/ErrorEvent) [`CloseEvent`](https://developer.mozilla.org/en-US/docs/Web/API/CloseEvent) [`MessageEvent`](https://developer.mozilla.org/en-US/docs/Web/API/MessageEvent)
    [`Event`](https://developer.mozilla.org/en-US/docs/Web/API/Event)
    [`ErrorEvent`](https://developer.mozilla.org/en-US/docs/Web/API/ErrorEvent)
    [`CloseEvent`](https://developer.mozilla.org/en-US/docs/Web/API/CloseEvent)
    [`MessageEvent`](https://developer.mozilla.org/en-US/docs/Web/API/MessageEvent)

---


@@ -196,3 +196,41 @@ As of Bun v1.0.19, Bun automatically resolves the `specifier` argument to `mock.
After resolution, the mocked module is stored in the ES Module registry **and** the CommonJS require cache. This means that you can use `import` and `require` interchangeably for mocked modules.

The callback function is called lazily, only if the module is imported or required. This means that you can use `mock.module()` to mock modules that don't exist yet, and it means that you can use `mock.module()` to mock modules that are imported by other modules.

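For instance, a small sketch (the module path here is hypothetical) that mocks a module which never exists on disk:

```ts
import { expect, mock, test } from "bun:test";

// "./virtual-config" doesn't need to exist; the factory supplies its exports.
mock.module("./virtual-config", () => ({
  default: { featureFlag: true },
}));

test("reads the mocked module", async () => {
  const config = (await import("./virtual-config")).default;
  expect(config.featureFlag).toBe(true);
});
```
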
## Restore all function mocks to their original values with `mock.restore()`

Instead of manually restoring each mock individually with `mockFn.mockRestore()`, restore all mocks with one command by calling `mock.restore()`. Doing so does not reset the value of modules overridden with `mock.module()`.

You can reduce the amount of code in your tests by calling `mock.restore()` in `afterEach` blocks in each test file, or even in your [test preload code](https://bun.sh/docs/runtime/bunfig#test-preload).

```ts
import { expect, mock, spyOn, test } from "bun:test";

import * as fooModule from './foo.ts';
import * as barModule from './bar.ts';
import * as bazModule from './baz.ts';

test('foo, bar, baz', () => {
  const fooSpy = spyOn(fooModule, 'foo');
  const barSpy = spyOn(barModule, 'bar');
  const bazSpy = spyOn(bazModule, 'baz');

  expect(fooSpy()).toBe('foo');
  expect(barSpy()).toBe('bar');
  expect(bazSpy()).toBe('baz');

  fooSpy.mockImplementation(() => 42);
  barSpy.mockImplementation(() => 43);
  bazSpy.mockImplementation(() => 44);

  expect(fooSpy()).toBe(42);
  expect(barSpy()).toBe(43);
  expect(bazSpy()).toBe(44);

  mock.restore();

  expect(fooSpy()).toBe('foo');
  expect(barSpy()).toBe('bar');
  expect(bazSpy()).toBe('baz');
});
```

@@ -195,7 +195,6 @@ pub fn main() anyerror!void {
        args.headers_buf,
        response_body_string,
        args.body,
        0,
        HTTP.FetchRedirect.follow,
    ),
};

@@ -31,7 +31,6 @@ const params = [_]clap.Param(clap.Help){
    clap.parseParam("-b, --body <STR> HTTP request body as a string") catch unreachable,
    clap.parseParam("-f, --file <STR> File path to load as body") catch unreachable,
    clap.parseParam("-n, --count <INT> How many runs? Default 10") catch unreachable,
    clap.parseParam("-t, --timeout <INT> Max duration per request") catch unreachable,
    clap.parseParam("-r, --retry <INT> Max retry count") catch unreachable,
    clap.parseParam("--no-gzip Disable gzip") catch unreachable,
    clap.parseParam("--no-deflate Disable deflate") catch unreachable,
@@ -75,7 +74,6 @@ pub const Arguments = struct {
    body: string = "",
    turbo: bool = false,
    count: usize = 10,
    timeout: usize = 0,
    repeat: usize = 0,
    concurrency: u16 = 32,

@@ -165,10 +163,6 @@ pub const Arguments = struct {
    // .keep_alive = !args.flag("--no-keep-alive"),
    .concurrency = std.fmt.parseInt(u16, args.option("--max-concurrency") orelse "32", 10) catch 32,
    .turbo = args.flag("--turbo"),
    .timeout = std.fmt.parseInt(usize, args.option("--timeout") orelse "0", 10) catch |err| {
        Output.prettyErrorln("<r><red>{s}<r> parsing timeout", .{@errorName(err)});
        Global.exit(1);
    },
    .count = std.fmt.parseInt(usize, args.option("--count") orelse "10", 10) catch |err| {
        Output.prettyErrorln("<r><red>{s}<r> parsing count", .{@errorName(err)});
        Global.exit(1);
@@ -225,7 +219,6 @@ pub fn main() anyerror!void {
        args.headers_buf,
        response_body,
        "",
        args.timeout,
    ),
};
ctx.http.client.verbose = args.verbose;

@@ -4,7 +4,7 @@
  "workspaces": [
    "./packages/bun-types"
  ],
  "dependencies": {
  "devDependencies": {
    "@vscode/debugadapter": "^1.65.0",
    "esbuild": "^0.21.4",
    "eslint": "^9.4.0",
@@ -15,9 +15,7 @@
    "react": "^18.3.1",
    "react-dom": "^18.3.1",
    "source-map-js": "^1.2.0",
    "typescript": "^5.4.5"
  },
  "devDependencies": {
    "typescript": "^5.4.5",
    "@types/bun": "^1.1.3",
    "@types/react": "^18.3.3",
    "@typescript-eslint/eslint-plugin": "^7.11.0",
@@ -31,7 +29,7 @@
    "bump": "bun ./scripts/bump.ts",
    "build": "if [ ! -e build ]; then bun setup; fi && ninja -C build",
    "build:valgrind": "cmake . -DZIG_OPTIMIZE=Debug -DUSE_DEBUG_JSC=ON -DCMAKE_BUILD_TYPE=Debug -GNinja -Bbuild-valgrind && ninja -Cbuild-valgrind",
    "build:tidy": "BUN_SILENT=1 cmake --log-level=WARNING . -DZIG_OPTIMIZE=Debug -DUSE_DEBUG_JSC=ON -DBUN_TIDY_ONLY=ON -DCMAKE_BUILD_TYPE=Debug -GNinja -Bbuild-tidy >> ${GITHUB_STEP_SUMMARY:-/dev/stdout} && BUN_SILENT=1 ninja -Cbuild-tidy >> ${GITHUB_STEP_SUMMARY:-/dev/stdout}",
    "build:tidy": "bash ./scripts/env.sh && BUN_SILENT=1 cmake --log-level=WARNING . ${CMAKE_FLAGS[@]} -DZIG_OPTIMIZE=Debug -DUSE_DEBUG_JSC=ON -DBUN_TIDY_ONLY=ON -DCMAKE_BUILD_TYPE=Debug -GNinja -Bbuild-tidy && BUN_SILENT=1 ninja -Cbuild-tidy",
    "build:tidy-extra": "cmake . -DZIG_OPTIMIZE=Debug -DUSE_DEBUG_JSC=ON -DBUN_TIDY_ONLY_EXTRA=ON -DCMAKE_BUILD_TYPE=Debug -GNinja -Bbuild-tidy && ninja -Cbuild-tidy",
    "build:release": "cmake . -DCMAKE_BUILD_TYPE=Release -GNinja -Bbuild-release && ninja -Cbuild-release",
    "build:release:local": "cmake . -DCMAKE_BUILD_TYPE=Release -DWEBKIT_DIR=$(pwd)/src/bun.js/WebKit/WebKitBuild/Release -GNinja -Bbuild-release-local && ninja -Cbuild-release-local",
@@ -39,6 +37,7 @@
    "build:debug-zig-release": "cmake . -DCMAKE_BUILD_TYPE=Release -DZIG_OPTIMIZE=Debug -GNinja -Bbuild-debug-zig-release && ninja -Cbuild-debug-zig-release",
    "build:safe": "cmake . -DZIG_OPTIMIZE=ReleaseSafe -DUSE_DEBUG_JSC=ON -DCMAKE_BUILD_TYPE=Release -GNinja -Bbuild-safe && ninja -Cbuild-safe",
    "build:windows": "cmake -B build -S . -G Ninja -DCMAKE_BUILD_TYPE=Debug && ninja -Cbuild",
    "build:windows:release": "cmake -B build-release -S . -G Ninja -DCMAKE_BUILD_TYPE=Release && ninja -Cbuild-release",
    "typecheck": "tsc --noEmit && cd test && bun run typecheck",
    "fmt": "prettier --write --cache './{.vscode,src,test,bench,packages/{bun-types,bun-inspector-*,bun-vscode,bun-debug-adapter-protocol}}/**/*.{mjs,ts,tsx,js,jsx}'",
    "fmt:zig": "zig fmt src/*.zig src/*/*.zig src/*/*/*.zig src/*/*/*/*.zig",

@@ -5,7 +5,7 @@
"std.debug.assert": "Use bun.assert instead",
"std.debug.dumpStackTrace": "Use bun.handleErrorReturnTrace or bun.crash_handler.dumpStackTrace instead",
"std.debug.print": "Don't let this be committed",
"std.mem.indexOfAny(": "Use bun.strings.indexOfAny",
"std.mem.indexOfAny(u8": "Use bun.strings.indexOfAny",
"undefined != ": "This is by definition Undefined Behavior.",
"undefined == ": "This is by definition Undefined Behavior.",
"bun.toFD(std.fs.cwd().fd)": "Use bun.FD.cwd()",
@@ -13,5 +13,6 @@
"std.StringArrayHashMap(": "bun.StringArrayHashMap has a faster `eql`",
"std.StringHashMapUnmanaged(": "bun.StringHashMapUnmanaged has a faster `eql`",
"std.StringHashMap(": "bun.StringHashMap has a faster `eql`",
"std.enums.tagName(": "Use bun.tagName instead",
"": ""
}
@@ -239,7 +239,7 @@ Starting "${testFileName}"
GITHUB_ACTIONS: process.env.GITHUB_ACTIONS ?? "true",
BUN_DEBUG_QUIET_LOGS: "1",
BUN_INSTALL_CACHE_DIR: join(TMPDIR, ".bun-install-cache"),
BUN_ENABLE_CRASH_REPORTING: "1",
BUN_ENABLE_CRASH_REPORTING: "0",
[windows ? "TEMP" : "TMPDIR"]: TMPDIR,
},
});
@@ -22,10 +22,10 @@ bun upgrade
- [Linux, arm64](https://www.npmjs.com/package/@oven/bun-linux-aarch64)
- [Linux, x64](https://www.npmjs.com/package/@oven/bun-linux-x64)
- [Linux, x64 (without AVX2 instructions)](https://www.npmjs.com/package/@oven/bun-linux-x64-baseline)
- [Windows (using Windows Subsystem for Linux, aka. "WSL")](https://relatablecode.com/how-to-set-up-bun-on-a-windows-machine)
- [Windows](https://www.npmjs.com/package/@oven/bun-windows-x64)
- [Windows (without AVX2 instructions)](https://www.npmjs.com/package/@oven/bun-windows-x64-baseline)

### Future Platforms

- [Windows](https://github.com/oven-sh/bun/issues/43)
- Unix-like variants such as FreeBSD, OpenBSD, etc.
- Android and iOS
@@ -173,6 +173,14 @@ function publishModule(name: string, dryRun?: boolean): void {
);
error(stderr || stdout);
if (exitCode !== 0) {
if (
stdout.includes("You cannot publish over the previously published version") ||
stderr.includes("You cannot publish over the previously published version")
) {
console.warn("Ignoring npm publish error:", stdout, stderr);
return;
}

throw new Error("npm publish failed with code " + exitCode);
}
} else {
packages/bun-types/bun.d.ts (75 changes, vendored)
@@ -1455,7 +1455,7 @@ declare module "bun" {
* ```js
* const {imports, exports} = transpiler.scan(`
* import {foo} from "baz";
* const hello = "hi!";
* export const hello = "hi!";
* `);
*
* console.log(imports); // ["baz"]
@@ -1516,11 +1516,12 @@ declare module "bun" {
plugins?: BunPlugin[];
// manifest?: boolean; // whether to return manifest
external?: string[];
packages?: "bundle" | "external";
publicPath?: string;
define?: Record<string, string>;
// origin?: string; // e.g. http://mydomain.com
loader?: { [k in string]: Loader };
sourcemap?: "none" | "inline" | "external"; // default: "none", true -> "inline"
sourcemap?: "none" | "linked" | "inline" | "external"; // default: "none", true -> "inline"
/**
* package.json `exports` conditions used when resolving imports
*
@@ -1536,6 +1537,16 @@ declare module "bun" {
syntax?: boolean;
identifiers?: boolean;
};
/**
* Ignore dead code elimination/tree-shaking annotations such as @__PURE__ and package.json
* "sideEffects" fields. This should only be used as a temporary workaround for incorrect
* annotations in libraries.
*/
ignoreDCEAnnotations?: boolean;
/**
* Force emitting @__PURE__ annotations even if minify.whitespace is true.
*/
emitDCEAnnotations?: boolean;
// treeshaking?: boolean;

// jsx?:
@@ -2331,6 +2342,14 @@ declare module "bun" {
*/
unix?: never;

/**
* Sets the number of seconds to wait before timing out a connection
* due to inactivity.
*
* Default is `10` seconds.
*/
idleTimeout?: number;
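The new `idleTimeout` option maps straight onto `Bun.serve`. A minimal sketch (port and handler are illustrative):

```ts
import { serve } from "bun";

const server = serve({
  port: 3000,
  // Drop connections that stay inactive for 30 seconds
  // instead of the 10-second default documented above.
  idleTimeout: 30,
  fetch() {
    return new Response("ok");
  },
});

console.log(`listening on ${server.port}`);
```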

/**
* Handle HTTP requests
*
@@ -2748,6 +2767,16 @@ declare module "bun" {
compress?: boolean,
): ServerWebSocketSendStatus;

/**
* A count of connections subscribed to a given topic
*
* This operation will loop through each topic internally to get the count.
*
* @param topic the websocket topic to check how many subscribers are connected to
* @returns the number of subscribers
*/
subscriberCount(topic: string): number;
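A sketch of how `subscriberCount` might be used from a pub/sub WebSocket server (the topic name and handlers are illustrative):

```ts
import { serve } from "bun";

const server = serve({
  fetch(req, server) {
    if (server.upgrade(req)) return;
    return new Response("expected a WebSocket upgrade", { status: 400 });
  },
  websocket: {
    open(ws) {
      ws.subscribe("chat");
      // Count every connection currently subscribed to the "chat" topic.
      console.log("chat subscribers:", server.subscriberCount("chat"));
    },
    message() {},
  },
});
```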

/**
* Returns the client IP address and port of the given Request. If the request was closed or is a unix socket, returns null.
*
@@ -2849,6 +2878,13 @@ declare module "bun" {
// tslint:disable-next-line:unified-signatures
function file(path: string | URL, options?: BlobPropertyBag): BunFile;

/**
* A list of files embedded into the standalone executable. Lexicographically sorted by name.
*
* If the process is not a standalone executable, this returns an empty array.
*/
const embeddedFiles: ReadonlyArray<Blob>;
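In a binary produced by `bun build --compile`, the new `embeddedFiles` array exposes the bundled assets; elsewhere it is empty. A minimal sketch:

```ts
// Inside a standalone executable this lists each embedded asset;
// in a normal `bun run` session the array is simply empty.
for (const blob of Bun.embeddedFiles) {
  console.log(blob.type, blob.size);
}
```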

/**
* `Blob` that leverages the fastest system calls available to operate on files.
*
@@ -3099,6 +3135,10 @@ declare module "bun" {
*/
function openInEditor(path: string, options?: EditorOptions): void;

const fetch: typeof globalThis.fetch & {
preconnect(url: string): void;
};

interface EditorOptions {
editor?: "vscode" | "subl";
line?: number;
@@ -3476,6 +3516,13 @@ declare module "bun" {
* Filtered data consists mostly of small values with a somewhat random distribution.
*/
strategy?: number;

library?: "zlib";
}

interface LibdeflateCompressionOptions {
level?: 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12;
library?: "libdeflate";
}

/**
@@ -3484,26 +3531,38 @@ declare module "bun" {
* @param options Compression options to use
* @returns The output buffer with the compressed data
*/
function deflateSync(data: Uint8Array | string | ArrayBuffer, options?: ZlibCompressionOptions): Uint8Array;
function deflateSync(
data: Uint8Array | string | ArrayBuffer,
options?: ZlibCompressionOptions | LibdeflateCompressionOptions,
): Uint8Array;
/**
* Compresses a chunk of data with `zlib` GZIP algorithm.
* @param data The buffer of data to compress
* @param options Compression options to use
* @returns The output buffer with the compressed data
*/
function gzipSync(data: Uint8Array | string | ArrayBuffer, options?: ZlibCompressionOptions): Uint8Array;
function gzipSync(
data: Uint8Array | string | ArrayBuffer,
options?: ZlibCompressionOptions | LibdeflateCompressionOptions,
): Uint8Array;
/**
* Decompresses a chunk of data with `zlib` INFLATE algorithm.
* @param data The buffer of data to decompress
* @returns The output buffer with the decompressed data
*/
function inflateSync(data: Uint8Array | string | ArrayBuffer): Uint8Array;
function inflateSync(
data: Uint8Array | string | ArrayBuffer,
options?: ZlibCompressionOptions | LibdeflateCompressionOptions,
): Uint8Array;
/**
* Decompresses a chunk of data with `zlib` GUNZIP algorithm.
* @param data The buffer of data to decompress
* @returns The output buffer with the decompressed data
*/
function gunzipSync(data: Uint8Array | string | ArrayBuffer): Uint8Array;
function gunzipSync(
data: Uint8Array | string | ArrayBuffer,
options?: ZlibCompressionOptions | LibdeflateCompressionOptions,
): Uint8Array;
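With the widened overloads, callers can keep the zlib defaults or opt into the libdeflate backend per call. A sketch (the payload is illustrative; zlib levels 0-9 are the usual zlib range, libdeflate levels per `LibdeflateCompressionOptions` above):

```ts
import { gzipSync, gunzipSync } from "bun";

const input = new TextEncoder().encode("hello ".repeat(1000));

// zlib backend (the previous default)
const viaZlib = gzipSync(input, { library: "zlib", level: 6 });

// libdeflate backend, levels 0-12
const viaLibdeflate = gzipSync(input, { library: "libdeflate", level: 12 });

console.log(viaZlib.byteLength, viaLibdeflate.byteLength);
console.log(gunzipSync(viaLibdeflate).byteLength === input.byteLength); // true
```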

type Target =
/**
@@ -3823,7 +3882,7 @@ declare module "bun" {
*/
const isMainThread: boolean;

interface Socket<Data = undefined> {
interface Socket<Data = undefined> extends Disposable {
/**
* Write `data` to the socket
*
@@ -4105,7 +4164,7 @@ declare module "bun" {
setMaxSendFragment(size: number): boolean;
}

interface SocketListener<Data = undefined> {
interface SocketListener<Data = undefined> extends Disposable {
stop(closeActiveConnections?: boolean): void;
ref(): void;
unref(): void;
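Since `Socket` and `SocketListener` now extend `Disposable`, both can participate in explicit resource management. A sketch using `using` (hostname, port, and handlers are illustrative, and the assumption is that disposal stops the listener):

```ts
{
  using listener = Bun.listen({
    hostname: "localhost",
    port: 7000,
    socket: {
      data(socket, chunk) {
        socket.write(chunk); // simple echo
      },
    },
  });
  // ... accept traffic while the listener is in scope ...
} // listener[Symbol.dispose]() runs here, releasing the listener
```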
packages/bun-types/globals.d.ts (59 changes, vendored)
@@ -907,26 +907,42 @@ declare global {
new (): ShadowRealm;
};

/**
* Send an HTTP(S) request
*
* @param request Request object
* @param init A structured value that contains settings for the fetch() request.
*
* @returns A promise that resolves to {@link Response} object.
*/
interface Fetch {
/**
* Send an HTTP(S) request
*
* @param request Request object
* @param init A structured value that contains settings for the fetch() request.
*
* @returns A promise that resolves to {@link Response} object.
*/
(request: Request, init?: RequestInit): Promise<Response>;

// tslint:disable-next-line:unified-signatures
function fetch(request: Request, init?: RequestInit): Promise<Response>;
/**
* Send an HTTP(S) request
*
* @param url URL string
* @param init A structured value that contains settings for the fetch() request.
*
* @returns A promise that resolves to {@link Response} object.
*/
function fetch(url: string | URL | Request, init?: FetchRequestInit): Promise<Response>;
/**
* Send an HTTP(S) request
*
* @param url URL string
* @param init A structured value that contains settings for the fetch() request.
*
* @returns A promise that resolves to {@link Response} object.
*/
(url: string | URL | Request, init?: FetchRequestInit): Promise<Response>;

(input: string | URL | globalThis.Request, init?: RequestInit): Promise<Response>;

/**
* Start the DNS resolution, TCP connection, and TLS handshake for a request
* before the request is actually sent.
*
* This can reduce the latency of a request when you know there's some
* long-running task that will delay the request starting.
*
* This is a bun-specific API and is not part of the Fetch API specification.
*/
preconnect(url: string | URL): void;
}

var fetch: Fetch;
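The point of `preconnect` is to overlap connection setup with other work. A sketch (the URL and the intervening work are illustrative):

```ts
// Kick off DNS + TCP + TLS for the origin right away...
fetch.preconnect("https://example.com");

// ...do something slow that would otherwise delay the request...
await new Promise(resolve => setTimeout(resolve, 250));

// ...then the actual request can reuse the warm connection.
const res = await fetch("https://example.com/api/data");
```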

function queueMicrotask(callback: (...args: any[]) => void): void;
/**
@@ -939,6 +955,7 @@ declare global {
ref(): Timer;
unref(): Timer;
hasRef(): boolean;
refresh(): Timer;

[Symbol.toPrimitive](): number;
}
@@ -1901,6 +1918,10 @@ declare global {
* closely to the `BodyMixin` API.
*/
formData(): Promise<FormData>;
/**
* Returns a promise that resolves to the contents of the blob as a Uint8Array (array of bytes); it's the same as `new Uint8Array(await blob.arrayBuffer())`
*/
bytes(): Promise<Uint8Array>;
}
var Blob: typeof globalThis extends {
onerror: any;
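The `bytes()` helper added above is shorthand for the `arrayBuffer()` round trip; a quick comparison:

```ts
const blob = new Blob(["hello"]);

const viaBytes = await blob.bytes();
const viaArrayBuffer = new Uint8Array(await blob.arrayBuffer());

console.log(viaBytes.length === viaArrayBuffer.length); // true
```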
packages/bun-types/jsc.d.ts (35 changes, vendored)
@@ -78,21 +78,7 @@ declare module "bun:jsc" {
*/
function setTimeZone(timeZone: string): string;

/**
* Run JavaScriptCore's sampling profiler for a particular function
*
* This is pretty low-level.
*
* Things to know:
* - LLint means "Low Level Interpreter", which is the interpreter that runs before any JIT compilation
* - Baseline is the first JIT compilation tier. It's the least optimized, but the fastest to compile
* - DFG means "Data Flow Graph", which is the second JIT compilation tier. It has some optimizations, but is slower to compile
* - FTL means "Faster Than Light", which is the third JIT compilation tier. It has the most optimizations, but is the slowest to compile
*/
function profile(
callback: CallableFunction,
sampleInterval?: number,
): {
interface SamplingProfile {
/**
* A formatted summary of the top functions
*
@@ -183,7 +169,24 @@ declare module "bun:jsc" {
* Stack traces of the top functions
*/
stackTraces: string[];
};
}

/**
* Run JavaScriptCore's sampling profiler for a particular function
*
* This is pretty low-level.
*
* Things to know:
* - LLint means "Low Level Interpreter", which is the interpreter that runs before any JIT compilation
* - Baseline is the first JIT compilation tier. It's the least optimized, but the fastest to compile
* - DFG means "Data Flow Graph", which is the second JIT compilation tier. It has some optimizations, but is slower to compile
* - FTL means "Faster Than Light", which is the third JIT compilation tier. It has the most optimizations, but is the slowest to compile
*/
function profile<T extends (...args: any[]) => any>(
callback: T,
sampleInterval?: number,
...args: Parameters<T>
): ReturnType<T> extends Promise<infer U> ? Promise<SamplingProfile> : SamplingProfile;
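With the generic signature, trailing arguments are forwarded to the callback, and async callbacks yield a promised profile. A sketch (the workload is illustrative):

```ts
import { profile } from "bun:jsc";

function fib(n: number): number {
  return n < 2 ? n : fib(n - 1) + fib(n - 2);
}

// `30` is forwarded to fib via ...args; fib is synchronous,
// so the result is a SamplingProfile rather than a promise.
const report = profile(fib, 100, 30);
console.log(report.stackTraces.length);
```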

/**
* This returns objects which native code has explicitly protected from being
packages/bun-types/sqlite.d.ts (26 changes, vendored)
@@ -36,7 +36,7 @@ declare module "bun:sqlite" {
* ```ts
* const db = new Database("mydb.sqlite");
* db.run("CREATE TABLE foo (bar TEXT)");
* db.run("INSERT INTO foo VALUES (?)", "baz");
* db.run("INSERT INTO foo VALUES (?)", ["baz"]);
* console.log(db.query("SELECT * FROM foo").all());
* ```
*
@@ -47,7 +47,7 @@ declare module "bun:sqlite" {
* ```ts
* const db = new Database(":memory:");
* db.run("CREATE TABLE foo (bar TEXT)");
* db.run("INSERT INTO foo VALUES (?)", "hiiiiii");
* db.run("INSERT INTO foo VALUES (?)", ["hiiiiii"]);
* console.log(db.query("SELECT * FROM foo").all());
* ```
*
@@ -158,7 +158,7 @@ declare module "bun:sqlite" {
* @example
* ```ts
* db.run("CREATE TABLE foo (bar TEXT)");
* db.run("INSERT INTO foo VALUES (?)", "baz");
* db.run("INSERT INTO foo VALUES (?)", ["baz"]);
* ```
*
* Useful for queries like:
@@ -268,9 +268,9 @@ declare module "bun:sqlite" {
* @example
* ```ts
* db.run("CREATE TABLE foo (bar TEXT)");
* db.run("INSERT INTO foo VALUES (?)", "baz");
* db.run("INSERT INTO foo VALUES (?)", ["baz"]);
* db.run("BEGIN");
* db.run("INSERT INTO foo VALUES (?)", "qux");
* db.run("INSERT INTO foo VALUES (?)", ["qux"]);
* console.log(db.inTransaction());
* ```
*/
@@ -536,7 +536,7 @@ declare module "bun:sqlite" {
* // => [{bar: "baz"}]
*
* stmt.all();
* // => [{bar: "baz"}]
* // => []
*
* stmt.all("foo");
* // => [{bar: "foo"}]
@@ -555,14 +555,14 @@ declare module "bun:sqlite" {
* ```ts
* const stmt = db.prepare("SELECT * FROM foo WHERE bar = ?");
*
* stmt.all("baz");
* // => [{bar: "baz"}]
* stmt.get("baz");
* // => {bar: "baz"}
*
* stmt.all();
* // => [{bar: "baz"}]
* stmt.get();
* // => null
*
* stmt.all("foo");
* // => [{bar: "foo"}]
* stmt.get("foo");
* // => {bar: "foo"}
* ```
*
* The following types can be used when binding parameters:
@@ -747,7 +747,7 @@ declare module "bun:sqlite" {
* query.as(User);
* const user = query.get();
* console.log(user.birthdate);
* // => Date(1995, 11, 19)
* // => Date(1995, 12, 19)
* ```
*/
as<T = unknown>(Class: new (...args: any[]) => T): Statement<T, ParamsType>;
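Putting the updated examples together: array-style parameter binding plus the `as(Class)` row mapping documented above. A sketch (the schema and class are illustrative):

```ts
import { Database } from "bun:sqlite";

class User {
  declare id: number;
  declare name: string;
}

const db = new Database(":memory:");
db.run("CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT)");
// Parameters are bound as an array, matching the updated examples.
db.run("INSERT INTO users (name) VALUES (?)", ["Alice"]);

// Map each row onto the User class.
const query = db.query("SELECT * FROM users WHERE name = ?").as(User);
console.log(query.get("Alice") instanceof User); // true
```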
packages/bun-types/test.d.ts (2 changes, vendored)
@@ -1250,7 +1250,7 @@ declare module "bun:test" {
* - If expected is a `string` or `RegExp`, it will check the `message` property.
* - If expected is an `Error` object, it will check the `name` and `message` properties.
* - If expected is an `Error` constructor, it will check the class of the `Error`.
* - If expected is not provided, it will check if anything as thrown.
* - If expected is not provided, it will check if anything has thrown.
*
* @example
* function fail() {
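The matching rules listed above, exercised one by one (a sketch; the failing function is illustrative):

```ts
import { expect, test } from "bun:test";

test("toThrow matching modes", () => {
  const fail = () => {
    throw new Error("boom");
  };

  expect(fail).toThrow();                  // anything has thrown
  expect(fail).toThrow("boom");            // string checks `message`
  expect(fail).toThrow(/bo+m/);            // RegExp checks `message`
  expect(fail).toThrow(Error);             // constructor checks the class
  expect(fail).toThrow(new Error("boom")); // Error object checks name + message
});
```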
@@ -26,6 +26,10 @@
#include <stdlib.h>

#ifndef _WIN32
// Necessary for the stdint include
#ifndef _GNU_SOURCE
#define _GNU_SOURCE
#endif
#include <sys/types.h>
#include <sys/socket.h>
#include <netinet/in.h>
@@ -38,10 +42,11 @@
#include <mstcpip.h>
#endif

#if defined(__APPLE__) && defined(__aarch64__)
#define HAS_MSGX
#if defined(__APPLE__)
extern int Bun__doesMacOSVersionSupportSendRecvMsgX();
#endif


/* We need to emulate sendmmsg, recvmmsg on platforms that don't have them */
int bsd_sendmmsg(LIBUS_SOCKET_DESCRIPTOR fd, struct udp_sendbuf* sendbuf, int flags) {
#if defined(_WIN32)// || defined(__APPLE__)
@@ -72,32 +77,30 @@ int bsd_sendmmsg(LIBUS_SOCKET_DESCRIPTOR fd, struct udp_sendbuf* sendbuf, int fl
}
return sendbuf->num;
#elif defined(__APPLE__)
// TODO figure out why sendmsg_x fails when one of the messages is empty
// so that we can get rid of this code.
// One of the weird things is that once a non-empty message has been sent on the socket,
// empty messages start working as well. Bizarre.
#ifdef HAS_MSGX
if (sendbuf->has_empty) {
#endif
for (int i = 0; i < sendbuf->num; i++) {
while (1) {
ssize_t ret = sendmsg(fd, &sendbuf->msgvec[i].msg_hdr, flags);
if (ret < 0) {
if (errno == EINTR) continue;
if (errno == EAGAIN || errno == EWOULDBLOCK) return i;
return ret;
}
break;
}
// sendmsg_x does not support addresses.
if (!sendbuf->has_empty && !sendbuf->has_addresses && Bun__doesMacOSVersionSupportSendRecvMsgX()) {
while (1) {
int ret = sendmsg_x(fd, sendbuf->msgvec, sendbuf->num, flags);
if (ret >= 0) return ret;
// If we receive EMSGSIZE, we should use the fallback code.
if (errno == EMSGSIZE) break;
if (errno != EINTR) return ret;
}
return sendbuf->num;
#ifdef HAS_MSGX
}
while (1) {
int ret = sendmsg_x(fd, sendbuf->msgvec, sendbuf->num, flags);
if (ret >= 0 || errno != EINTR) return ret;

for (size_t i = 0, count = sendbuf->num; i < count; i++) {
while (1) {
ssize_t ret = sendmsg(fd, &sendbuf->msgvec[i].msg_hdr, flags);
if (ret < 0) {
if (errno == EINTR) continue;
if (errno == EAGAIN || errno == EWOULDBLOCK) return i;
return ret;
}
break;
}
}
#endif

return sendbuf->num;
#else
while (1) {
int ret = sendmmsg(fd, sendbuf->msgvec, sendbuf->num, flags | MSG_NOSIGNAL);
@@ -119,12 +122,13 @@ int bsd_recvmmsg(LIBUS_SOCKET_DESCRIPTOR fd, struct udp_recvbuf *recvbuf, int fl
return 1;
}
#elif defined(__APPLE__)
#ifdef HAS_MSGX
while (1) {
int ret = recvmsg_x(fd, recvbuf->msgvec, LIBUS_UDP_RECV_COUNT, flags);
if (ret >= 0 || errno != EINTR) return ret;
if (Bun__doesMacOSVersionSupportSendRecvMsgX()) {
while (1) {
int ret = recvmsg_x(fd, recvbuf->msgvec, LIBUS_UDP_RECV_COUNT, flags);
if (ret >= 0 || errno != EINTR) return ret;
}
}
#else

for (int i = 0; i < LIBUS_UDP_RECV_COUNT; ++i) {
while (1) {
ssize_t ret = recvmsg(fd, &recvbuf->msgvec[i].msg_hdr, flags);
@@ -138,7 +142,6 @@ int bsd_recvmmsg(LIBUS_SOCKET_DESCRIPTOR fd, struct udp_recvbuf *recvbuf, int fl
}
}
return LIBUS_UDP_RECV_COUNT;
#endif
#else
while (1) {
int ret = recvmmsg(fd, (struct mmsghdr *)&recvbuf->msgvec, LIBUS_UDP_RECV_COUNT, flags, 0);
@@ -153,19 +156,20 @@ void bsd_udp_setup_recvbuf(struct udp_recvbuf *recvbuf, void *databuf, size_t da
recvbuf->buflen = databuflen;
#else
// assert(databuflen > LIBUS_UDP_MAX_SIZE * LIBUS_UDP_RECV_COUNT);

for (int i = 0; i < LIBUS_UDP_RECV_COUNT; i++) {
memset(recvbuf, 0, sizeof(struct udp_recvbuf));
for (size_t i = 0; i < LIBUS_UDP_RECV_COUNT; i++) {
recvbuf->iov[i].iov_base = (char*)databuf + i * LIBUS_UDP_MAX_SIZE;
recvbuf->iov[i].iov_len = LIBUS_UDP_MAX_SIZE;

recvbuf->msgvec[i].msg_hdr.msg_name = &recvbuf->addr[i];
recvbuf->msgvec[i].msg_hdr.msg_namelen = sizeof(struct sockaddr_storage);

recvbuf->msgvec[i].msg_hdr.msg_iov = &recvbuf->iov[i];
recvbuf->msgvec[i].msg_hdr.msg_iovlen = 1;

recvbuf->msgvec[i].msg_hdr.msg_control = recvbuf->control[i];
recvbuf->msgvec[i].msg_hdr.msg_controllen = 256;
struct msghdr mh = {};
memset(&mh, 0, sizeof(struct msghdr));
mh.msg_name = &recvbuf->addr[i];
mh.msg_namelen = sizeof(struct sockaddr_storage);
mh.msg_iov = &recvbuf->iov[i];
mh.msg_iovlen = 1;
mh.msg_control = recvbuf->control[i];
mh.msg_controllen = sizeof(recvbuf->control[i]);
recvbuf->msgvec[i].msg_hdr = mh;
}
#endif
}
@@ -178,7 +182,12 @@ int bsd_udp_setup_sendbuf(struct udp_sendbuf *buf, size_t bufsize, void** payloa
buf->num = num;
return num;
#else
// TODO: can we skip empty messages altogether? Do we really need to send 0-length messages?
buf->has_empty = 0;

// sendmsg_x docs state it does not support addresses.
buf->has_addresses = 0;

struct mmsghdr *msgvec = buf->msgvec;
// todo check this math
size_t count = (bufsize - sizeof(struct udp_sendbuf)) / (sizeof(struct mmsghdr) + sizeof(struct iovec));
@@ -193,6 +202,9 @@ int bsd_udp_setup_sendbuf(struct udp_sendbuf *buf, size_t bufsize, void** payloa
addr_len = addr->sa_family == AF_INET ? sizeof(struct sockaddr_in)
: addr->sa_family == AF_INET6 ? sizeof(struct sockaddr_in6)
: 0;
if (addr_len > 0) {
buf->has_addresses = 1;
}
}
iov[i].iov_base = payloads[i];
iov[i].iov_len = lengths[i];
@@ -205,6 +217,7 @@ int bsd_udp_setup_sendbuf(struct udp_sendbuf *buf, size_t bufsize, void** payloa
msgvec[i].msg_hdr.msg_flags = 0;
msgvec[i].msg_len = 0;

if (lengths[i] == 0) {
buf->has_empty = 1;
}
@@ -397,7 +410,9 @@ int bsd_addr_get_port(struct bsd_addr_t *addr) {
// called by dispatch_ready_poll
LIBUS_SOCKET_DESCRIPTOR bsd_accept_socket(LIBUS_SOCKET_DESCRIPTOR fd, struct bsd_addr_t *addr) {
LIBUS_SOCKET_DESCRIPTOR accepted_fd;
addr->len = sizeof(addr->mem);

while (1) {
addr->len = sizeof(addr->mem);

#if defined(SOCK_CLOEXEC) && defined(SOCK_NONBLOCK)
// Linux, FreeBSD
@@ -405,12 +420,18 @@ LIBUS_SOCKET_DESCRIPTOR bsd_accept_socket(LIBUS_SOCKET_DESCRIPTOR fd, struct bsd
#else
// Windows, OS X
accepted_fd = accept(fd, (struct sockaddr *) addr, &addr->len);

#endif

/* We cannot rely on addr since it is not initialized if failed */
if (accepted_fd == LIBUS_SOCKET_ERROR) {
return LIBUS_SOCKET_ERROR;
if (UNLIKELY(IS_EINTR(accepted_fd))) {
continue;
}

/* We cannot rely on addr since it is not initialized if failed */
if (accepted_fd == LIBUS_SOCKET_ERROR) {
return LIBUS_SOCKET_ERROR;
}

break;
}

internal_finalize_bsd_addr(addr);
@@ -423,14 +444,22 @@ LIBUS_SOCKET_DESCRIPTOR bsd_accept_socket(LIBUS_SOCKET_DESCRIPTOR fd, struct bsd
#endif
}

int bsd_recv(LIBUS_SOCKET_DESCRIPTOR fd, void *buf, int length, int flags) {
return recv(fd, buf, length, flags);
ssize_t bsd_recv(LIBUS_SOCKET_DESCRIPTOR fd, void *buf, int length, int flags) {
while (1) {
ssize_t ret = recv(fd, buf, length, flags);

if (UNLIKELY(IS_EINTR(ret))) {
continue;
}

return ret;
}
}
#if !defined(_WIN32)
#include <sys/uio.h>

int bsd_write2(LIBUS_SOCKET_DESCRIPTOR fd, const char *header, int header_length, const char *payload, int payload_length) {
ssize_t bsd_write2(LIBUS_SOCKET_DESCRIPTOR fd, const char *header, int header_length, const char *payload, int payload_length) {
struct iovec chunks[2];

chunks[0].iov_base = (char *)header;
@@ -438,13 +467,21 @@ int bsd_write2(LIBUS_SOCKET_DESCRIPTOR fd, const char *header, int header_length
chunks[1].iov_base = (char *)payload;
chunks[1].iov_len = payload_length;

return writev(fd, chunks, 2);
while (1) {
ssize_t written = writev(fd, chunks, 2);

if (UNLIKELY(IS_EINTR(written))) {
continue;
}

return written;
}
}
#else
int bsd_write2(LIBUS_SOCKET_DESCRIPTOR fd, const char *header, int header_length, const char *payload, int payload_length) {
int written = bsd_send(fd, header, header_length, 0);
ssize_t bsd_write2(LIBUS_SOCKET_DESCRIPTOR fd, const char *header, int header_length, const char *payload, int payload_length) {
ssize_t written = bsd_send(fd, header, header_length, 0);
if (written == header_length) {
int second_write = bsd_send(fd, payload, payload_length, 0);
ssize_t second_write = bsd_send(fd, payload, payload_length, 0);
if (second_write > 0) {
written += second_write;
}
@@ -453,26 +490,28 @@ int bsd_write2(LIBUS_SOCKET_DESCRIPTOR fd, const char *header, int header_length
}
#endif

int bsd_send(LIBUS_SOCKET_DESCRIPTOR fd, const char *buf, int length, int msg_more) {

ssize_t bsd_send(LIBUS_SOCKET_DESCRIPTOR fd, const char *buf, int length, int msg_more) {
while (1) {
// MSG_MORE (Linux), MSG_PARTIAL (Windows), TCP_NOPUSH (BSD)

#ifndef MSG_NOSIGNAL
#define MSG_NOSIGNAL 0
#endif

#ifdef MSG_MORE
#ifdef MSG_MORE
// for Linux we do not want signals
ssize_t rc = send(fd, buf, length, ((msg_more != 0) * MSG_MORE) | MSG_NOSIGNAL | MSG_DONTWAIT);
#else
// use TCP_NOPUSH
ssize_t rc = send(fd, buf, length, MSG_NOSIGNAL | MSG_DONTWAIT);
#endif

// for Linux we do not want signals
return send(fd, buf, length, ((msg_more != 0) * MSG_MORE) | MSG_NOSIGNAL | MSG_DONTWAIT);
if (UNLIKELY(IS_EINTR(rc))) {
continue;
}

#else

// use TCP_NOPUSH

return send(fd, buf, length, MSG_NOSIGNAL | MSG_DONTWAIT);

#endif
return rc;
}
}

int bsd_would_block() {
@@ -483,6 +522,23 @@ int bsd_would_block() {
#endif
}

static int us_internal_bind_and_listen(LIBUS_SOCKET_DESCRIPTOR listenFd, struct sockaddr *listenAddr, socklen_t listenAddrLength, int backlog) {
int result;
do
result = bind(listenFd, listenAddr, listenAddrLength);
while (IS_EINTR(result));

if (result == -1) {
return -1;
}

do
result = listen(listenFd, backlog);
while (IS_EINTR(result));

return result;
}

inline __attribute__((always_inline)) LIBUS_SOCKET_DESCRIPTOR bsd_bind_listen_fd(
LIBUS_SOCKET_DESCRIPTOR listenFd,
struct addrinfo *listenAddr,
@@ -512,7 +568,7 @@ inline __attribute__((always_inline)) LIBUS_SOCKET_DESCRIPTOR bsd_bind_listen_fd
setsockopt(listenFd, IPPROTO_IPV6, IPV6_V6ONLY, (void *) &disabled, sizeof(disabled));
#endif

if (bind(listenFd, listenAddr->ai_addr, (socklen_t) listenAddr->ai_addrlen) || listen(listenFd, 512)) {
if (us_internal_bind_and_listen(listenFd, listenAddr->ai_addr, (socklen_t) listenAddr->ai_addrlen, 512)) {
return LIBUS_SOCKET_ERROR;
}

@@ -690,7 +746,7 @@ static LIBUS_SOCKET_DESCRIPTOR internal_bsd_create_listen_socket_unix(const char
unlink(path);
#endif

if (bind(listenFd, (struct sockaddr *)server_address, addrlen) || listen(listenFd, 512)) {
if (us_internal_bind_and_listen(listenFd, (struct sockaddr *) server_address, (socklen_t) addrlen, 512)) {
#if defined(_WIN32)
int shouldSimulateENOENT = WSAGetLastError() == WSAENETDOWN;
#endif
@@ -838,7 +894,7 @@ int bsd_connect_udp_socket(LIBUS_SOCKET_DESCRIPTOR fd, const char *host, int por
}

freeaddrinfo(result);
return LIBUS_SOCKET_ERROR;
return (int)LIBUS_SOCKET_ERROR;
}

int bsd_disconnect_udp_socket(LIBUS_SOCKET_DESCRIPTOR fd) {
@@ -925,7 +981,7 @@ static int bsd_do_connect_raw(LIBUS_SOCKET_DESCRIPTOR fd, struct sockaddr *addr,
do {
errno = 0;
r = connect(fd, (struct sockaddr *)addr, namelen);
} while (r == -1 && errno == EINTR);
} while (IS_EINTR(r));

// connect() can return -1 with an errno of 0.
// the errno is the correct one in that case.
@@ -15,18 +15,18 @@
* limitations under the License.
*/

#include "libusockets.h"
#include "internal/internal.h"
#include "libusockets.h"
#include <errno.h>
#include <stdlib.h>
#include <string.h>
#include <errno.h>

#ifndef _WIN32
#include <arpa/inet.h>
#endif

#define CONCURRENT_CONNECTIONS 2
#define CONCURRENT_CONNECTIONS 4

// clang-format off
int default_is_low_prio_handler(struct us_socket_t *s) {
return 0;
}
@@ -44,7 +44,7 @@ int us_raw_root_certs(struct us_cert_string_t**out){
void us_listen_socket_close(int ssl, struct us_listen_socket_t *ls) {
/* us_listen_socket_t extends us_socket_t so we close in similar ways */
if (!us_socket_is_closed(0, &ls->s)) {
us_internal_socket_context_unlink_listen_socket(ls->s.context, ls);
us_internal_socket_context_unlink_listen_socket(ssl, ls->s.context, ls);
us_poll_stop((struct us_poll_t *) &ls->s, ls->s.context->loop);
bsd_close_socket(us_poll_fd((struct us_poll_t *) &ls->s));

@@ -60,11 +60,19 @@ void us_listen_socket_close(int ssl, struct us_listen_socket_t *ls) {
}

void us_socket_context_close(int ssl, struct us_socket_context_t *context) {
/* Begin by closing all listen sockets */
/* First, close any pending connecting sockets */
struct us_connecting_socket_t *c = context->head_connecting_sockets;
while (c) {
struct us_connecting_socket_t *nextC = c->next_pending;
us_connecting_socket_close(ssl, c);
c = nextC;
}
/* After this, close all listen sockets */
struct us_listen_socket_t *ls = context->head_listen_sockets;
while (ls) {
struct us_listen_socket_t *nextLS = (struct us_listen_socket_t *) ls->s.next;
us_listen_socket_close(ssl, ls);

ls = nextLS;
}

@@ -72,12 +80,12 @@ void us_socket_context_close(int ssl, struct us_socket_context_t *context) {
struct us_socket_t *s = context->head_sockets;
while (s) {
struct us_socket_t *nextS = s->next;
us_socket_close(ssl, s, 0, 0);
us_socket_close(ssl, s, LIBUS_SOCKET_CLOSE_CODE_CLEAN_SHUTDOWN, 0);
s = nextS;
}
}

void us_internal_socket_context_unlink_listen_socket(struct us_socket_context_t *context, struct us_listen_socket_t *ls) {
void us_internal_socket_context_unlink_listen_socket(int ssl, struct us_socket_context_t *context, struct us_listen_socket_t *ls) {
/* We have to properly update the iterator used to sweep sockets for timeouts */
if (ls == (struct us_listen_socket_t *) context->iterator) {
context->iterator = ls->s.next;
@@ -95,9 +103,10 @@ void us_internal_socket_context_unlink_listen_socket(struct us_socket_context_t
ls->s.next->prev = ls->s.prev;
}
}
us_socket_context_unref(ssl, context);
}

void us_internal_socket_context_unlink_socket(struct us_socket_context_t *context, struct us_socket_t *s) {
void us_internal_socket_context_unlink_socket(int ssl, struct us_socket_context_t *context, struct us_socket_t *s) {
/* We have to properly update the iterator used to sweep sockets for timeouts */
if (s == context->iterator) {
context->iterator = s->next;
@@ -115,6 +124,22 @@ void us_internal_socket_context_unlink_socket(struct us_socket_context_t *contex
s->next->prev = s->prev;
}
}
us_socket_context_unref(ssl, context);
}
void us_internal_socket_context_unlink_connecting_socket(int ssl, struct us_socket_context_t *context, struct us_connecting_socket_t *c) {
if (c->prev_pending == c->next_pending) {
context->head_connecting_sockets = 0;
} else {
if (c->prev_pending) {
c->prev_pending->next_pending = c->next_pending;
} else {
context->head_connecting_sockets = c->next_pending;
}
if (c->next_pending) {
c->next_pending->prev_pending = c->prev_pending;
}
}
us_socket_context_unref(ssl, context);
}

/* We always add in the top, so we don't modify any s.next */
@@ -126,8 +151,21 @@ void us_internal_socket_context_link_listen_socket(struct us_socket_context_t *c
context->head_listen_sockets->s.prev = &ls->s;
}
context->head_listen_sockets = ls;
us_socket_context_ref(0, context);
}

void us_internal_socket_context_link_connecting_socket(int ssl, struct us_socket_context_t *context, struct us_connecting_socket_t *c) {
c->context = context;
c->next_pending = context->head_connecting_sockets;
c->prev_pending = 0;
if (context->head_connecting_sockets) {
context->head_connecting_sockets->prev_pending = c;
}
context->head_connecting_sockets = c;
us_socket_context_ref(ssl, context);
}


/* We always add in the top, so we don't modify any s.next */
void us_internal_socket_context_link_socket(struct us_socket_context_t *context, struct us_socket_t *s) {
s->context = context;
@@ -137,6 +175,7 @@ void us_internal_socket_context_link_socket(struct us_socket_context_t *context,
context->head_sockets->prev = s;
}
context->head_sockets = s;
us_socket_context_ref(0, context);
}

struct us_loop_t *us_socket_context_loop(int ssl, struct us_socket_context_t *context) {
@@ -231,6 +270,7 @@ struct us_socket_context_t *us_create_socket_context(int ssl, struct us_loop_t *
struct us_socket_context_t *context = us_calloc(1, sizeof(struct us_socket_context_t) + context_ext_size);
context->loop = loop;
context->is_low_prio = default_is_low_prio_handler;
context->ref_count = 1;

us_internal_loop_link(loop, context);

@@ -252,6 +292,7 @@ struct us_socket_context_t *us_create_bun_socket_context(int ssl, struct us_loop
struct us_socket_context_t *context = us_calloc(1, sizeof(struct us_socket_context_t) + context_ext_size);
context->loop = loop;
context->is_low_prio = default_is_low_prio_handler;
context->ref_count = 1;

us_internal_loop_link(loop, context);

@@ -271,8 +312,8 @@ struct us_bun_verify_error_t us_socket_verify_error(int ssl, struct us_socket_t
return (struct us_bun_verify_error_t) { .error = 0, .code = NULL, .reason = NULL };
}

void us_internal_socket_context_free(int ssl, struct us_socket_context_t *context) {

void us_socket_context_free(int ssl, struct us_socket_context_t *context) {
#ifndef LIBUS_NO_SSL
if (ssl) {
/* This function will call us again with SSL=false */
@@ -285,7 +326,24 @@ void us_socket_context_free(int ssl, struct us_socket_context_t *context) {
* This is the opposite order compared to when creating the context - SSL code is cleaning up before non-SSL */

us_internal_loop_unlink(context->loop, context);
us_free(context);
/* Link this context to the close-list and let it be deleted after this iteration */
context->next = context->loop->data.closed_context_head;
context->loop->data.closed_context_head = context;
}

void us_socket_context_ref(int ssl, struct us_socket_context_t *context) {
context->ref_count++;
}
void us_socket_context_unref(int ssl, struct us_socket_context_t *context) {
uint32_t ref_count = context->ref_count;
context->ref_count--;
if (ref_count == 1) {
us_internal_socket_context_free(ssl, context);
}
}

void us_socket_context_free(int ssl, struct us_socket_context_t *context) {
us_socket_context_unref(ssl, context);
}

struct us_listen_socket_t *us_socket_context_listen(int ssl, struct us_socket_context_t *context, const char *host, int port, int options, int socket_ext_size) {
@@ -456,14 +514,14 @@ void *us_socket_context_connect(int ssl, struct us_socket_context_t *context, co
}

struct us_connecting_socket_t *c = us_calloc(1, sizeof(struct us_connecting_socket_t) + socket_ext_size);
c->socket_ext_size = socket_ext_size;
c->context = context;
c->socket_ext_size = socket_ext_size;
c->options = options;
c->ssl = ssl > 0;
c->timeout = 255;
c->long_timeout = 255;
c->pending_resolve_callback = 1;
c->port = port;
us_internal_socket_context_link_connecting_socket(ssl, context, c);

#ifdef _WIN32
loop->uv_loop->active_handles++;
@@ -525,15 +583,12 @@ void us_internal_socket_after_resolve(struct us_connecting_socket_t *c) {
c->pending_resolve_callback = 0;
// if the socket was closed while we were resolving the address, free it
if (c->closed) {
us_connecting_socket_free(c);
us_connecting_socket_free(c->ssl, c);
return;
}
struct addrinfo_result *result = Bun__addrinfo_getRequestResult(c->addrinfo_req);
if (result->error) {
c->error = result->error;
c->context->on_connect_error(c, result->error);
Bun__addrinfo_freeRequest(c->addrinfo_req, 0);
us_connecting_socket_close(0, c);
us_connecting_socket_close(c->ssl, c);
return;
}

@@ -541,10 +596,7 @@ void us_internal_socket_after_resolve(struct us_connecting_socket_t *c) {

int opened = start_connections(c, CONCURRENT_CONNECTIONS);
if (opened == 0) {
c->error = ECONNREFUSED;
c->context->on_connect_error(c, ECONNREFUSED);
Bun__addrinfo_freeRequest(c->addrinfo_req, 1);
us_connecting_socket_close(0, c);
us_connecting_socket_close(c->ssl, c);
return;
}
}
@@ -612,10 +664,7 @@ void us_internal_socket_after_open(struct us_socket_t *s, int error) {
// we have run out of addresses to attempt, signal the connection error
// but only if there are no other sockets in the list
if (opened == 0 && c->connecting_head == NULL) {
c->error = ECONNREFUSED;
c->context->on_connect_error(c, error);
Bun__addrinfo_freeRequest(c->addrinfo_req, ECONNREFUSED);
us_connecting_socket_close(0, c);
us_connecting_socket_close(c->ssl, c);
}
}
} else {
@@ -644,7 +693,7 @@ void us_internal_socket_after_open(struct us_socket_t *s, int error) {
}
// now that the socket is open, we can release the associated us_connecting_socket_t if it exists
Bun__addrinfo_freeRequest(c->addrinfo_req, 0);
us_connecting_socket_free(c);
us_connecting_socket_free(c->ssl, c);
s->connect_state = NULL;
}

@@ -703,13 +752,15 @@ struct us_socket_t *us_socket_context_adopt_socket(int ssl, struct us_socket_con
#endif

/* Cannot adopt a closed socket */
if (us_socket_is_closed(ssl, s)) {
if (us_socket_is_closed(ssl, s) || us_socket_is_shut_down(ssl, s)) {
return s;
}

if (s->low_prio_state != 1) {
/* We need to be sure that we are still holding a reference */
us_socket_context_ref(ssl, context);
/* This properly updates the iterator if in on_timeout */
us_internal_socket_context_unlink_socket(s->context, s);
us_internal_socket_context_unlink_socket(ssl, s->context, s);
}


@@ -720,7 +771,10 @@ struct us_socket_t *us_socket_context_adopt_socket(int ssl, struct us_socket_con
new_s = (struct us_socket_t *) us_poll_resize(&s->p, s->context->loop, sizeof(struct us_socket_t) + ext_size);
if (c) {
c->connecting_head = new_s;
struct us_socket_context_t *old_context = s->context;
c->context = context;
us_internal_socket_context_link_connecting_socket(ssl, context, c);
us_internal_socket_context_unlink_connecting_socket(ssl, old_context, c);
}
}
new_s->timeout = 255;
@@ -734,6 +788,7 @@ struct us_socket_t *us_socket_context_adopt_socket(int ssl, struct us_socket_con
if (new_s->next) new_s->next->prev = new_s;
} else {
us_internal_socket_context_link_socket(context, new_s);
us_socket_context_unref(ssl, context);
}

return new_s;

@@ -14,8 +14,14 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/

// clang-format off
#if (defined(LIBUS_USE_OPENSSL) || defined(LIBUS_USE_WOLFSSL))


#include "internal/internal.h"
#include "libusockets.h"
#include <string.h>

/* These are in sni_tree.cpp */
void *sni_new();
void sni_free(void *sni, void (*cb)(void *));
@@ -23,10 +29,6 @@ int sni_add(void *sni, const char *hostname, void *user);
void *sni_remove(void *sni, const char *hostname);
void *sni_find(void *sni, const char *hostname);

#include "internal/internal.h"
#include "libusockets.h"
#include <string.h>

/* This module contains the entire OpenSSL implementation
* of the SSL socket and socket context interfaces. */
#ifdef LIBUS_USE_OPENSSL
@@ -71,10 +73,6 @@ struct us_internal_ssl_socket_context_t {
// socket context
SSL_CTX *ssl_context;
int is_parent;
#if ALLOW_SERVER_RENEGOTIATION
unsigned int client_renegotiation_limit;
unsigned int client_renegotiation_window;
#endif
/* These decorate the base implementation */
struct us_internal_ssl_socket_t *(*on_open)(struct us_internal_ssl_socket_t *,
int is_client, char *ip,
@@ -86,6 +84,10 @@ struct us_internal_ssl_socket_context_t {
struct us_internal_ssl_socket_t *(*on_close)(
struct us_internal_ssl_socket_t *, int code, void *reason);

struct us_internal_ssl_socket_t *(*on_timeout)(
struct us_internal_ssl_socket_t *);
struct us_internal_ssl_socket_t *(*on_long_timeout)(struct us_internal_ssl_socket_t *);

/* Called for missing SNI hostnames, if not NULL */
void (*on_server_name)(struct us_internal_ssl_socket_context_t *,
const char *hostname);
@@ -108,15 +110,10 @@ enum {
struct us_internal_ssl_socket_t {
struct us_socket_t s;
SSL *ssl; // this _must_ be the first member after s
#if ALLOW_SERVER_RENEGOTIATION
unsigned int client_pending_renegotiations;
uint64_t last_ssl_renegotiation;
unsigned int is_client : 1;
#endif
unsigned int ssl_write_wants_read : 1; // we use this for now
unsigned int ssl_read_wants_write : 1;
unsigned int handshake_state : 2;
unsigned int received_ssl_shutdown : 1;
unsigned int fatal_error : 1;
};

int passphrase_cb(char *buf, int size, int rwflag, void *u) {
@@ -182,10 +179,9 @@ int BIO_s_custom_read(BIO *bio, char *dst, int length) {
return length;
}

struct us_internal_ssl_socket_t *ssl_on_open(struct us_internal_ssl_socket_t *s,
int is_client, char *ip,
int ip_length) {

struct loop_ssl_data * us_internal_set_loop_ssl_data(struct us_internal_ssl_socket_t *s) {
// note: this context can change when we adopt the socket!
struct us_internal_ssl_socket_context_t *context =
(struct us_internal_ssl_socket_context_t *)us_socket_context(0, &s->s);

@@ -193,17 +189,31 @@ struct us_internal_ssl_socket_t *ssl_on_open(struct us_internal_ssl_socket_t *s,
struct loop_ssl_data *loop_ssl_data =
(struct loop_ssl_data *)loop->data.ssl_data;

s->ssl = SSL_new(context->ssl_context);
#if ALLOW_SERVER_RENEGOTIATION
s->client_pending_renegotiations = context->client_renegotiation_limit;
s->last_ssl_renegotiation = 0;
s->is_client = is_client ? 1 : 0;
// note: if we put data here we should never really clear it (not in write
// either, it still should be available for SSL_write to read from!)

#endif
loop_ssl_data->ssl_read_input_length = 0;
loop_ssl_data->ssl_read_input_offset = 0;
loop_ssl_data->ssl_socket = &s->s;
loop_ssl_data->msg_more = 0;
return loop_ssl_data;
}

struct us_internal_ssl_socket_t *ssl_on_open(struct us_internal_ssl_socket_t *s,
int is_client, char *ip,
int ip_length) {

struct us_internal_ssl_socket_context_t *context =
(struct us_internal_ssl_socket_context_t *)us_socket_context(0, &s->s);

struct loop_ssl_data *loop_ssl_data = us_internal_set_loop_ssl_data(s);

s->ssl = SSL_new(context->ssl_context);
s->ssl_write_wants_read = 0;
s->ssl_read_wants_write = 0;
s->fatal_error = 0;
s->handshake_state = HANDSHAKE_PENDING;
s->received_ssl_shutdown = 0;


SSL_set_bio(s->ssl, loop_ssl_data->shared_rbio, loop_ssl_data->shared_wbio);
// if we allow renegotiation, we need to set the mode here
@@ -213,24 +223,18 @@ struct us_internal_ssl_socket_t *ssl_on_open(struct us_internal_ssl_socket_t *s,
// this can be a DoS vector for servers, so we enable it using a limit
// we do not use ssl_renegotiate_freely, since ssl_renegotiate_explicit is
// more performant when using BoringSSL
#if ALLOW_SERVER_RENEGOTIATION
if (context->client_renegotiation_limit) {
SSL_set_renegotiate_mode(s->ssl, ssl_renegotiate_explicit);
} else {
SSL_set_renegotiate_mode(s->ssl, ssl_renegotiate_never);
}
#endif


BIO_up_ref(loop_ssl_data->shared_rbio);
BIO_up_ref(loop_ssl_data->shared_wbio);

if (is_client) {
#if ALLOW_SERVER_RENEGOTIATION == 0
SSL_set_renegotiate_mode(s->ssl, ssl_renegotiate_explicit);
#endif
SSL_set_connect_state(s->ssl);
} else {
SSL_set_accept_state(s->ssl);
// we do not allow renegotiation on the server side (should be the default for BoringSSL, but we set it to keep openssl compatible)
SSL_set_renegotiate_mode(s->ssl, ssl_renegotiate_never);
}

struct us_internal_ssl_socket_t *result =
@@ -246,6 +250,64 @@ struct us_internal_ssl_socket_t *ssl_on_open(struct us_internal_ssl_socket_t *s,
return result;
}

/// @brief Complete the shutdown or do a fast shutdown when needed; this should only be called before closing the socket
/// @param s
int us_internal_handle_shutdown(struct us_internal_ssl_socket_t *s, int force_fast_shutdown) {
// if we are already shut down or in the middle of a handshake we don't need to do anything
// Scenarios:
// 1 - SSL is not initialized yet (null)
// 2 - socket is already shut down
// 3 - we already sent a shutdown
// 4 - we are in the middle of a handshake
// 5 - we received a fatal error
if(us_internal_ssl_socket_is_shut_down(s) || s->fatal_error || !SSL_is_init_finished(s->ssl)) return 1;

// we are closing the socket but have not sent a shutdown yet
int state = SSL_get_shutdown(s->ssl);
int sent_shutdown = state & SSL_SENT_SHUTDOWN;
int received_shutdown = state & SSL_RECEIVED_SHUTDOWN;
// if we are missing a shutdown call, we need to do a fast shutdown here
if(!sent_shutdown || !received_shutdown) {
// make sure that the ssl loop data is set
us_internal_set_loop_ssl_data(s);
// Zero means that we should wait for the peer to close the connection,
// but we are already closing the connection so we do a fast shutdown here
int ret = SSL_shutdown(s->ssl);
if(ret == 0 && force_fast_shutdown) {
// do a fast shutdown (don't wait for the peer)
ret = SSL_shutdown(s->ssl);
}
if(ret < 0) {
// we got some error here, but we don't care about it, we are closing the socket
int err = SSL_get_error(s->ssl, ret);
if (err == SSL_ERROR_SSL || err == SSL_ERROR_SYSCALL) {
// clear
ERR_clear_error();
s->fatal_error = 1;
// Fatal error occurred, we should close the socket immediately
return 1;
}
if(err == SSL_ERROR_WANT_READ || err == SSL_ERROR_WANT_WRITE) {
// We are waiting to be readable or writable; this will come in SSL_read to complete the shutdown.
// If we are forcing a fast shutdown we should return 1 here to immediately close the socket.
// Scenarios:
// 1 - We called abort but the socket is not writable or readable anymore (force_fast_shutdown = 1)
// 2 - We called close but want to wait until close_notify is received (force_fast_shutdown = 0)
return force_fast_shutdown ? 1 : 0;
}
// If we got another error we probably never even started the first handshake, or hit a critical error, so just close the socket
// Scenarios:
// 1 - We aborted the connection too fast and did not even start the first handshake
// 2 - SSL is in a broken state
// 3 - SSL is not broken but is in a state that we cannot recover from
s->fatal_error = 1;
return 1;
}
return ret == 1;
}
return 1;
}

void us_internal_on_ssl_handshake(
struct us_internal_ssl_socket_context_t *context,
void (*on_handshake)(struct us_internal_ssl_socket_t *, int success,
@@ -256,9 +318,17 @@ void us_internal_on_ssl_handshake(
context->handshake_data = custom_data;
}

int us_internal_ssl_socket_is_closed(struct us_internal_ssl_socket_t *s) {
return us_socket_is_closed(0, &s->s);
}

struct us_internal_ssl_socket_t *
us_internal_ssl_socket_close(struct us_internal_ssl_socket_t *s, int code,
void *reason) {

// check if we are already closed
if (us_internal_ssl_socket_is_closed(s)) return s;

if (s->handshake_state != HANDSHAKE_COMPLETED) {
// if we have some pending handshake we cancel it and try to check the
// latest handshake error; this way we will always call on_handshake with the
@@ -269,8 +339,14 @@ us_internal_ssl_socket_close(struct us_internal_ssl_socket_t *s, int code,
us_internal_trigger_handshake_callback(s, 0);
}

return (struct us_internal_ssl_socket_t *)us_socket_close(
0, (struct us_socket_t *)s, code, reason);
// if we are in the middle of a close_notify we need to finish it (code != 0 forces a fast shutdown)
int can_close = us_internal_handle_shutdown(s, code != 0);

// only close the socket if we are not in the middle of a handshake
if(can_close) {
return (struct us_internal_ssl_socket_t *)us_socket_close(0, (struct us_socket_t *)s, code, reason);
}
return s;
}

void us_internal_trigger_handshake_callback(struct us_internal_ssl_socket_t *s,
@@ -292,26 +368,7 @@ int us_internal_ssl_renegotiate(struct us_internal_ssl_socket_t *s) {
// if it is a server and we have no pending renegotiation we can check
// the limits
s->handshake_state = HANDSHAKE_RENEGOTIATION_PENDING;
#if ALLOW_SERVER_RENEGOTIATION
if (!s->is_client && !SSL_renegotiate_pending(s->ssl)) {
uint64_t now = time(NULL);
struct us_internal_ssl_socket_context_t *context =
(struct us_internal_ssl_socket_context_t *)us_socket_context(0, &s->s);
// if it is not the first time we negotiate and we are outside the time
// window, reset the limits
if (s->last_ssl_renegotiation && (now - s->last_ssl_renegotiation) >=
context->client_renegotiation_window) {
// reset the limits
s->client_pending_renegotiations = context->client_renegotiation_limit;
}
// if we have no more renegotiations, we should close the connection
if (s->client_pending_renegotiations == 0) {
return 0;
}
s->last_ssl_renegotiation = now;
s->client_pending_renegotiations--;
}
#endif

if (!SSL_renegotiate(s->ssl)) {
// we failed to renegotiate
us_internal_trigger_handshake_callback(s, 0);
@@ -321,24 +378,13 @@ int us_internal_ssl_renegotiate(struct us_internal_ssl_socket_t *s) {
|
||||
}

void us_internal_update_handshake(struct us_internal_ssl_socket_t *s) {
  struct us_internal_ssl_socket_context_t *context =
      (struct us_internal_ssl_socket_context_t *)us_socket_context(0, &s->s);

  // nothing to do here, renegotiation must be handled in SSL_read
  if (s->handshake_state != HANDSHAKE_PENDING)
    return;

  struct us_loop_t *loop = us_socket_context_loop(0, &context->sc);
  struct loop_ssl_data *loop_ssl_data =
      (struct loop_ssl_data *)loop->data.ssl_data;

  loop_ssl_data->ssl_read_input_length = 0;
  loop_ssl_data->ssl_read_input_offset = 0;
  loop_ssl_data->ssl_socket = &s->s;
  loop_ssl_data->msg_more = 0;

  if (us_socket_is_closed(0, &s->s) || us_internal_ssl_socket_is_shut_down(s) ||
      SSL_get_shutdown(s->ssl) & SSL_RECEIVED_SHUTDOWN) {

  if (us_internal_ssl_socket_is_closed(s) || us_internal_ssl_socket_is_shut_down(s) ||
      (s->ssl && SSL_get_shutdown(s->ssl) & SSL_RECEIVED_SHUTDOWN)) {

    us_internal_trigger_handshake_callback(s, 0);
    return;
@@ -347,7 +393,6 @@ void us_internal_update_handshake(struct us_internal_ssl_socket_t *s) {
  int result = SSL_do_handshake(s->ssl);

  if (SSL_get_shutdown(s->ssl) & SSL_RECEIVED_SHUTDOWN) {
    s->received_ssl_shutdown = 1;
    us_internal_ssl_socket_close(s, 0, NULL);
    return;
  }
@@ -356,30 +401,23 @@ void us_internal_update_handshake(struct us_internal_ssl_socket_t *s) {
  int err = SSL_get_error(s->ssl, result);
  // as far as I know these are the only errors we want to handle
  if (err != SSL_ERROR_WANT_READ && err != SSL_ERROR_WANT_WRITE) {
    us_internal_trigger_handshake_callback(s, 1);

    // clear the per-thread error queue if it may contain something
    if (err == SSL_ERROR_SSL || err == SSL_ERROR_SYSCALL) {
      ERR_clear_error();
      s->fatal_error = 1;
    }
    us_internal_trigger_handshake_callback(s, 0);

    return;
  }
  s->handshake_state = HANDSHAKE_PENDING;
  // Ensure that we'll cycle through OpenSSL's internal state
  if (!us_socket_is_closed(0, &s->s) &&
      !us_internal_ssl_socket_is_shut_down(s)) {
    us_socket_write(1, loop_ssl_data->ssl_socket, "\0", 0, 0);
  }
  s->ssl_write_wants_read = 1;

  return;
}
// success
us_internal_trigger_handshake_callback(s, 1);
// Ensure that we'll cycle through OpenSSL's internal state
if (!us_socket_is_closed(0, &s->s) &&
    !us_internal_ssl_socket_is_shut_down(s)) {
  us_socket_write(1, loop_ssl_data->ssl_socket, "\0", 0, 0);
}
s->ssl_write_wants_read = 1;
}
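
The function above is an instance of the standard non-blocking handshake pump: SSL_ERROR_WANT_READ / WANT_WRITE mean "retry when the transport is ready"; everything else is terminal. A generic sketch of that pattern (hypothetical helper, not the diff's code):

// Generic non-blocking handshake pump with OpenSSL.
// Returns 1 when complete, 0 while in progress, -1 on a fatal error.
static int sketch_pump_handshake(SSL *ssl) {
    int result = SSL_do_handshake(ssl);
    if (result == 1) {
        return 1; // handshake completed
    }
    int err = SSL_get_error(ssl, result);
    if (err == SSL_ERROR_WANT_READ || err == SSL_ERROR_WANT_WRITE) {
        return 0; // still in progress: wait for the next I/O event
    }
    if (err == SSL_ERROR_SSL || err == SSL_ERROR_SYSCALL) {
        ERR_clear_error(); // these push onto the per-thread error queue
    }
    return -1; // fatal: caller should fail the handshake callback
}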

struct us_internal_ssl_socket_t *
@@ -387,16 +425,33 @@ ssl_on_close(struct us_internal_ssl_socket_t *s, int code, void *reason) {
  struct us_internal_ssl_socket_context_t *context =
      (struct us_internal_ssl_socket_context_t *)us_socket_context(0, &s->s);

  SSL_free(s->ssl);
  us_internal_set_loop_ssl_data(s);
  struct us_internal_ssl_socket_t *ret = context->on_close(s, code, reason);
  SSL_free(s->ssl); // free SSL after on_close
  s->ssl = NULL;    // prevent any further use of the freed handle
  return ret;
}

  return context->on_close(s, code, reason);
struct us_internal_ssl_socket_t *ssl_on_timeout(struct us_internal_ssl_socket_t *s) {
  struct us_internal_ssl_socket_context_t *context =
      (struct us_internal_ssl_socket_context_t *)us_socket_context(0, &s->s);

  us_internal_set_loop_ssl_data(s);
  return context->on_timeout(s);
}

struct us_internal_ssl_socket_t *ssl_on_long_timeout(struct us_internal_ssl_socket_t *s) {
  struct us_internal_ssl_socket_context_t *context =
      (struct us_internal_ssl_socket_context_t *)us_socket_context(0, &s->s);

  us_internal_set_loop_ssl_data(s);
  return context->on_long_timeout(s);
}

struct us_internal_ssl_socket_t *
ssl_on_end(struct us_internal_ssl_socket_t *s) {
  us_internal_set_loop_ssl_data(s);
  // whatever state we are in, a TCP FIN is always an answered shutdown

  /* Todo: this should report CLEANLY SHUTDOWN as reason */
  return us_internal_ssl_socket_close(s, 0, NULL);
}

@@ -408,43 +463,20 @@ struct us_internal_ssl_socket_t *ssl_on_data(struct us_internal_ssl_socket_t *s,
  struct us_internal_ssl_socket_context_t *context =
      (struct us_internal_ssl_socket_context_t *)us_socket_context(0, &s->s);

  struct us_loop_t *loop = us_socket_context_loop(0, &context->sc);
  struct loop_ssl_data *loop_ssl_data =
      (struct loop_ssl_data *)loop->data.ssl_data;
  struct loop_ssl_data *loop_ssl_data = us_internal_set_loop_ssl_data(s);

  // note: if we put data here we should never really clear it (not in write
  // either, it still should be available for SSL_write to read from!)
  loop_ssl_data->ssl_read_input = data;
  loop_ssl_data->ssl_read_input_length = length;
  loop_ssl_data->ssl_read_input_offset = 0;
  loop_ssl_data->ssl_socket = &s->s;
  loop_ssl_data->msg_more = 0;

  if (us_socket_is_closed(0, &s->s) || s->received_ssl_shutdown) {
  if (us_internal_ssl_socket_is_closed(s)) {
    return NULL;
  }

  if (us_internal_ssl_socket_is_shut_down(s)) {

    int ret = 0;
    if ((ret = SSL_shutdown(s->ssl)) == 1) {
      // the two-phase shutdown is complete here

      /* Todo: this should also report some kind of clean shutdown */
      return us_internal_ssl_socket_close(s, 0, NULL);
    } else if (ret < 0) {

      int err = SSL_get_error(s->ssl, ret);

      if (err == SSL_ERROR_SSL || err == SSL_ERROR_SYSCALL) {
        // we need to clear the error queue in case these were added to the
        // thread-local queue
        ERR_clear_error();
      }
    }

    // no further processing of data when in shutdown state
    return s;
    us_internal_ssl_socket_close(s, 0, NULL);
    return NULL;
  }

  // bug checking: this loop needs a lot of attention and clean-ups and
@@ -452,17 +484,12 @@ struct us_internal_ssl_socket_t *ssl_on_data(struct us_internal_ssl_socket_t *s,
  int read = 0;
restart:
  // read until shutdown
  while (!s->received_ssl_shutdown) {
  while (1) {
    int just_read = SSL_read(s->ssl,
                             loop_ssl_data->ssl_read_output +
                                 LIBUS_RECV_BUFFER_PADDING + read,
                             LIBUS_RECV_BUFFER_LENGTH - read);
    // we need to check if we received a shutdown here
    if (SSL_get_shutdown(s->ssl) & SSL_RECEIVED_SHUTDOWN) {
      s->received_ssl_shutdown = 1;
      // we will only close after we handle the data and errors
    }

    if (just_read <= 0) {
      int err = SSL_get_error(s->ssl, just_read);
      // as far as I know these are the only errors we want to handle
@@ -477,8 +504,9 @@ restart:
      // clean and close, renegotiation failed
      err = SSL_ERROR_SSL;
    } else if (err == SSL_ERROR_ZERO_RETURN) {
      // zero return can be EOF/FIN; if we have data, just signal on_data and
      // close
      // Remotely-Initiated Shutdown
      // See: https://www.openssl.org/docs/manmaster/man3/SSL_shutdown.html

      if (read) {
        context =
            (struct us_internal_ssl_socket_context_t *)us_socket_context(
@@ -487,21 +515,24 @@ restart:
        s = context->on_data(
            s, loop_ssl_data->ssl_read_output + LIBUS_RECV_BUFFER_PADDING,
            read);
        if (!s || us_socket_is_closed(0, &s->s)) {
          return s;
        if (!s || us_internal_ssl_socket_is_closed(s)) {
          return NULL; // stop processing data
        }
      }
      // terminate the connection here
      return us_internal_ssl_socket_close(s, 0, NULL);
      us_internal_ssl_socket_close(s, 0, NULL);
      return NULL; // stop processing data
    }

    if (err == SSL_ERROR_SSL || err == SSL_ERROR_SYSCALL) {
      // clear the per-thread error queue if it may contain something
      ERR_clear_error();
      s->fatal_error = 1;
    }

    // terminate the connection here
    return us_internal_ssl_socket_close(s, 0, NULL);
    us_internal_ssl_socket_close(s, 0, NULL);
    return NULL; // stop processing data
  } else {
    // emit the data we have and exit

@@ -526,8 +557,8 @@ restart:
    s = context->on_data(
        s, loop_ssl_data->ssl_read_output + LIBUS_RECV_BUFFER_PADDING,
        read);
    if (!s || us_socket_is_closed(0, &s->s)) {
      return s;
    if (!s || us_internal_ssl_socket_is_closed(s)) {
      return NULL; // stop processing data
    }

    break;
@@ -549,22 +580,19 @@ restart:
  // emit data and restart
  s = context->on_data(
      s, loop_ssl_data->ssl_read_output + LIBUS_RECV_BUFFER_PADDING, read);
  if (!s || us_socket_is_closed(0, &s->s)) {
    return s;
  if (!s || us_internal_ssl_socket_is_closed(s)) {
    return NULL;
  }

  read = 0;
  goto restart;
  }
}

// we received the shutdown after reading, so we close
if (s->received_ssl_shutdown) {
  us_internal_ssl_socket_close(s, 0, NULL);
  return NULL;
}
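
The loop above is a variant of the standard non-blocking SSL_read drain: read until WANT_READ/WANT_WRITE, treat SSL_ERROR_ZERO_RETURN as a clean remote shutdown, and treat SSL/SYSCALL errors as fatal while clearing the per-thread error queue they may have pushed onto. A simplified, self-contained sketch of that pattern (illustrative only, not the diff's loop):

// Returns 1 to emit the buffered data and wait for more I/O,
// 0 on clean EOF (close_notify received), -1 on a fatal error.
static int sketch_drain(SSL *ssl, char *buf, int cap, int *out_len) {
    *out_len = 0;
    for (;;) {
        int n = SSL_read(ssl, buf + *out_len, cap - *out_len);
        if (n > 0) {
            *out_len += n;
            if (*out_len == cap) return 1; // buffer full: emit, then restart
            continue;
        }
        int err = SSL_get_error(ssl, n);
        if (err == SSL_ERROR_WANT_READ || err == SSL_ERROR_WANT_WRITE)
            return 1; // no more data for now: emit what we have
        if (err == SSL_ERROR_ZERO_RETURN)
            return 0; // close_notify received: emit, then close
        if (err == SSL_ERROR_SSL || err == SSL_ERROR_SYSCALL)
            ERR_clear_error(); // may have pushed onto the error queue
        return -1; // fatal: terminate the connection
    }
}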

  // trigger writable if we failed the last write with want read
  if (s->ssl_write_wants_read) {
  // Trigger writable if we failed the last SSL_write with SSL_ERROR_WANT_READ.
  // If SSL_read failed because we need to write more data (SSL_ERROR_WANT_WRITE),
  // we do not trigger on_writable; we wait until the next on_data or
  // on_writable event. SSL_read tries to flush the write buffer, and a failure
  // with SSL_ERROR_WANT_WRITE means the socket is no longer in a writable
  // state, so it only makes sense to trigger on_writable once we can write
  // more data. Otherwise we could trigger on_writable -> on_data in a
  // recursive loop.
  if (s->ssl_write_wants_read && !s->ssl_read_wants_write) {
    s->ssl_write_wants_read = 0;

    // make sure to update the context before we call (the context can change if the
@@ -575,8 +603,8 @@ restart:
    s = (struct us_internal_ssl_socket_t *)context->sc.on_writable(
        &s->s); // cast here!
    // if we are closed here, then exit
    if (!s || us_socket_is_closed(0, &s->s)) {
      return s;
    if (!s || us_internal_ssl_socket_is_closed(s)) {
      return NULL;
    }
  }

@@ -585,6 +613,7 @@ restart:

struct us_internal_ssl_socket_t *
ssl_on_writable(struct us_internal_ssl_socket_t *s) {
  us_internal_set_loop_ssl_data(s);
  us_internal_update_handshake(s);

  struct us_internal_ssl_socket_context_t *context =
@@ -606,8 +635,8 @@ ssl_on_writable(struct us_internal_ssl_socket_t *s) {
  }
  // Do not call on_writable if the socket is closed;
  // once closed, the socket data is no longer accessible.
  if (!s || us_socket_is_closed(0, &s->s)) {
    return 0;
  if (!s || us_internal_ssl_socket_is_closed(s) || us_internal_ssl_socket_is_shut_down(s)) {
    return s;
  }

  if (s->handshake_state == HANDSHAKE_COMPLETED) {

@@ -1032,7 +1061,7 @@ long us_internal_verify_peer_certificate( // NOLINT(runtime/int)

struct us_bun_verify_error_t
us_internal_verify_error(struct us_internal_ssl_socket_t *s) {
  if (us_socket_is_closed(0, &s->s) || us_internal_ssl_socket_is_shut_down(s)) {
  if (us_internal_ssl_socket_is_closed(s) || us_internal_ssl_socket_is_shut_down(s)) {
    return (struct us_bun_verify_error_t){
        .error = 0, .code = NULL, .reason = NULL};
  }
@@ -1317,10 +1346,6 @@ void us_bun_internal_ssl_socket_context_add_server_name(

  /* We do not want to hold any nullptrs in our SNI tree */
  if (ssl_context) {
#if ALLOW_SERVER_RENEGOTIATION
    context->client_renegotiation_limit = options.client_renegotiation_limit;
    context->client_renegotiation_window = options.client_renegotiation_window;
#endif
    if (sni_add(context->sni, hostname_pattern, ssl_context)) {
      /* If we already had that name, ignore */
      free_ssl_context(ssl_context);
@@ -1469,10 +1494,6 @@ us_internal_bun_create_ssl_socket_context(

  context->on_handshake = NULL;
  context->handshake_data = NULL;
#if ALLOW_SERVER_RENEGOTIATION
  context->client_renegotiation_limit = options.client_renegotiation_limit;
  context->client_renegotiation_window = options.client_renegotiation_window;
#endif
  /* We, as the parent context, may ignore data */
  context->sc.is_low_prio = (int (*)(struct us_socket_t *))ssl_is_low_prio;

@@ -1503,7 +1524,7 @@ void us_internal_ssl_socket_context_free(
    sni_free(context->sni, sni_hostname_destructor);
  }

  us_socket_context_free(0, &context->sc);
  us_internal_socket_context_free(0, &context->sc);
}

struct us_listen_socket_t *us_internal_ssl_socket_context_listen(

@@ -1592,7 +1613,8 @@ void us_internal_ssl_socket_context_on_timeout(
    struct us_internal_ssl_socket_t *s)) {
  us_socket_context_on_timeout(0, (struct us_socket_context_t *)context,
                               (struct us_socket_t * (*)(struct us_socket_t *))
                                   on_timeout);
                                   ssl_on_timeout);
  context->on_timeout = on_timeout;
}

void us_internal_ssl_socket_context_on_long_timeout(
@@ -1601,7 +1623,8 @@ void us_internal_ssl_socket_context_on_long_timeout(
    struct us_internal_ssl_socket_t *s)) {
  us_socket_context_on_long_timeout(
      0, (struct us_socket_context_t *)context,
      (struct us_socket_t * (*)(struct us_socket_t *)) on_long_timeout);
      (struct us_socket_t * (*)(struct us_socket_t *)) ssl_on_long_timeout);
  context->on_long_timeout = on_long_timeout;
}

/* We do not really listen to the passed FIN-handler, we entirely override it with
@@ -1656,8 +1679,8 @@ int us_internal_ssl_socket_raw_write(struct us_internal_ssl_socket_t *s,

int us_internal_ssl_socket_write(struct us_internal_ssl_socket_t *s,
                                 const char *data, int length, int msg_more) {

  if (us_socket_is_closed(0, &s->s) || us_internal_ssl_socket_is_shut_down(s)) {

  if (us_socket_is_closed(0, &s->s) || us_internal_ssl_socket_is_shut_down(s) || length == 0) {
    return 0;
  }

@@ -1697,6 +1720,7 @@ int us_internal_ssl_socket_write(struct us_internal_ssl_socket_t *s,
      // these two errors may add to the error queue, which is per-thread and
      // must be cleared
      ERR_clear_error();
      s->fatal_error = 1;

      // all errors here except for want write are critical and should not
      // happen

@@ -1714,12 +1738,12 @@ void *us_internal_connecting_ssl_socket_ext(struct us_connecting_socket_t *s) {
}

int us_internal_ssl_socket_is_shut_down(struct us_internal_ssl_socket_t *s) {
  return us_socket_is_shut_down(0, &s->s) ||
         SSL_get_shutdown(s->ssl) & SSL_SENT_SHUTDOWN;
  return !s->ssl || us_socket_is_shut_down(0, &s->s) ||
         SSL_get_shutdown(s->ssl) & SSL_SENT_SHUTDOWN || s->fatal_error;
}

void us_internal_ssl_socket_shutdown(struct us_internal_ssl_socket_t *s) {
  if (!us_socket_is_closed(0, &s->s) &&
  if (!us_internal_ssl_socket_is_closed(s) &&
      !us_internal_ssl_socket_is_shut_down(s)) {
    struct us_internal_ssl_socket_context_t *context =
        (struct us_internal_ssl_socket_context_t *)us_socket_context(0, &s->s);
@@ -1740,19 +1764,22 @@ void us_internal_ssl_socket_shutdown(struct us_internal_ssl_socket_t *s) {
    loop_ssl_data->ssl_socket = &s->s;

    loop_ssl_data->msg_more = 0;

    // sets SSL_SENT_SHUTDOWN no matter what (not actually true if error!)
    // sets SSL_SENT_SHUTDOWN and waits for the other side to do the same
    int ret = SSL_shutdown(s->ssl);
    if (ret == 0) {
      ret = SSL_shutdown(s->ssl);

    if (SSL_in_init(s->ssl) || SSL_get_quiet_shutdown(s->ssl)) {
      // when SSL_in_init or quiet shutdown in BoringSSL, we call shutdown
      // directly
      us_socket_shutdown(0, &s->s);
      return;
    }

    if (ret < 0) {

      int err = SSL_get_error(s->ssl, ret);
      if (err == SSL_ERROR_SSL || err == SSL_ERROR_SYSCALL) {
        // clear the error queue
        ERR_clear_error();
        s->fatal_error = 1;
      }

      // we get here if we are shutting down while still in init
@@ -1793,6 +1820,7 @@ ssl_wrapped_context_on_close(struct us_internal_ssl_socket_t *s, int code,
  wrapped_context->old_events.on_close((struct us_socket_t *)s, code, reason);
}

us_socket_context_unref(0, wrapped_context->tcp_context);
return s;
}

@@ -1949,6 +1977,7 @@ struct us_internal_ssl_socket_t *us_internal_ssl_socket_wrap_with_tls(
  }

  struct us_socket_context_t *old_context = us_socket_context(0, s);
  us_socket_context_ref(0, old_context);

  struct us_socket_context_t *context = us_create_bun_socket_context(
      1, old_context->loop, sizeof(struct us_wrapped_socket_context_t),
@@ -1971,6 +2000,7 @@ struct us_internal_ssl_socket_t *us_internal_ssl_socket_wrap_with_tls(
  };
  wrapped_context->old_events = old_events;
  wrapped_context->events = events;
  wrapped_context->tcp_context = old_context;

  // no need to wrap open because the socket is already open (only the new
  // context will be called so we can configure hostname and ssl stuff normally here before
@@ -2043,8 +2073,8 @@ us_socket_context_on_socket_connect_error(
  socket->ssl = NULL;
  socket->ssl_write_wants_read = 0;
  socket->ssl_read_wants_write = 0;
  socket->fatal_error = 0;
  socket->handshake_state = HANDSHAKE_PENDING;
  socket->received_ssl_shutdown = 0;
  return socket;
}

@@ -1,13 +1,16 @@
// MSVC doesn't support C11 stdatomic.h properly yet,
// so we use C++ std::atomic instead.
#include "./internal/internal.h"
#include "./root_certs.h"
#include <openssl/x509.h>
#include <openssl/pem.h>
#include "./internal/internal.h"
#include <atomic>

#include <openssl/pem.h>
#include <openssl/x509.h>
#include <string.h>
static const int root_certs_size = sizeof(root_certs) / sizeof(root_certs[0]);
static X509* root_cert_instances[sizeof(root_certs) / sizeof(root_certs[0])] = {NULL};
static X509 *root_cert_instances[sizeof(root_certs) / sizeof(root_certs[0])] = {
    NULL};
static X509 *root_extra_cert_instances = {NULL};

static std::atomic_flag root_cert_instances_lock = ATOMIC_FLAG_INIT;
static std::atomic_bool root_cert_instances_initialized = 0;

@@ -16,15 +19,16 @@ static std::atomic_bool root_cert_instances_initialized = 0;
// for the OpenSSL CLI, but works poorly for this case because it involves
// synchronous interaction with the controlling terminal, something we never
// want, and use this function to avoid it.
int us_no_password_callback(char* buf, int size, int rwflag, void* u) {
int us_no_password_callback(char *buf, int size, int rwflag, void *u) {
  return 0;
}

static X509 * us_ssl_ctx_get_X509_without_callback_from(struct us_cert_string_t content) {
static X509 *
us_ssl_ctx_get_X509_without_callback_from(struct us_cert_string_t content) {
  X509 *x = NULL;
  BIO *in;

  ERR_clear_error(); // clear error stack for SSL_CTX_use_certificate()

  in = BIO_new_mem_buf(content.str, content.len);
  if (in == NULL) {
@@ -37,9 +41,37 @@ static X509 * us_ssl_ctx_get_X509_without_callback_from(struct us_cert_string_t
    OPENSSL_PUT_ERROR(SSL, ERR_R_PEM_LIB);
    goto end;
  }

  return x;
end:
  X509_free(x);
  BIO_free(in);
  return NULL;
}

static X509 *
us_ssl_ctx_get_X509_without_callback_from_file(const char *filename) {
  X509 *x = NULL;
  BIO *in;

  ERR_clear_error(); // clear error stack for SSL_CTX_use_certificate()

  in = BIO_new(BIO_s_file());
  if (in == NULL) {
    OPENSSL_PUT_ERROR(SSL, ERR_R_BUF_LIB);
    goto end;
  }

  if (BIO_read_filename(in, filename) <= 0) {
    OPENSSL_PUT_ERROR(SSL, ERR_R_SYS_LIB);
    goto end;
  }

  x = PEM_read_bio_X509(in, NULL, us_no_password_callback, NULL);
  if (x == NULL) {
    OPENSSL_PUT_ERROR(SSL, ERR_R_PEM_LIB);
    goto end;
  }
  return x;
end:
  X509_free(x);
  BIO_free(in);
@@ -47,44 +79,65 @@ end:
}

static void us_internal_init_root_certs() {
  if(std::atomic_load(&root_cert_instances_initialized) == 1) return;
  if (std::atomic_load(&root_cert_instances_initialized) == 1)
    return;

  while(atomic_flag_test_and_set_explicit(&root_cert_instances_lock, std::memory_order_acquire));
  while (atomic_flag_test_and_set_explicit(&root_cert_instances_lock,
                                           std::memory_order_acquire))
    ;

  if (!atomic_exchange(&root_cert_instances_initialized, 1)) {
    for (size_t i = 0; i < root_certs_size; i++) {
      root_cert_instances[i] = us_ssl_ctx_get_X509_without_callback_from(root_certs[i]);
    }
  }

  atomic_flag_clear_explicit(&root_cert_instances_lock, std::memory_order_release);
}

extern "C" int us_internal_raw_root_certs(struct us_cert_string_t** out) {
  *out = root_certs;
  return root_certs_size;
}

extern "C" X509_STORE* us_get_default_ca_store() {
  X509_STORE *store = X509_STORE_new();
  if (store == NULL) {
    return NULL;
  }

  if (!X509_STORE_set_default_paths(store)) {
    X509_STORE_free(store);
    return NULL;
  }

  us_internal_init_root_certs();

  // load all root_cert_instances on the default ca store
  if (!atomic_exchange(&root_cert_instances_initialized, 1)) {
    for (size_t i = 0; i < root_certs_size; i++) {
      X509* cert = root_cert_instances[i];
      if(cert == NULL) continue;
      X509_up_ref(cert);
      X509_STORE_add_cert(store, cert);
      root_cert_instances[i] =
          us_ssl_ctx_get_X509_without_callback_from(root_certs[i]);
    }

    return store;

    // get extra cert option from environment variable
    const char *extra_cert = getenv("NODE_EXTRA_CA_CERTS");
    if (extra_cert) {
      size_t length = strlen(extra_cert);
      if (length > 0) {
        root_extra_cert_instances =
            us_ssl_ctx_get_X509_without_callback_from_file(extra_cert);
      }
    }
  }

  atomic_flag_clear_explicit(&root_cert_instances_lock,
                             std::memory_order_release);
}

extern "C" int us_internal_raw_root_certs(struct us_cert_string_t **out) {
  *out = root_certs;
  return root_certs_size;
}

extern "C" X509_STORE *us_get_default_ca_store() {
  X509_STORE *store = X509_STORE_new();
  if (store == NULL) {
    return NULL;
  }

  if (!X509_STORE_set_default_paths(store)) {
    X509_STORE_free(store);
    return NULL;
  }

  us_internal_init_root_certs();

  // load all root_cert_instances on the default ca store
  for (size_t i = 0; i < root_certs_size; i++) {
    X509 *cert = root_cert_instances[i];
    if (cert == NULL)
      continue;
    X509_up_ref(cert);
    X509_STORE_add_cert(store, cert);
  }

  if (root_extra_cert_instances) {
    X509_up_ref(root_extra_cert_instances);
    X509_STORE_add_cert(store, root_extra_cert_instances);
  }

  return store;
}
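
The locking in us_internal_init_root_certs above is a spin-on-atomic_flag, once-only initializer. The same pattern in plain C11 stdatomic (the file itself uses the C++ std::atomic equivalents; names here are illustrative):

#include <stdatomic.h>
// Once-only init: spin on an atomic_flag, then use an atomic exchange so
// only the first thread ever runs the initializer.
static atomic_flag init_lock = ATOMIC_FLAG_INIT;
static atomic_bool initialized;

static void init_once(void (*init)(void)) {
    if (atomic_load(&initialized)) return; // fast path: already initialized
    while (atomic_flag_test_and_set_explicit(&init_lock, memory_order_acquire))
        ; // spin until we hold the lock
    if (!atomic_exchange(&initialized, 1)) {
        init(); // the first thread through runs the initializer exactly once
    }
    atomic_flag_clear_explicit(&init_lock, memory_order_release);
}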

@@ -109,6 +109,51 @@ struct us_loop_t *us_timer_loop(struct us_timer_t *t) {
    return internal_cb->loop;
}


#if defined(LIBUS_USE_EPOLL)

#include <sys/syscall.h>
static int has_epoll_pwait2 = -1;

#ifndef SYS_epoll_pwait2
// The syscall number is consistent across architectures:
// https://github.com/torvalds/linux/blob/9d1ddab261f3e2af7c384dc02238784ce0cf9f98/include/uapi/asm-generic/unistd.h#L795
// https://github.com/google/gvisor/blob/master/test/syscalls/linux/epoll.cc#L48C1-L50C7
#define SYS_epoll_pwait2 441
#endif

static ssize_t sys_epoll_pwait2(int epfd, struct epoll_event *events, int maxevents, const struct timespec *timeout, const sigset_t *sigmask, size_t sigsetsize) {
    return syscall(SYS_epoll_pwait2, epfd, events, maxevents, timeout, sigmask, sigsetsize);
}

static int bun_epoll_pwait2(int epfd, struct epoll_event *events, int maxevents, const struct timespec *timeout) {
    int ret;
    if (has_epoll_pwait2 != 0) {
        do {
            ret = sys_epoll_pwait2(epfd, events, maxevents, timeout, NULL, 0);
        } while (IS_EINTR(ret));

        if (LIKELY(ret != -1 || errno != ENOSYS)) {
            return ret;
        }

        has_epoll_pwait2 = 0;
    }

    int timeoutMs = -1;
    if (timeout) {
        timeoutMs = timeout->tv_sec * 1000 + timeout->tv_nsec / 1000000;
    }

    do {
        ret = epoll_wait(epfd, events, maxevents, timeoutMs);
    } while (IS_EINTR(ret));

    return ret;
}

#endif
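
IS_EINTR appears throughout these hunks but its definition lies outside this diff. A plausible reconstruction consistent with the call sites, all of which pass a raw syscall return value (hypothetical, not the verbatim macro from the tree):

#include <errno.h>
// Hypothetical reconstruction of the IS_EINTR helper used above: a return
// of -1 with errno == EINTR means the syscall was interrupted by a signal
// and should simply be retried.
#define IS_EINTR(rc) ((rc) == -1 && errno == EINTR)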

/* Loop */
struct us_loop_t *us_create_loop(void *hint, void (*wakeup_cb)(struct us_loop_t *loop), void (*pre_cb)(struct us_loop_t *loop), void (*post_cb)(struct us_loop_t *loop), unsigned int ext_size) {
    struct us_loop_t *loop = (struct us_loop_t *) us_calloc(1, sizeof(struct us_loop_t) + ext_size);
@@ -139,9 +184,11 @@ void us_loop_run(struct us_loop_t *loop) {

    /* Fetch ready polls */
#ifdef LIBUS_USE_EPOLL
    loop->num_ready_polls = epoll_wait(loop->fd, loop->ready_polls, 1024, -1);
    loop->num_ready_polls = bun_epoll_pwait2(loop->fd, loop->ready_polls, 1024, NULL);
#else
    loop->num_ready_polls = kevent64(loop->fd, NULL, 0, loop->ready_polls, 1024, 0, NULL);
    do {
        loop->num_ready_polls = kevent64(loop->fd, NULL, 0, loop->ready_polls, 1024, 0, NULL);
    } while (IS_EINTR(loop->num_ready_polls));
#endif

    /* Iterate ready polls, dispatching them by type */
@@ -183,12 +230,6 @@ void us_loop_run(struct us_loop_t *loop) {
    }
}

#if defined(LIBUS_USE_EPOLL)

// static int has_epoll_pwait2 = 0;
// TODO:

#endif

void us_loop_run_bun_tick(struct us_loop_t *loop, const struct timespec* timeout) {
    if (loop->num_polls == 0)
@@ -207,13 +248,12 @@ void us_loop_run_bun_tick(struct us_loop_t *loop, const struct timespec* timeout

    /* Fetch ready polls */
#ifdef LIBUS_USE_EPOLL
    int timeoutMs = -1;
    if (timeout) {
        timeoutMs = timeout->tv_sec * 1000 + timeout->tv_nsec / 1000000;
    }
    loop->num_ready_polls = epoll_wait(loop->fd, loop->ready_polls, 1024, timeoutMs);

    loop->num_ready_polls = bun_epoll_pwait2(loop->fd, loop->ready_polls, 1024, timeout);
#else
    loop->num_ready_polls = kevent64(loop->fd, NULL, 0, loop->ready_polls, 1024, 0, timeout);
    do {
        loop->num_ready_polls = kevent64(loop->fd, NULL, 0, loop->ready_polls, 1024, 0, timeout);
    } while (IS_EINTR(loop->num_ready_polls));
#endif

    /* Iterate ready polls, dispatching them by type */
/* Iterate ready polls, dispatching them by type */
|
||||
@@ -296,7 +336,10 @@ int kqueue_change(int kqfd, int fd, int old_events, int new_events, void *user_d
|
||||
EV_SET64(&change_list[change_length++], fd, EVFILT_WRITE, (new_events & LIBUS_SOCKET_WRITABLE) ? EV_ADD : EV_DELETE, 0, 0, (uint64_t)(void*)user_data, 0, 0);
|
||||
}
|
||||
|
||||
int ret = kevent64(kqfd, change_list, change_length, change_list, change_length, KEVENT_FLAG_ERROR_EVENTS, NULL);
|
||||
int ret;
|
||||
do {
|
||||
ret = kevent64(kqfd, change_list, change_length, change_list, change_length, KEVENT_FLAG_ERROR_EVENTS, NULL);
|
||||
} while (IS_EINTR(ret));
|
||||
|
||||
// ret should be 0 in most cases (not guaranteed when removing async)
|
||||
|
||||
@@ -332,7 +375,10 @@ void us_poll_start(struct us_poll_t *p, struct us_loop_t *loop, int events) {
|
||||
struct epoll_event event;
|
||||
event.events = events;
|
||||
event.data.ptr = p;
|
||||
epoll_ctl(loop->fd, EPOLL_CTL_ADD, p->state.fd, &event);
|
||||
int ret;
|
||||
do {
|
||||
ret = epoll_ctl(loop->fd, EPOLL_CTL_ADD, p->state.fd, &event);
|
||||
} while (IS_EINTR(ret));
|
||||
#else
|
||||
kqueue_change(loop->fd, p->state.fd, 0, events, p);
|
||||
#endif
|
||||
@@ -348,7 +394,10 @@ void us_poll_change(struct us_poll_t *p, struct us_loop_t *loop, int events) {
|
||||
struct epoll_event event;
|
||||
event.events = events;
|
||||
event.data.ptr = p;
|
||||
epoll_ctl(loop->fd, EPOLL_CTL_MOD, p->state.fd, &event);
|
||||
int rc;
|
||||
do {
|
||||
rc = epoll_ctl(loop->fd, EPOLL_CTL_MOD, p->state.fd, &event);
|
||||
} while (IS_EINTR(rc));
|
||||
#else
|
||||
kqueue_change(loop->fd, p->state.fd, old_events, events, p);
|
||||
#endif
|
||||
@@ -362,7 +411,10 @@ void us_poll_stop(struct us_poll_t *p, struct us_loop_t *loop) {
|
||||
int new_events = 0;
|
||||
#ifdef LIBUS_USE_EPOLL
|
||||
struct epoll_event event;
|
||||
epoll_ctl(loop->fd, EPOLL_CTL_DEL, p->state.fd, &event);
|
||||
int rc;
|
||||
do {
|
||||
rc = epoll_ctl(loop->fd, EPOLL_CTL_DEL, p->state.fd, &event);
|
||||
} while (IS_EINTR(rc));
|
||||
#else
|
||||
if (old_events) {
|
||||
kqueue_change(loop->fd, p->state.fd, old_events, new_events, NULL);
|
||||
@@ -373,12 +425,14 @@ void us_poll_stop(struct us_poll_t *p, struct us_loop_t *loop) {
|
||||
us_internal_loop_update_pending_ready_polls(loop, p, 0, old_events, new_events);
|
||||
}

unsigned int us_internal_accept_poll_event(struct us_poll_t *p) {
size_t us_internal_accept_poll_event(struct us_poll_t *p) {
#ifdef LIBUS_USE_EPOLL
    int fd = us_poll_fd(p);
    uint64_t buf;
    int read_length = read(fd, &buf, 8);
    (void)read_length;
    ssize_t read_length = 0;
    do {
        read_length = read(fd, &buf, 8);
    } while (IS_EINTR(read_length));
    return buf;
#else
    /* Kqueue has no underlying FD for timers or user events */
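
For the epoll branch above: assuming the poll wraps a Linux eventfd or timerfd (as libusockets' timer and async wakeups do on epoll), the 8-byte read atomically returns and resets the accumulated counter, which is why buf itself is returned as the event count. A self-contained illustration of that semantics (hypothetical helper, not from the diff):

#include <errno.h>
#include <stdint.h>
#include <unistd.h>
// Drain an eventfd-style counter: the 8-byte read atomically fetches the
// number of signals since the last read and resets the counter to zero.
static uint64_t drain_counter_fd(int fd) {
    uint64_t count = 0;
    ssize_t n;
    do {
        n = read(fd, &count, sizeof(count));
    } while (n == -1 && errno == EINTR);
    return n == (ssize_t)sizeof(count) ? count : 0;
}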
@@ -467,7 +521,11 @@ void us_timer_close(struct us_timer_t *timer, int fallthrough) {

    struct kevent64_s event;
    EV_SET64(&event, (uint64_t) (void*) internal_cb, EVFILT_TIMER, EV_DELETE, 0, 0, (uint64_t)internal_cb, 0, 0);
    kevent64(internal_cb->loop->fd, &event, 1, &event, 1, KEVENT_FLAG_ERROR_EVENTS, NULL);
    int ret;
    do {
        ret = kevent64(internal_cb->loop->fd, &event, 1, &event, 1, KEVENT_FLAG_ERROR_EVENTS, NULL);
    } while (IS_EINTR(ret));

    /* (regular) sockets are the only polls which are not freed immediately */
    if(fallthrough){
@@ -486,7 +544,11 @@ void us_timer_set(struct us_timer_t *t, void (*cb)(struct us_timer_t *t), int ms
    struct kevent64_s event;
    uint64_t ptr = (uint64_t)(void*)internal_cb;
    EV_SET64(&event, ptr, EVFILT_TIMER, EV_ADD | (repeat_ms ? 0 : EV_ONESHOT), 0, ms, (uint64_t)internal_cb, 0, 0);
    kevent64(internal_cb->loop->fd, &event, 1, &event, 1, KEVENT_FLAG_ERROR_EVENTS, NULL);

    int ret;
    do {
        ret = kevent64(internal_cb->loop->fd, &event, 1, &event, 1, KEVENT_FLAG_ERROR_EVENTS, NULL);
    } while (IS_EINTR(ret));
}
#endif

@@ -581,7 +643,11 @@ void us_internal_async_close(struct us_internal_async *a) {
    struct kevent64_s event;
    uint64_t ptr = (uint64_t)(void*)internal_cb;
    EV_SET64(&event, ptr, EVFILT_MACHPORT, EV_DELETE, 0, 0, (uint64_t)(void*)internal_cb, 0, 0);
    kevent64(internal_cb->loop->fd, &event, 1, &event, 1, KEVENT_FLAG_ERROR_EVENTS, NULL);

    int ret;
    do {
        ret = kevent64(internal_cb->loop->fd, &event, 1, &event, 1, KEVENT_FLAG_ERROR_EVENTS, NULL);
    } while (IS_EINTR(ret));

    mach_port_deallocate(mach_task_self(), internal_cb->port);
    us_free(internal_cb->machport_buf);
@@ -609,7 +675,10 @@ void us_internal_async_set(struct us_internal_async *a, void (*cb)(struct us_int
    event.ext[1] = MACHPORT_BUF_LEN;
    event.udata = (uint64_t)(void*)internal_cb;

    int ret = kevent64(internal_cb->loop->fd, &event, 1, &event, 1, KEVENT_FLAG_ERROR_EVENTS, NULL);
    int ret;
    do {
        ret = kevent64(internal_cb->loop->fd, &event, 1, &event, 1, KEVENT_FLAG_ERROR_EVENTS, NULL);
    } while (IS_EINTR(ret));

    if (UNLIKELY(ret == -1)) {
        abort();

@@ -125,7 +125,7 @@ int us_poll_events(struct us_poll_t *p) {
        ((p->poll_type & POLL_TYPE_POLLING_OUT) ? LIBUS_SOCKET_WRITABLE : 0);
}

unsigned int us_internal_accept_poll_event(struct us_poll_t *p) { return 0; }
size_t us_internal_accept_poll_event(struct us_poll_t *p) { return 0; }

int us_internal_poll_type(struct us_poll_t *p) { return p->poll_type & POLL_TYPE_KIND_MASK; }