Mirror of https://github.com/oven-sh/bun (synced 2026-02-17 14:22:01 +00:00)
Compare commits: nektro-pat...jarred/str
358 Commits
Commit table: 358 rows listing SHA1 values only (from d0a5078b66 through 907cd8d45d); the Author and Date columns are blank in this view.
@@ -10,9 +10,10 @@ steps:
      blocked_state: "running"

  - label: ":pipeline:"
    command: "buildkite-agent pipeline upload .buildkite/ci.yml"
    agents:
      queue: "build-linux"
      queue: "build-darwin"
    command:
      - ".buildkite/scripts/prepare-build.sh"

  - if: "build.branch == 'main' && !build.pull_request.repository.fork"
    label: ":github:"
File diff suppressed because it is too large
62	.buildkite/scripts/build-bun.sh	Executable file
@@ -0,0 +1,62 @@
#!/bin/bash

set -eo pipefail
source "$(dirname "$0")/env.sh"

function run_command() {
  set -x
  "$@"
  { set +x; } 2>/dev/null
}

cwd="$(pwd)"

mkdir -p build
source "$(dirname "$0")/download-artifact.sh" "build/bun-deps/**" --step "$BUILDKITE_GROUP_KEY-build-deps"
source "$(dirname "$0")/download-artifact.sh" "build/bun-zig.o" --step "$BUILDKITE_GROUP_KEY-build-zig"
source "$(dirname "$0")/download-artifact.sh" "build/bun-cpp-objects.a" --step "$BUILDKITE_GROUP_KEY-build-cpp" --split
cd build

run_command cmake .. "${CMAKE_FLAGS[@]}" \
  -GNinja \
  -DBUN_LINK_ONLY="1" \
  -DNO_CONFIGURE_DEPENDS="1" \
  -DBUN_ZIG_OBJ_DIR="$cwd/build" \
  -DBUN_CPP_ARCHIVE="$cwd/build/bun-cpp-objects.a" \
  -DBUN_DEPS_OUT_DIR="$cwd/build/bun-deps" \
  -DCMAKE_BUILD_TYPE="$CMAKE_BUILD_TYPE" \
  -DCPU_TARGET="$CPU_TARGET" \
  -DUSE_LTO="$USE_LTO" \
  -DUSE_DEBUG_JSC="$USE_DEBUG_JSC" \
  -DCANARY="$CANARY" \
  -DGIT_SHA="$GIT_SHA"
run_command ninja -v -j "$CPUS"
run_command ls

tag="bun-$BUILDKITE_GROUP_KEY"
if [ "$USE_LTO" == "OFF" ]; then
  # Remove OS check when LTO is enabled on macOS again
  if [[ "$tag" == *"darwin"* ]]; then
    tag="$tag-nolto"
  fi
fi

for name in bun bun-profile; do
  dir="$tag"
  if [ "$name" == "bun-profile" ]; then
    dir="$tag-profile"
  fi
  run_command chmod +x "$name"
  run_command "./$name" --revision
  run_command mkdir -p "$dir"
  run_command mv "$name" "$dir/$name"
  run_command zip -r "$dir.zip" "$dir"
  source "$cwd/.buildkite/scripts/upload-artifact.sh" "$dir.zip"
  # temporarily disable this so CI can run
  # this is failing because $name is now in $dir/$name and if changed to $dir/$name we get ENOENT reading "bun:internal-for-testing"
  # if [ "$name" == "bun-profile" ]; then
  #   export BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING="1"
  #   run_command "./$name" -e "require('fs').writeFileSync('./features.json', JSON.stringify(require('bun:internal-for-testing').crash_handler.getFeatureData()))"
  #   source "$cwd/.buildkite/scripts/upload-artifact.sh" "features.json"
  # fi
done
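This link step compiles nothing itself; it only consumes artifacts produced by earlier steps in the same Buildkite group and runs the final link. Based on the scripts added in this change, the wiring is roughly the following (comments only, as a reading aid):

    # build-deps step: uploads build/bun-deps/*.a                         (build-deps.sh)
    # build-zig step:  uploads build/bun-zig.o                            (build-zig.sh)
    # build-cpp step:  uploads build/bun-cpp-objects.a, chunked (--split)  (build-cpp.sh)
    # build-bun step:  downloads all three, then cmake -DBUN_LINK_ONLY=1 + ninja (this script)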
37	.buildkite/scripts/build-cpp.sh	Executable file
@@ -0,0 +1,37 @@
#!/bin/bash

set -eo pipefail
export FORCE_UPDATE_SUBMODULES=1

# env.sh calls update_submodules.sh
source "$(dirname "$0")/env.sh"

{ set +x; } 2>/dev/null

function run_command() {
  set -x
  "$@"
  { set +x; } 2>/dev/null
}

mkdir -p build
cd build
mkdir -p tmp_modules tmp_functions js codegen

run_command cmake .. "${CMAKE_FLAGS[@]}" \
  -GNinja \
  -DBUN_CPP_ONLY="1" \
  -DNO_CONFIGURE_DEPENDS="1" \
  -DCMAKE_BUILD_TYPE="$CMAKE_BUILD_TYPE" \
  -DCPU_TARGET="$CPU_TARGET" \
  -DUSE_LTO="$USE_LTO" \
  -DUSE_DEBUG_JSC="$USE_DEBUG_JSC" \
  -DCANARY="$CANARY" \
  -DGIT_SHA="$GIT_SHA"

chmod +x compile-cpp-only.sh
source compile-cpp-only.sh -v -j "$CPUS"
{ set +x; } 2>/dev/null

cd ..
source "$(dirname "$0")/upload-artifact.sh" "build/bun-cpp-objects.a" --split
22	.buildkite/scripts/build-deps.sh	Executable file
@@ -0,0 +1,22 @@
#!/bin/bash

set -eo pipefail
source "$(dirname "$0")/env.sh"
source "$(realpath $(dirname "$0")/../../scripts/all-dependencies.sh)"

artifacts=(
  libcrypto.a libssl.a libdecrepit.a
  libcares.a
  libarchive.a
  liblolhtml.a
  libmimalloc.a libmimalloc.o
  libtcc.a
  libz.a
  libzstd.a
  libdeflate.a
  liblshpack.a
)

for artifact in "${artifacts[@]}"; do
  source "$(dirname "$0")/upload-artifact.sh" "build/bun-deps/$artifact"
done
40	.buildkite/scripts/build-old-js.sh	Executable file
@@ -0,0 +1,40 @@
#!/bin/bash

set -eo pipefail
source "$(dirname "$0")/env.sh"

function assert_bun() {
  if ! command -v bun &>/dev/null; then
    echo "error: bun is not installed" 1>&2
    exit 1
  fi
}

function assert_make() {
  if ! command -v make &>/dev/null; then
    echo "error: make is not installed" 1>&2
    exit 1
  fi
}

function run_command() {
  set -x
  "$@"
  { set +x; } 2>/dev/null
}

function build_node_fallbacks() {
  local cwd="src/node-fallbacks"
  run_command bun install --cwd "$cwd" --frozen-lockfile
  run_command bun run --cwd "$cwd" build
}

function build_old_js() {
  run_command bun install --frozen-lockfile
  run_command make runtime_js fallback_decoder bun_error
}

assert_bun
assert_make
build_node_fallbacks
build_old_js
101	.buildkite/scripts/build-zig.sh	Executable file
@@ -0,0 +1,101 @@
#!/bin/bash

set -euo pipefail

export CMAKE_FLAGS=""
source "$(dirname "$0")/env.sh"

if [[ -n "$CMAKE_FLAGS" ]]; then
  echo "CMAKE_FLAGS should not be empty"
  exit 1
fi

function assert_target() {
  local arch="${2-$(uname -m)}"
  case "$(echo "$arch" | tr '[:upper:]' '[:lower:]')" in
    x64 | x86_64 | amd64)
      export ZIG_ARCH="x86_64"
      if [[ "$BUILDKITE_STEP_KEY" == *"baseline"* ]]; then
        export ZIG_CPU_TARGET="nehalem"
      else
        export ZIG_CPU_TARGET="haswell"
      fi
      ;;
    aarch64 | arm64)
      export ZIG_ARCH="aarch64"
      export ZIG_CPU_TARGET="native"
      ;;
    *)
      echo "error: Unsupported architecture: $arch" 1>&2
      exit 1
      ;;
  esac
  local os="${1-$(uname -s)}"
  case "$(echo "$os" | tr '[:upper:]' '[:lower:]')" in
    linux)
      export ZIG_OS="linux"
      export ZIG_TARGET="$ZIG_ARCH-linux-gnu"
      ;;
    darwin)
      export ZIG_OS="macos"
      export ZIG_TARGET="$ZIG_ARCH-macos-none"
      ;;
    windows)
      export ZIG_OS="windows"
      export ZIG_TARGET="$ZIG_ARCH-windows-msvc"
      ;;
    *)
      echo "error: Unsupported operating system: $os" 1>&2
      exit 1
      ;;
  esac
}

function run_command() {
  set -x
  "$@"
  { set +x; } 2>/dev/null
}

assert_target "$@"

# Since the zig build depends on files from the zig submodule,
# make sure to update the submodule before building.
run_command git submodule update --init --recursive --progress --depth=1 --checkout src/deps/zig

# TODO: Move these to be part of the CMake build
source "$(dirname "$0")/build-old-js.sh"

cwd="$(pwd)"
mkdir -p build
cd build

# In Buildkite, this script compiles for Windows on a macOS machine,
# so CMake's Windows detection for this logic is not run.
ZIG_OPTIMIZE="ReleaseFast"
if [[ "$ZIG_OS" == "windows" ]]; then
  ZIG_OPTIMIZE="ReleaseSafe"
fi

run_command cmake .. "${CMAKE_FLAGS[@]}" \
  -GNinja \
  -DNO_CONFIGURE_DEPENDS="1" \
  -DNO_CODEGEN="0" \
  -DWEBKIT_DIR="omit" \
  -DBUN_ZIG_OBJ_DIR="$cwd/build" \
  -DZIG_LIB_DIR="$cwd/src/deps/zig/lib" \
  -DCMAKE_BUILD_TYPE="$CMAKE_BUILD_TYPE" \
  -DARCH="$ZIG_ARCH" \
  -DCPU_TARGET="$ZIG_CPU_TARGET" \
  -DZIG_TARGET="$ZIG_TARGET" \
  -DUSE_LTO="$USE_LTO" \
  -DUSE_DEBUG_JSC="$USE_DEBUG_JSC" \
  -DCANARY="$CANARY" \
  -DZIG_OPTIMIZE="$ZIG_OPTIMIZE" \
  -DGIT_SHA="$GIT_SHA"

export ONLY_ZIG="1"
run_command ninja "$cwd/build/bun-zig.o" -v -j "$CPUS"

cd ..
source "$(dirname "$0")/upload-artifact.sh" "build/bun-zig.o"
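assert_target maps an (os, arch) pair, taken from the command line or detected via uname, to the Zig triple and CPU target exported for the CMake invocation above. Illustrative calls and the exports they produce, assuming the step key does not contain "baseline":

    assert_target linux x64      # ZIG_ARCH=x86_64, ZIG_CPU_TARGET=haswell, ZIG_TARGET=x86_64-linux-gnu
    assert_target darwin arm64   # ZIG_ARCH=aarch64, ZIG_CPU_TARGET=native, ZIG_TARGET=aarch64-macos-none
    assert_target windows x64    # ZIG_ARCH=x86_64, ZIG_CPU_TARGET=haswell, ZIG_TARGET=x86_64-windows-msvc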
47	.buildkite/scripts/download-artifact.ps1	Executable file
@@ -0,0 +1,47 @@
param (
  [Parameter(Mandatory=$true)]
  [string[]] $Paths,
  [switch] $Split
)

$ErrorActionPreference = "Stop"

function Assert-Buildkite-Agent() {
  if (-not (Get-Command "buildkite-agent" -ErrorAction SilentlyContinue)) {
    Write-Error "Cannot find buildkite-agent, please install it: https://buildkite.com/docs/agent/v3/install"
    exit 1
  }
}

function Assert-Join-File() {
  if (-not (Get-Command "Join-File" -ErrorAction SilentlyContinue)) {
    Write-Error "Cannot find Join-File, please install it: https://www.powershellgallery.com/packages/FileSplitter/1.3"
    exit 1
  }
}

function Download-Buildkite-Artifact() {
  param (
    [Parameter(Mandatory=$true)]
    [string] $Path
  )
  if ($Split) {
    & buildkite-agent artifact download "$Path.*" --debug --debug-http
    Join-File -Path "$(Resolve-Path .)\$Path" -Verbose -DeletePartFiles
  } else {
    & buildkite-agent artifact download "$Path" --debug --debug-http
  }
  if (-not (Test-Path $Path)) {
    Write-Error "Could not find artifact: $Path"
    exit 1
  }
}

Assert-Buildkite-Agent
if ($Split) {
  Assert-Join-File
}

foreach ($Path in $Paths) {
  Download-Buildkite-Artifact $Path
}
59	.buildkite/scripts/download-artifact.sh	Executable file
@@ -0,0 +1,59 @@
#!/bin/bash

set -euo pipefail

function assert_buildkite_agent() {
  if ! command -v buildkite-agent &>/dev/null; then
    echo "error: Cannot find buildkite-agent, please install it:"
    echo "https://buildkite.com/docs/agent/v3/install"
    exit 1
  fi
}

function download_buildkite_artifact() {
  # Check if at least one argument is provided
  if [ $# -eq 0 ]; then
    echo "error: No path provided for artifact download"
    exit 1
  fi

  local path="$1"
  shift
  local split="0"
  local args=()

  while [ $# -gt 0 ]; do
    case "$1" in
      --split)
        split="1"
        shift
        ;;
      *)
        args+=("$1")
        shift
        ;;
    esac
  done

  if [ "$split" == "1" ]; then
    run_command buildkite-agent artifact download "$path.*" . "${args[@]:-}"
    run_command cat "$path".?? >"$path"
    run_command rm -f "$path".??
  else
    run_command buildkite-agent artifact download "$path" . "${args[@]:-}"
  fi

  if [[ "$path" != *"*"* ]] && [ ! -f "$path" ]; then
    echo "error: Could not find artifact: $path"
    exit 1
  fi
}

function run_command() {
  set -x
  "$@"
  { set +x; } 2>/dev/null
}

assert_buildkite_agent
download_buildkite_artifact "$@"
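Callers source this script with an artifact path plus optional flags: --split reassembles a chunked artifact from its numbered parts, and every other argument (such as --step) is forwarded unchanged to buildkite-agent artifact download. As used in build-bun.sh earlier in this change:

    source .buildkite/scripts/download-artifact.sh "build/bun-zig.o" --step "$BUILDKITE_GROUP_KEY-build-zig"
    source .buildkite/scripts/download-artifact.sh "build/bun-cpp-objects.a" --step "$BUILDKITE_GROUP_KEY-build-cpp" --split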
146	.buildkite/scripts/env.sh	Executable file
@@ -0,0 +1,146 @@
#!/bin/bash

set -euo pipefail

BUILDKITE_REPO=${BUILDKITE_REPO:-}
BUILDKITE_CLEAN_CHECKOUT=${BUILDKITE_CLEAN_CHECKOUT:-}
BUILDKITE_BRANCH=${BUILDKITE_BRANCH:-}
CCACHE_DIR=${CCACHE_DIR:-}
SCCACHE_DIR=${SCCACHE_DIR:-}
ZIG_LOCAL_CACHE_DIR=${ZIG_LOCAL_CACHE_DIR:-}
ZIG_GLOBAL_CACHE_DIR=${ZIG_GLOBAL_CACHE_DIR:-}
BUN_DEPS_CACHE_DIR=${BUN_DEPS_CACHE_DIR:-}
BUN_DEPS_CACHE_DIR=${BUN_DEPS_CACHE_DIR:-}
BUILDKITE_STEP_KEY=${BUILDKITE_STEP_KEY:-}

ROOT_DIR="$(realpath "$(dirname "$0")/../../")"

# Fail if we cannot find the root directory
if [ ! -d "$ROOT_DIR" ]; then
  echo "error: Cannot find root directory: '$ROOT_DIR'" 1>&2
  exit 1
fi

function assert_os() {
  local os="$(uname -s)"
  case "$os" in
    Linux)
      echo "linux"
      ;;
    Darwin)
      echo "darwin"
      ;;
    *)
      echo "error: Unsupported operating system: $os" 1>&2
      exit 1
      ;;
  esac
}

function assert_arch() {
  local arch="$(uname -m)"
  case "$arch" in
    aarch64 | arm64)
      echo "aarch64"
      ;;
    x86_64 | amd64)
      echo "x64"
      ;;
    *)
      echo "error: Unknown architecture: $arch" 1>&2
      exit 1
      ;;
  esac
}

function assert_build() {
  if [ -z "$BUILDKITE_REPO" ]; then
    echo "error: Cannot find repository for this build"
    exit 1
  fi
  if [ -z "$BUILDKITE_COMMIT" ]; then
    echo "error: Cannot find commit for this build"
    exit 1
  fi
  if [ -z "$BUILDKITE_STEP_KEY" ]; then
    echo "error: Cannot find step key for this build"
    exit 1
  fi
  if [ -n "$BUILDKITE_GROUP_KEY" ] && [[ "$BUILDKITE_STEP_KEY" != "$BUILDKITE_GROUP_KEY"* ]]; then
    echo "error: Build step '$BUILDKITE_STEP_KEY' does not start with group key '$BUILDKITE_GROUP_KEY'"
    exit 1
  fi
  # Skip os and arch checks for Zig, since it's cross-compiled on macOS
  if [[ "$BUILDKITE_STEP_KEY" != *"zig"* ]]; then
    local os="$(assert_os)"
    if [[ "$BUILDKITE_STEP_KEY" != *"$os"* ]]; then
      echo "error: Build step '$BUILDKITE_STEP_KEY' does not match operating system '$os'"
      exit 1
    fi
    local arch="$(assert_arch)"
    if [[ "$BUILDKITE_STEP_KEY" != *"$arch"* ]]; then
      echo "error: Build step '$BUILDKITE_STEP_KEY' does not match architecture '$arch'"
      exit 1
    fi
  fi
}

function assert_buildkite_agent() {
  if (! command -v buildkite-agent &>/dev/null); then
    echo "error: Cannot find buildkite-agent, please install it:"
    echo "https://buildkite.com/docs/agent/v3/install"
    exit 1
  fi
}

function export_environment() {
  source "${ROOT_DIR}/scripts/env.sh"
  source "${ROOT_DIR}/scripts/update-submodules.sh"

  { set +x; } 2>/dev/null
  export GIT_SHA="$BUILDKITE_COMMIT"
  if [ "$BUILDKITE_CLEAN_CHECKOUT" == "true" ] || [ "$BUILDKITE_BRANCH" == "main" ]; then
    local tmpdir="$(mktemp -d 2>/dev/null || mktemp -d -t 'new')"
    export CCACHE_DIR="$tmpdir/.cache/ccache"
    export SCCACHE_DIR="$tmpdir/.cache/sccache"
    export ZIG_LOCAL_CACHE_DIR="$tmpdir/.cache/zig-cache"
    export ZIG_GLOBAL_CACHE_DIR="$tmpdir/.cache/zig-cache"
    export BUN_DEPS_CACHE_DIR="$tmpdir/.cache/bun-deps"
    export CCACHE_RECACHE="1"
  else
    export CCACHE_DIR="$HOME/.cache/ccache/$BUILDKITE_STEP_KEY"
    export SCCACHE_DIR="$HOME/.cache/sccache/$BUILDKITE_STEP_KEY"
    export ZIG_LOCAL_CACHE_DIR="$HOME/.cache/zig-cache/$BUILDKITE_STEP_KEY"
    export ZIG_GLOBAL_CACHE_DIR="$HOME/.cache/zig-cache/$BUILDKITE_STEP_KEY"
    export BUN_DEPS_CACHE_DIR="$HOME/.cache/bun-deps/$BUILDKITE_STEP_KEY"
  fi
  if [ "$(assert_os)" == "linux" ]; then
    export USE_LTO="ON"
  fi
  if [ "$(assert_arch)" == "aarch64" ]; then
    export CPU_TARGET="native"
  elif [[ "$BUILDKITE_STEP_KEY" == *"baseline"* ]]; then
    export CPU_TARGET="nehalem"
  else
    export CPU_TARGET="haswell"
  fi
  if $(buildkite-agent meta-data exists release &>/dev/null); then
    export CMAKE_BUILD_TYPE="$(buildkite-agent meta-data get release)"
  else
    export CMAKE_BUILD_TYPE="Release"
  fi
  if $(buildkite-agent meta-data exists canary &>/dev/null); then
    export CANARY="$(buildkite-agent meta-data get canary)"
  else
    export CANARY="1"
  fi
  if $(buildkite-agent meta-data exists assertions &>/dev/null); then
    export USE_DEBUG_JSC="$(buildkite-agent meta-data get assertions)"
  else
    export USE_DEBUG_JSC="OFF"
  fi
}

assert_build
assert_buildkite_agent
export_environment
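The net effect of export_environment for a typical step can be read off the branches above. An illustrative trace (comments only; the step key is hypothetical and no Buildkite meta-data overrides are set):

    # BUILDKITE_STEP_KEY=linux-x64-baseline-build-cpp, BUILDKITE_BRANCH=my-branch, x86_64 host
    # => CCACHE_DIR=$HOME/.cache/ccache/linux-x64-baseline-build-cpp   (persistent cache, not a clean checkout)
    # => USE_LTO=ON          (Linux host)
    # => CPU_TARGET=nehalem  (step key contains "baseline")
    # => CMAKE_BUILD_TYPE=Release, CANARY=1, USE_DEBUG_JSC=OFF  (defaults when meta-data is absent)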
97	.buildkite/scripts/prepare-build.sh	Executable file
@@ -0,0 +1,97 @@
#!/bin/bash

set -eo pipefail

function assert_build() {
  if [ -z "$BUILDKITE_REPO" ]; then
    echo "error: Cannot find repository for this build"
    exit 1
  fi
  if [ -z "$BUILDKITE_COMMIT" ]; then
    echo "error: Cannot find commit for this build"
    exit 1
  fi
}

function assert_buildkite_agent() {
  if ! command -v buildkite-agent &> /dev/null; then
    echo "error: Cannot find buildkite-agent, please install it:"
    echo "https://buildkite.com/docs/agent/v3/install"
    exit 1
  fi
}

function assert_jq() {
  assert_command "jq" "jq" "https://stedolan.github.io/jq/"
}

function assert_curl() {
  assert_command "curl" "curl" "https://curl.se/download.html"
}

function assert_command() {
  local command="$1"
  local package="$2"
  local help_url="$3"
  if ! command -v "$command" &> /dev/null; then
    echo "warning: $command is not installed, installing..."
    if command -v brew &> /dev/null; then
      HOMEBREW_NO_AUTO_UPDATE=1 brew install "$package"
    else
      echo "error: Cannot install $command, please install it"
      if [ -n "$help_url" ]; then
        echo ""
        echo "hint: See $help_url for help"
      fi
      exit 1
    fi
  fi
}

function assert_release() {
  if [ "$RELEASE" == "1" ]; then
    run_command buildkite-agent meta-data set canary "0"
  fi
}

function assert_canary() {
  local canary="$(buildkite-agent meta-data get canary 2>/dev/null)"
  if [ -z "$canary" ]; then
    local repo=$(echo "$BUILDKITE_REPO" | sed -E 's#https://github.com/([^/]+)/([^/]+).git#\1/\2#g')
    local tag="$(curl -sL "https://api.github.com/repos/$repo/releases/latest" | jq -r ".tag_name")"
    if [ "$tag" == "null" ]; then
      canary="1"
    else
      local revision=$(curl -sL "https://api.github.com/repos/$repo/compare/$tag...$BUILDKITE_COMMIT" | jq -r ".ahead_by")
      if [ "$revision" == "null" ]; then
        canary="1"
      else
        canary="$revision"
      fi
    fi
    run_command buildkite-agent meta-data set canary "$canary"
  fi
}

function upload_buildkite_pipeline() {
  local path="$1"
  if [ ! -f "$path" ]; then
    echo "error: Cannot find pipeline: $path"
    exit 1
  fi
  run_command buildkite-agent pipeline upload "$path"
}

function run_command() {
  set -x
  "$@"
  { set +x; } 2>/dev/null
}

assert_build
assert_buildkite_agent
assert_jq
assert_curl
assert_release
assert_canary
upload_buildkite_pipeline ".buildkite/ci.yml"
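assert_canary derives the canary number from how many commits the current build is ahead of the latest GitHub release, then stores it in Buildkite meta-data for the later steps. A standalone sketch of the same two API queries (the repo slug here is taken from the mirror header, not computed from $BUILDKITE_REPO as the script does):

    repo="oven-sh/bun"
    tag="$(curl -sL "https://api.github.com/repos/$repo/releases/latest" | jq -r ".tag_name")"
    curl -sL "https://api.github.com/repos/$repo/compare/$tag...$BUILDKITE_COMMIT" | jq -r ".ahead_by"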
47	.buildkite/scripts/upload-artifact.ps1	Executable file
@@ -0,0 +1,47 @@
param (
  [Parameter(Mandatory=$true)]
  [string[]] $Paths,
  [switch] $Split
)

$ErrorActionPreference = "Stop"

function Assert-Buildkite-Agent() {
  if (-not (Get-Command "buildkite-agent" -ErrorAction SilentlyContinue)) {
    Write-Error "Cannot find buildkite-agent, please install it: https://buildkite.com/docs/agent/v3/install"
    exit 1
  }
}

function Assert-Split-File() {
  if (-not (Get-Command "Split-File" -ErrorAction SilentlyContinue)) {
    Write-Error "Cannot find Split-File, please install it: https://www.powershellgallery.com/packages/FileSplitter/1.3"
    exit 1
  }
}

function Upload-Buildkite-Artifact() {
  param (
    [Parameter(Mandatory=$true)]
    [string] $Path
  )
  if (-not (Test-Path $Path)) {
    Write-Error "Could not find artifact: $Path"
    exit 1
  }
  if ($Split) {
    Remove-Item -Path "$Path.*" -Force
    Split-File -Path (Resolve-Path $Path) -PartSizeBytes "50MB" -Verbose
    $Path = "$Path.*"
  }
  & buildkite-agent artifact upload "$Path" --debug --debug-http
}

Assert-Buildkite-Agent
if ($Split) {
  Assert-Split-File
}

foreach ($Path in $Paths) {
  Upload-Buildkite-Artifact $Path
}
71	.buildkite/scripts/upload-artifact.sh	Executable file
@@ -0,0 +1,71 @@
#!/bin/bash

set -euo pipefail

function assert_buildkite_agent() {
  if ! command -v buildkite-agent &>/dev/null; then
    echo "error: Cannot find buildkite-agent, please install it:"
    echo "https://buildkite.com/docs/agent/v3/install"
    exit 1
  fi
}

function assert_split() {
  if ! command -v split &>/dev/null; then
    echo "error: Cannot find split, please install it:"
    echo "https://www.gnu.org/software/coreutils/split"
    exit 1
  fi
}

function upload_buildkite_artifact() {
  if [ -z "${1:-}" ]; then
    return
  fi

  local path="$1"
  shift
  local split="0"
  local args=() # Initialize args as an empty array
  while true; do
    if [ -z "${1:-}" ]; then
      break
    fi
    case "$1" in
      --split)
        split="1"
        shift
        ;;
      *)
        args+=("$1")
        shift
        ;;
    esac
  done
  if [ ! -f "$path" ]; then
    echo "error: Could not find artifact: $path"
    exit 1
  fi
  if [ "$split" == "1" ]; then
    run_command rm -f "$path."*
    run_command split -b 50MB -d "$path" "$path."
    if [ "${args[@]:-}" != "" ]; then
      run_command buildkite-agent artifact upload "$path.*" "${args[@]}"
    else
      run_command buildkite-agent artifact upload "$path.*"
    fi
  elif [ "${args[@]:-}" != "" ]; then
    run_command buildkite-agent artifact upload "$path" "${args[@]:-}"
  else
    run_command buildkite-agent artifact upload "$path"
  fi
}

function run_command() {
  set -x
  "$@"
  { set +x; } 2>/dev/null
}

assert_buildkite_agent
upload_buildkite_artifact "$@"
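The --split path pairs with download-artifact.sh: split -b 50MB -d writes numbered 50 MB parts next to the original file, and the download side joins them back with cat before deleting the parts. A minimal round trip outside of Buildkite, using the same two commands the scripts use:

    split -b 50MB -d bun-cpp-objects.a bun-cpp-objects.a.   # produces bun-cpp-objects.a.00, .01, ...
    cat bun-cpp-objects.a.?? > bun-cpp-objects.a            # reassembly, as download-artifact.sh does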
@@ -3,7 +3,19 @@
set -eo pipefail

function assert_main() {
  if [[ "$BUILDKITE_PULL_REQUEST_REPO" && "$BUILDKITE_REPO" != "$BUILDKITE_PULL_REQUEST_REPO" ]]; then
  if [ "$RELEASE" == "1" ]; then
    echo "info: Skipping canary release because this is a release build"
    exit 0
  fi
  if [ -z "$BUILDKITE_REPO" ]; then
    echo "error: Cannot find repository for this build"
    exit 1
  fi
  if [ -z "$BUILDKITE_COMMIT" ]; then
    echo "error: Cannot find commit for this build"
    exit 1
  fi
  if [ -n "$BUILDKITE_PULL_REQUEST_REPO" ] && [ "$BUILDKITE_REPO" != "$BUILDKITE_PULL_REQUEST_REPO" ]; then
    echo "error: Cannot upload release from a fork"
    exit 1
  fi
@@ -18,77 +30,191 @@ function assert_main() {
}

function assert_buildkite_agent() {
  if ! command -v buildkite-agent &> /dev/null; then
  if ! command -v "buildkite-agent" &> /dev/null; then
    echo "error: Cannot find buildkite-agent, please install it:"
    echo "https://buildkite.com/docs/agent/v3/install"
    exit 1
  fi
}

function assert_gh() {
  if ! command -v gh &> /dev/null; then
    echo "warning: gh is not installed, installing..."
function assert_github() {
  assert_command "gh" "gh" "https://github.com/cli/cli#installation"
  assert_buildkite_secret "GITHUB_TOKEN"
  # gh expects the token in $GH_TOKEN
  export GH_TOKEN="$GITHUB_TOKEN"
}

function assert_aws() {
  assert_command "aws" "awscli" "https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html"
  for secret in "AWS_ACCESS_KEY_ID" "AWS_SECRET_ACCESS_KEY" "AWS_ENDPOINT"; do
    assert_buildkite_secret "$secret"
  done
  assert_buildkite_secret "AWS_BUCKET" --skip-redaction
}

function assert_sentry() {
  assert_command "sentry-cli" "getsentry/tools/sentry-cli" "https://docs.sentry.io/cli/installation/"
  for secret in "SENTRY_AUTH_TOKEN" "SENTRY_ORG" "SENTRY_PROJECT"; do
    assert_buildkite_secret "$secret"
  done
}

function run_command() {
  set -x
  "$@"
  { set +x; } 2>/dev/null
}

function assert_command() {
  local command="$1"
  local package="$2"
  local help_url="$3"
  if ! command -v "$command" &> /dev/null; then
    echo "warning: $command is not installed, installing..."
    if command -v brew &> /dev/null; then
      brew install gh
      HOMEBREW_NO_AUTO_UPDATE=1 run_command brew install "$package"
    else
      echo "error: Cannot install gh, please install it:"
      echo "https://github.com/cli/cli#installation"
      echo "error: Cannot install $command, please install it"
      if [ -n "$help_url" ]; then
        echo ""
        echo "hint: See $help_url for help"
      fi
      exit 1
    fi
  fi
}

function assert_gh_token() {
  local token=$(buildkite-agent secret get GITHUB_TOKEN)
  if [ -z "$token" ]; then
    echo "error: Cannot find GITHUB_TOKEN secret"
function assert_buildkite_secret() {
  local key="$1"
  local value=$(buildkite-agent secret get "$key" ${@:2})
  if [ -z "$value" ]; then
    echo "error: Cannot find $key secret"
    echo ""
    echo "hint: Create a secret named GITHUB_TOKEN with a GitHub access token:"
    echo "hint: Create a secret named $key with a value:"
    echo "https://buildkite.com/docs/pipelines/buildkite-secrets"
    exit 1
  fi
  export GH_TOKEN="$token"
  export "$key"="$value"
}

function download_artifact() {
  local name=$1
  buildkite-agent artifact download "$name" .
  if [ ! -f "$name" ]; then
function release_tag() {
  local version="$1"
  if [ "$version" == "canary" ]; then
    echo "canary"
  else
    echo "bun-v$version"
  fi
}

function create_sentry_release() {
  local version="$1"
  local release="$version"
  if [ "$version" == "canary" ]; then
    release="$BUILDKITE_COMMIT-canary"
  fi
  run_command sentry-cli releases new "$release" --finalize
  run_command sentry-cli releases set-commits "$release" --auto --ignore-missing
  if [ "$version" == "canary" ]; then
    run_command sentry-cli deploys new --env="canary" --release="$release"
  fi
}

function download_buildkite_artifact() {
  local name="$1"
  local dir="$2"
  if [ -z "$dir" ]; then
    dir="."
  fi
  run_command buildkite-agent artifact download "$name" "$dir"
  if [ ! -f "$dir/$name" ]; then
    echo "error: Cannot find Buildkite artifact: $name"
    exit 1
  fi
}

function upload_assets() {
  local tag=$1
  local files=${@:2}
  gh release upload "$tag" $files --clobber --repo "$BUILDKITE_REPO"
function upload_github_asset() {
  local version="$1"
  local tag="$(release_tag "$version")"
  local file="$2"
  run_command gh release upload "$tag" "$file" --clobber --repo "$BUILDKITE_REPO"

  # Sometimes the upload fails, maybe this is a race condition in the gh CLI?
  while [ "$(gh release view "$tag" --repo "$BUILDKITE_REPO" | grep -c "$file")" -eq 0 ]; do
    echo "warn: Uploading $file to $tag failed, retrying..."
    sleep "$((RANDOM % 5 + 1))"
    run_command gh release upload "$tag" "$file" --clobber --repo "$BUILDKITE_REPO"
  done
}

assert_main
assert_buildkite_agent
assert_gh
assert_gh_token
function update_github_release() {
  local version="$1"
  local tag="$(release_tag "$version")"
  if [ "$tag" == "canary" ]; then
    sleep 5 # There is possibly a race condition where this overwrites artifacts?
    run_command gh release edit "$tag" --repo "$BUILDKITE_REPO" \
      --notes "This release of Bun corresponds to the commit: $BUILDKITE_COMMIT"
  fi
}

declare artifacts=(
  bun-darwin-aarch64.zip
  bun-darwin-aarch64-profile.zip
  bun-darwin-x64.zip
  bun-darwin-x64-profile.zip
  bun-linux-aarch64.zip
  bun-linux-aarch64-profile.zip
  bun-linux-x64.zip
  bun-linux-x64-profile.zip
  bun-linux-x64-baseline.zip
  bun-linux-x64-baseline-profile.zip
  bun-windows-x64.zip
  bun-windows-x64-profile.zip
  bun-windows-x64-baseline.zip
  bun-windows-x64-baseline-profile.zip
)
function upload_s3_file() {
  local folder="$1"
  local file="$2"
  run_command aws --endpoint-url="$AWS_ENDPOINT" s3 cp "$file" "s3://$AWS_BUCKET/$folder/$file"
}

for artifact in "${artifacts[@]}"; do
  download_artifact $artifact
done
function create_release() {
  assert_main
  assert_buildkite_agent
  assert_github
  assert_aws
  assert_sentry

  upload_assets "canary" "${artifacts[@]}"
  local tag="$1" # 'canary' or 'x.y.z'
  local artifacts=(
    bun-darwin-aarch64.zip
    bun-darwin-aarch64-profile.zip
    bun-darwin-x64.zip
    bun-darwin-x64-profile.zip
    bun-linux-aarch64.zip
    bun-linux-aarch64-profile.zip
    bun-linux-x64.zip
    bun-linux-x64-profile.zip
    bun-linux-x64-baseline.zip
    bun-linux-x64-baseline-profile.zip
    bun-windows-x64.zip
    bun-windows-x64-profile.zip
    bun-windows-x64-baseline.zip
    bun-windows-x64-baseline-profile.zip
  )

  function upload_artifact() {
    local artifact="$1"
    download_buildkite_artifact "$artifact"
    if [ "$tag" == "canary" ]; then
      upload_s3_file "releases/$BUILDKITE_COMMIT-canary" "$artifact" &
    else
      upload_s3_file "releases/$BUILDKITE_COMMIT" "$artifact" &
    fi
    upload_s3_file "releases/$tag" "$artifact" &
    upload_github_asset "$tag" "$artifact" &
    wait
  }

  for artifact in "${artifacts[@]}"; do
    upload_artifact "$artifact"
  done

  update_github_release "$tag"
  create_sentry_release "$tag"
}

function assert_canary() {
  local canary="$(buildkite-agent meta-data get canary 2>/dev/null)"
  if [ -z "$canary" ] || [ "$canary" == "0" ]; then
    echo "warn: Skipping release because this is not a canary build"
    exit 0
  fi
}

assert_canary
create_release "canary"
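In the reworked release script, release_tag and upload_s3_file decide where each zip ends up: the GitHub release tag is either "canary" or "bun-v" plus the version, and every artifact is copied to both a commit-scoped and a tag-scoped S3 prefix. Roughly, for one artifact (the bucket name is whatever the AWS_BUCKET secret holds):

    # version "canary"  -> GitHub release tag "canary"
    # version "1.1.20"  -> GitHub release tag "bun-v1.1.20"   (illustrative version number)
    # s3://$AWS_BUCKET/releases/$BUILDKITE_COMMIT-canary/bun-linux-x64.zip
    # s3://$AWS_BUCKET/releases/canary/bun-linux-x64.zip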
3	.gitattributes	vendored
@@ -45,3 +45,6 @@ examples/**/* linguist-documentation

src/deps/*.c linguist-vendored
src/deps/brotli/** linguist-vendored

test/js/node/test/fixtures linguist-vendored
test/js/node/test/common linguist-vendored
2	.github/actions/setup-bun/action.yml	vendored
@@ -42,7 +42,7 @@ runs:
          canary) release="canary";;
          *) release="bun-v${{ inputs.bun-version }}";;
        esac
        curl -LO "${{ inputs.download-url }}/${release}/${target}.zip"
        curl -LO "${{ inputs.download-url }}/${release}/${target}.zip" --retry 5
        unzip ${target}.zip
        mkdir -p ${{ runner.temp }}/.bun/bin
        mv ${target}/bun* ${{ runner.temp }}/.bun/bin/
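The only change here is appending --retry 5 to the download, so transient failures (timeouts, temporary server errors) are retried with increasing delays instead of failing the job on the first attempt. Equivalent standalone command, with a purely illustrative URL in place of the action's download-url input:

    curl -LO --retry 5 "https://example.com/releases/canary/bun-linux-x64.zip"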
286	.github/workflows/build-darwin.yml	vendored
@@ -1,286 +0,0 @@
name: Build Darwin

permissions:
  contents: read
  actions: write

on:
  workflow_call:
    inputs:
      runs-on:
        type: string
        default: macos-13-large
      tag:
        type: string
        required: true
      arch:
        type: string
        required: true
      cpu:
        type: string
        required: true
      assertions:
        type: boolean
      canary:
        type: boolean
      no-cache:
        type: boolean

env:
  LLVM_VERSION: 18
  BUN_VERSION: 1.1.8
  LC_CTYPE: "en_US.UTF-8"
  LC_ALL: "en_US.UTF-8"
  # LTO is disabled because we cannot use lld on macOS currently
  BUN_ENABLE_LTO: "0"

jobs:
  build-submodules:
    name: Build Submodules
    runs-on: ${{ inputs.runs-on }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          sparse-checkout: |
            .gitmodules
            src/deps
            scripts
      - name: Hash Submodules
        id: hash
        run: |
          print_versions() {
            git submodule | grep -v WebKit
            echo "LLVM_VERSION=${{ env.LLVM_VERSION }}"
            cat $(echo scripts/build*.sh scripts/all-dependencies.sh | tr " " "\n" | sort)
          }
          echo "hash=$(print_versions | shasum)" >> $GITHUB_OUTPUT
      - name: Install Dependencies
        env:
          HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
          HOMEBREW_NO_AUTO_UPDATE: 1
          HOMEBREW_NO_INSTALL_CLEANUP: 1
        run: |
          brew install \
            llvm@${{ env.LLVM_VERSION }} \
            ccache \
            rust \
            pkg-config \
            coreutils \
            libtool \
            cmake \
            libiconv \
            automake \
            openssl@1.1 \
            ninja \
            golang \
            gnu-sed --force --overwrite
          echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
          echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
          echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH
          brew link --overwrite llvm@$LLVM_VERSION
      - name: Clone Submodules
        run: |
          ./scripts/update-submodules.sh
      - name: Build Submodules
        env:
          CPU_TARGET: ${{ inputs.cpu }}
          BUN_DEPS_OUT_DIR: ${{ runner.temp }}/bun-deps
        run: |
          mkdir -p $BUN_DEPS_OUT_DIR
          ./scripts/all-dependencies.sh
      - name: Upload bun-${{ inputs.tag }}-deps
        uses: actions/upload-artifact@v4
        with:
          name: bun-${{ inputs.tag }}-deps
          path: ${{ runner.temp }}/bun-deps
          if-no-files-found: error
  build-cpp:
    name: Build C++
    runs-on: ${{ inputs.runs-on }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          submodules: recursive
      # TODO: Figure out how to cache homebrew dependencies
      - name: Install Dependencies
        env:
          HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
          HOMEBREW_NO_AUTO_UPDATE: 1
          HOMEBREW_NO_INSTALL_CLEANUP: 1
        run: |
          brew install \
            llvm@${{ env.LLVM_VERSION }} \
            ccache \
            rust \
            pkg-config \
            coreutils \
            libtool \
            cmake \
            libiconv \
            automake \
            openssl@1.1 \
            ninja \
            golang \
            gnu-sed --force --overwrite
          echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
          echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
          echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH
          brew link --overwrite llvm@$LLVM_VERSION
      - name: Setup Bun
        uses: ./.github/actions/setup-bun
        with:
          bun-version: ${{ env.BUN_VERSION }}
      - name: Compile
        env:
          CPU_TARGET: ${{ inputs.cpu }}
          SOURCE_DIR: ${{ github.workspace }}
          OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
          BUN_DEPS_OUT_DIR: ${{ runner.temp }}/bun-deps
          CCACHE_DIR: ${{ runner.temp }}/ccache
        run: |
          mkdir -p $OBJ_DIR
          cd $OBJ_DIR
          cmake -S $SOURCE_DIR -B $OBJ_DIR \
            -G Ninja \
            -DCMAKE_BUILD_TYPE=Release \
            -DUSE_LTO=ON \
            -DBUN_CPP_ONLY=1 \
            -DNO_CONFIGURE_DEPENDS=1
          chmod +x compile-cpp-only.sh
          ./compile-cpp-only.sh -v
      - name: Upload bun-${{ inputs.tag }}-cpp
        uses: actions/upload-artifact@v4
        with:
          name: bun-${{ inputs.tag }}-cpp
          path: ${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a
          if-no-files-found: error
  build-zig:
    name: Build Zig
    uses: ./.github/workflows/build-zig.yml
    with:
      os: darwin
      only-zig: true
      tag: ${{ inputs.tag }}
      arch: ${{ inputs.arch }}
      cpu: ${{ inputs.cpu }}
      assertions: ${{ inputs.assertions }}
      canary: ${{ inputs.canary }}
      no-cache: ${{ inputs.no-cache }}
  link:
    name: Link
    runs-on: ${{ inputs.runs-on }}
    needs:
      - build-submodules
      - build-cpp
      - build-zig
    steps:
      - uses: actions/checkout@v4
      # TODO: Figure out how to cache homebrew dependencies
      - name: Install Dependencies
        env:
          HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
          HOMEBREW_NO_AUTO_UPDATE: 1
          HOMEBREW_NO_INSTALL_CLEANUP: 1
        run: |
          brew install \
            llvm@${{ env.LLVM_VERSION }} \
            ccache \
            rust \
            pkg-config \
            coreutils \
            libtool \
            cmake \
            libiconv \
            automake \
            openssl@1.1 \
            ninja \
            golang \
            gnu-sed --force --overwrite
          echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
          echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
          echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH
          brew link --overwrite llvm@$LLVM_VERSION
      - name: Setup Bun
        uses: ./.github/actions/setup-bun
        with:
          bun-version: ${{ env.BUN_VERSION }}
      - name: Download bun-${{ inputs.tag }}-deps
        uses: actions/download-artifact@v4
        with:
          name: bun-${{ inputs.tag }}-deps
          path: ${{ runner.temp }}/bun-deps
      - name: Download bun-${{ inputs.tag }}-cpp
        uses: actions/download-artifact@v4
        with:
          name: bun-${{ inputs.tag }}-cpp
          path: ${{ runner.temp }}/bun-cpp-obj
      - name: Download bun-${{ inputs.tag }}-zig
        uses: actions/download-artifact@v4
        with:
          name: bun-${{ inputs.tag }}-zig
          path: ${{ runner.temp }}/release
      - name: Link
        env:
          CPU_TARGET: ${{ inputs.cpu }}
        run: |
          SRC_DIR=$PWD
          mkdir ${{ runner.temp }}/link-build
          cd ${{ runner.temp }}/link-build
          cmake $SRC_DIR \
            -G Ninja \
            -DCMAKE_BUILD_TYPE=Release \
            -DUSE_LTO=ON \
            -DBUN_LINK_ONLY=1 \
            -DBUN_ZIG_OBJ_DIR="${{ runner.temp }}/release" \
            -DBUN_CPP_ARCHIVE="${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a" \
            -DBUN_DEPS_OUT_DIR="${{ runner.temp }}/bun-deps" \
            -DNO_CONFIGURE_DEPENDS=1
          ninja -v
      - name: Prepare
        run: |
          cd ${{ runner.temp }}/link-build
          chmod +x bun-profile bun
          mkdir -p bun-${{ inputs.tag }}-profile/ bun-${{ inputs.tag }}/
          mv bun-profile bun-${{ inputs.tag }}-profile/bun-profile
          if [ -f bun-profile.dSYM || -d bun-profile.dSYM ]; then
            mv bun-profile.dSYM bun-${{ inputs.tag }}-profile/bun-profile.dSYM
          fi
          if [ -f bun.dSYM || -d bun.dSYM ]; then
            mv bun.dSYM bun-${{ inputs.tag }}-profile/bun-profile.dSYM
          fi
          mv bun bun-${{ inputs.tag }}/bun
          zip -r bun-${{ inputs.tag }}-profile.zip bun-${{ inputs.tag }}-profile
          zip -r bun-${{ inputs.tag }}.zip bun-${{ inputs.tag }}
      - name: Upload bun-${{ inputs.tag }}
        uses: actions/upload-artifact@v4
        with:
          name: bun-${{ inputs.tag }}
          path: ${{ runner.temp }}/link-build/bun-${{ inputs.tag }}.zip
          if-no-files-found: error
      - name: Upload bun-${{ inputs.tag }}-profile
        uses: actions/upload-artifact@v4
        with:
          name: bun-${{ inputs.tag }}-profile
          path: ${{ runner.temp }}/link-build/bun-${{ inputs.tag }}-profile.zip
          if-no-files-found: error
  on-failure:
    if: ${{ github.repository_owner == 'oven-sh' && failure() }}
    name: On Failure
    needs: link
    runs-on: ubuntu-latest
    steps:
      - name: Send Message
        uses: sarisia/actions-status-discord@v1
        with:
          webhook: ${{ secrets.DISCORD_WEBHOOK }}
          nodetail: true
          color: "#FF0000"
          title: ""
          description: |
            ### ❌ [${{ github.event.pull_request.title }}](${{ github.event.pull_request.html_url }})

            @${{ github.actor }}, the build for bun-${{ inputs.tag }} failed.

            **[View logs](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})**
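The Hash Submodules step in the deleted workflow fingerprints the submodule revisions (excluding WebKit), the pinned LLVM version, and the dependency build scripts; build-windows.yml below uses the analogous hash as its actions/cache key for the prebuilt dependencies. The same fingerprint can be reproduced locally from a checkout, assuming LLVM_VERSION is 18 as in the workflow's env block:

    print_versions() {
      git submodule | grep -v WebKit
      echo "LLVM_VERSION=18"
      cat $(echo scripts/build*.sh scripts/all-dependencies.sh | tr " " "\n" | sort)
    }
    print_versions | shasum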
64	.github/workflows/build-linux.yml	vendored
@@ -1,64 +0,0 @@
name: Build Linux

permissions:
  contents: read
  actions: write

on:
  workflow_call:
    inputs:
      runs-on:
        type: string
        required: true
      tag:
        type: string
        required: true
      arch:
        type: string
        required: true
      cpu:
        type: string
        required: true
      assertions:
        type: boolean
      zig-optimize:
        type: string
      canary:
        type: boolean
      no-cache:
        type: boolean

jobs:
  build:
    name: Build Linux
    uses: ./.github/workflows/build-zig.yml
    with:
      os: linux
      only-zig: false
      runs-on: ${{ inputs.runs-on }}
      tag: ${{ inputs.tag }}
      arch: ${{ inputs.arch }}
      cpu: ${{ inputs.cpu }}
      assertions: ${{ inputs.assertions }}
      zig-optimize: ${{ inputs.zig-optimize }}
      canary: ${{ inputs.canary }}
      no-cache: ${{ inputs.no-cache }}
  on-failure:
    if: ${{ github.repository_owner == 'oven-sh' && failure() }}
    name: On Failure
    needs: build
    runs-on: ubuntu-latest
    steps:
      - name: Send Message
        uses: sarisia/actions-status-discord@v1
        with:
          webhook: ${{ secrets.DISCORD_WEBHOOK }}
          nodetail: true
          color: "#FF0000"
          title: ""
          description: |
            ### ❌ [${{ github.event.pull_request.title }}](${{ github.event.pull_request.html_url }})

            @${{ github.actor }}, the build for bun-${{ inputs.tag }} failed.

            **[View logs](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})**
348	.github/workflows/build-windows.yml	vendored
@@ -1,348 +0,0 @@
name: Build Windows

permissions:
  contents: read
  actions: write

on:
  workflow_call:
    inputs:
      runs-on:
        type: string
        default: windows
      tag:
        type: string
        required: true
      arch:
        type: string
        required: true
      cpu:
        type: string
        required: true
      assertions:
        type: boolean
      canary:
        type: boolean
      no-cache:
        type: boolean
      bun-version:
        type: string
        default: 1.1.7

env:
  # Must specify exact version of LLVM for Windows
  LLVM_VERSION: 18.1.8
  BUN_VERSION: ${{ inputs.bun-version }}
  BUN_GARBAGE_COLLECTOR_LEVEL: 1
  BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: 1
  CI: true
  USE_LTO: 1

jobs:
  build-submodules:
    name: Build Submodules
    runs-on: ${{ inputs.runs-on }}
    steps:
      - name: Install Scoop
        run: |
          Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
          Invoke-RestMethod -Uri https://get.scoop.sh | Invoke-Expression
          Join-Path (Resolve-Path ~).Path "scoop\shims" >> $Env:GITHUB_PATH
      - name: Setup Git
        run: |
          git config --global core.autocrlf false
          git config --global core.eol lf
      - name: Checkout
        uses: actions/checkout@v4
        with:
          sparse-checkout: |
            .gitmodules
            src/deps
            scripts
      - name: Hash Submodules
        id: hash
        run: |
          $data = "$(& {
            git submodule | Where-Object { $_ -notmatch 'WebKit' }
            echo "LLVM_VERSION=${{ env.LLVM_VERSION }}"
            Get-Content -Path (Get-ChildItem -Path 'scripts/build*.ps1', 'scripts/all-dependencies.ps1', 'scripts/env.ps1' | Sort-Object -Property Name).FullName | Out-String
            echo 1
          })"
          $hash = ( -join ((New-Object -TypeName System.Security.Cryptography.SHA1CryptoServiceProvider).ComputeHash([System.Text.Encoding]::UTF8.GetBytes($data)) | ForEach-Object { $_.ToString("x2") } )).Substring(0, 10)
          echo "hash=${hash}" >> $env:GITHUB_OUTPUT
      - if: ${{ !inputs.no-cache }}
        name: Restore Cache
        id: cache
        uses: actions/cache/restore@v4
        with:
          path: bun-deps
          key: bun-${{ inputs.tag }}-deps-${{ steps.hash.outputs.hash }}
      - if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
        name: Install LLVM and Ninja
        run: |
          scoop install ninja
          scoop install llvm@${{ env.LLVM_VERSION }}
          scoop install nasm@2.16.01
      - if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
        name: Clone Submodules
        run: |
          .\scripts\update-submodules.ps1
      - if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
        name: Build Dependencies
        env:
          CPU_TARGET: ${{ inputs.cpu }}
          CCACHE_DIR: ccache
          USE_LTO: 1
        run: |
          .\scripts\env.ps1 ${{ contains(inputs.tag, '-baseline') && '-Baseline' || '' }}
          $env:BUN_DEPS_OUT_DIR = (mkdir -Force "./bun-deps")
          .\scripts\all-dependencies.ps1
      - name: Save Cache
        if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
        uses: actions/cache/save@v4
        with:
          path: bun-deps
          key: ${{ steps.cache.outputs.cache-primary-key }}
      - name: Upload bun-${{ inputs.tag }}-deps
        uses: actions/upload-artifact@v4
        with:
          name: bun-${{ inputs.tag }}-deps
          path: bun-deps
          if-no-files-found: error
  codegen:
    name: Codegen
    runs-on: ubuntu-latest
    steps:
      - name: Setup Git
        run: |
          git config --global core.autocrlf false
          git config --global core.eol lf
      - name: Checkout
        uses: actions/checkout@v4
      - name: Setup Bun
        uses: ./.github/actions/setup-bun
        with:
          bun-version: ${{ inputs.bun-version }}
      - name: Codegen
        run: |
          ./scripts/cross-compile-codegen.sh win32 x64
      - if: ${{ inputs.canary }}
        name: Calculate Revision
        run: |
          echo "canary_revision=$(GITHUB_TOKEN="${{ github.token }}"
          bash ./scripts/calculate-canary-revision.sh --raw)" > build-codegen-win32-x64/.canary_revision
      - name: Upload bun-${{ inputs.tag }}-codegen
        uses: actions/upload-artifact@v4
        with:
          name: bun-${{ inputs.tag }}-codegen
          path: build-codegen-win32-x64
          if-no-files-found: error
  build-cpp:
    name: Build C++
    needs: codegen
    runs-on: ${{ inputs.runs-on }}
    steps:
      - name: Install Scoop
        run: |
          Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
          Invoke-RestMethod -Uri https://get.scoop.sh | Invoke-Expression
          Join-Path (Resolve-Path ~).Path "scoop\shims" >> $Env:GITHUB_PATH
      - name: Setup Git
        run: |
          git config --global core.autocrlf false
          git config --global core.eol lf
      - name: Checkout
        uses: actions/checkout@v4
        with:
          submodules: recursive
      - name: Install LLVM and Ninja
        run: |
          scoop install ninja
          scoop install llvm@${{ env.LLVM_VERSION }}
      - name: Setup Bun
        uses: ./.github/actions/setup-bun
        with:
          bun-version: ${{ inputs.bun-version }}
      - if: ${{ !inputs.no-cache }}
        name: Restore Cache
        uses: actions/cache@v4
        with:
          path: ccache
          key: bun-${{ inputs.tag }}-cpp-${{ hashFiles('Dockerfile', 'Makefile', 'CMakeLists.txt', 'build.zig', 'scripts/**', 'src/**', 'packages/bun-usockets/src/**', 'packages/bun-uws/src/**') }}
          restore-keys: |
            bun-${{ inputs.tag }}-cpp-
      - name: Download bun-${{ inputs.tag }}-codegen
        uses: actions/download-artifact@v4
        with:
          name: bun-${{ inputs.tag }}-codegen
          path: build
      - name: Compile
        env:
          CPU_TARGET: ${{ inputs.cpu }}
          CCACHE_DIR: ccache
          USE_LTO: 1
        run: |
          # $CANARY_REVISION = if (Test-Path build/.canary_revision) { Get-Content build/.canary_revision } else { "0" }
          $CANARY_REVISION = 0
          .\scripts\env.ps1 ${{ contains(inputs.tag, '-baseline') && '-Baseline' || '' }}
          .\scripts\update-submodules.ps1
          .\scripts\build-libuv.ps1 -CloneOnly $True
          cd build
          cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Release `
            -DNO_CODEGEN=1 `
            -DUSE_LTO=1 `
            -DNO_CONFIGURE_DEPENDS=1 `
            "-DCANARY=${CANARY_REVISION}" `
            -DBUN_CPP_ONLY=1 ${{ contains(inputs.tag, '-baseline') && '-DUSE_BASELINE_BUILD=1' || '' }}
          if ($LASTEXITCODE -ne 0) { throw "CMake configuration failed" }
          .\compile-cpp-only.ps1 -v
          if ($LASTEXITCODE -ne 0) { throw "C++ compilation failed" }
      - name: Upload bun-${{ inputs.tag }}-cpp
        uses: actions/upload-artifact@v4
        with:
          name: bun-${{ inputs.tag }}-cpp
          path: build/bun-cpp-objects.a
          if-no-files-found: error
  build-zig:
    name: Build Zig
    uses: ./.github/workflows/build-zig.yml
    with:
      os: windows
      zig-optimize: ReleaseSafe
      only-zig: true
      tag: ${{ inputs.tag }}
      arch: ${{ inputs.arch }}
      cpu: ${{ inputs.cpu }}
      assertions: ${{ inputs.assertions }}
      canary: ${{ inputs.canary }}
      no-cache: ${{ inputs.no-cache }}
  link:
    name: Link
    runs-on: ${{ inputs.runs-on }}
    needs:
      - build-submodules
      - build-cpp
      - build-zig
      - codegen
    steps:
      - name: Install Scoop
        run: |
          Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
          Invoke-RestMethod -Uri https://get.scoop.sh | Invoke-Expression
          Join-Path (Resolve-Path ~).Path "scoop\shims" >> $Env:GITHUB_PATH
      - name: Setup Git
        run: |
          git config --global core.autocrlf false
          git config --global core.eol lf
      - name: Checkout
        uses: actions/checkout@v4
        with:
          submodules: recursive
      - name: Install Ninja
        run: |
          scoop install ninja
          scoop install llvm@${{ env.LLVM_VERSION }}
      - name: Setup Bun
        uses: ./.github/actions/setup-bun
        with:
          bun-version: ${{ inputs.bun-version }}
      - name: Download bun-${{ inputs.tag }}-deps
        uses: actions/download-artifact@v4
        with:
          name: bun-${{ inputs.tag }}-deps
          path: bun-deps
      - name: Download bun-${{ inputs.tag }}-cpp
        uses: actions/download-artifact@v4
        with:
          name: bun-${{ inputs.tag }}-cpp
          path: bun-cpp
      - name: Download bun-${{ inputs.tag }}-zig
        uses: actions/download-artifact@v4
        with:
          name: bun-${{ inputs.tag }}-zig
          path: bun-zig
      - name: Download bun-${{ inputs.tag }}-codegen
        uses: actions/download-artifact@v4
        with:
          name: bun-${{ inputs.tag }}-codegen
          path: build
      - if: ${{ !inputs.no-cache }}
|
||||
name: Restore Cache
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ccache
|
||||
key: bun-${{ inputs.tag }}-cpp-${{ hashFiles('Dockerfile', 'Makefile', 'CMakeLists.txt', 'build.zig', 'scripts/**', 'src/**', 'packages/bun-usockets/src/**', 'packages/bun-uws/src/**') }}
|
||||
restore-keys: |
|
||||
bun-${{ inputs.tag }}-cpp-
|
||||
- name: Link
|
||||
env:
|
||||
CPU_TARGET: ${{ inputs.cpu }}
|
||||
CCACHE_DIR: ccache
|
||||
run: |
|
||||
.\scripts\update-submodules.ps1
|
||||
.\scripts\env.ps1 ${{ contains(inputs.tag, '-baseline') && '-Baseline' || '' }}
|
||||
Set-Location build
|
||||
# $CANARY_REVISION = if (Test-Path build/.canary_revision) { Get-Content build/.canary_revision } else { "0" }
|
||||
$CANARY_REVISION = 0
|
||||
cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Release `
|
||||
-DNO_CODEGEN=1 `
|
||||
-DNO_CONFIGURE_DEPENDS=1 `
|
||||
"-DCANARY=${CANARY_REVISION}" `
|
||||
-DBUN_LINK_ONLY=1 `
|
||||
-DUSE_LTO=1 `
|
||||
"-DBUN_DEPS_OUT_DIR=$(Resolve-Path ../bun-deps)" `
|
||||
"-DBUN_CPP_ARCHIVE=$(Resolve-Path ../bun-cpp/bun-cpp-objects.a)" `
|
||||
"-DBUN_ZIG_OBJ_DIR=$(Resolve-Path ../bun-zig)" `
|
||||
${{ contains(inputs.tag, '-baseline') && '-DUSE_BASELINE_BUILD=1' || '' }}
|
||||
if ($LASTEXITCODE -ne 0) { throw "CMake configuration failed" }
|
||||
ninja -v
|
||||
if ($LASTEXITCODE -ne 0) { throw "Link failed!" }
|
||||
- name: Prepare
|
||||
run: |
|
||||
$Dist = mkdir -Force "bun-${{ inputs.tag }}"
|
||||
cp -r build\bun.exe "$Dist\bun.exe"
|
||||
Compress-Archive -Force "$Dist" "${Dist}.zip"
|
||||
$Dist = "$Dist-profile"
|
||||
MkDir -Force "$Dist"
|
||||
cp -r build\bun.exe "$Dist\bun.exe"
|
||||
cp -r build\bun.pdb "$Dist\bun.pdb"
|
||||
Compress-Archive -Force "$Dist" "$Dist.zip"
|
||||
.\build\bun.exe --print "JSON.stringify(require('bun:internal-for-testing').crash_handler.getFeatureData())" > .\features.json
|
||||
- name: Upload bun-${{ inputs.tag }}
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}
|
||||
path: bun-${{ inputs.tag }}.zip
|
||||
if-no-files-found: error
|
||||
- name: Upload bun-${{ inputs.tag }}-profile
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}-profile
|
||||
path: bun-${{ inputs.tag }}-profile.zip
|
||||
if-no-files-found: error
|
||||
- name: Upload bun-feature-data
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: bun-feature-data
|
||||
path: features.json
|
||||
if-no-files-found: error
|
||||
overwrite: true
|
||||
on-failure:
|
||||
if: ${{ github.repository_owner == 'oven-sh' && failure() }}
|
||||
name: On Failure
|
||||
needs: link
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Send Message
|
||||
uses: sarisia/actions-status-discord@v1
|
||||
with:
|
||||
webhook: ${{ secrets.DISCORD_WEBHOOK }}
|
||||
nodetail: true
|
||||
color: "#FF0000"
|
||||
title: ""
|
||||
description: |
|
||||
### ❌ [${{ github.event.pull_request.title }}](${{ github.event.pull_request.html_url }})
|
||||
|
||||
@${{ github.actor }}, the build for bun-${{ inputs.tag }} failed.
|
||||
|
||||
**[View logs](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})**
|
||||
122
.github/workflows/build-zig.yml
vendored
@@ -1,122 +0,0 @@
|
||||
name: Build Zig
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
actions: write
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
runs-on:
|
||||
type: string
|
||||
default: ${{ github.repository_owner != 'oven-sh' && 'ubuntu-latest' || inputs.only-zig && 'namespace-profile-bun-ci-linux-x64' || inputs.arch == 'x64' && 'namespace-profile-bun-ci-linux-x64' || 'namespace-profile-bun-ci-linux-aarch64' }}
|
||||
tag:
|
||||
type: string
|
||||
required: true
|
||||
os:
|
||||
type: string
|
||||
required: true
|
||||
arch:
|
||||
type: string
|
||||
required: true
|
||||
cpu:
|
||||
type: string
|
||||
required: true
|
||||
assertions:
|
||||
type: boolean
|
||||
default: false
|
||||
zig-optimize:
|
||||
type: string # 'ReleaseSafe' or 'ReleaseFast'
|
||||
default: ReleaseFast
|
||||
canary:
|
||||
type: boolean
|
||||
default: ${{ github.ref == 'refs/heads/main' }}
|
||||
only-zig:
|
||||
type: boolean
|
||||
default: true
|
||||
no-cache:
|
||||
type: boolean
|
||||
default: false
|
||||
|
||||
jobs:
|
||||
build-zig:
|
||||
name: ${{ inputs.only-zig && 'Build Zig' || 'Build & Link' }}
|
||||
runs-on: ${{ inputs.runs-on }}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: Calculate Cache Key
|
||||
id: cache
|
||||
run: |
|
||||
echo "key=${{ hashFiles('Dockerfile', 'Makefile', 'CMakeLists.txt', 'build.zig', 'scripts/**', 'src/**', 'packages/bun-usockets/src/**', 'packages/bun-uws/src/**') }}-canary-${{inputs.canary}}" >> $GITHUB_OUTPUT
|
||||
- if: ${{ !inputs.no-cache }}
|
||||
name: Restore Cache
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
key: bun-${{ inputs.tag }}-docker-${{ steps.cache.outputs.key }}
|
||||
restore-keys: |
|
||||
bun-${{ inputs.tag }}-docker-
|
||||
path: |
|
||||
${{ runner.temp }}/dockercache
|
||||
- name: Setup Docker
|
||||
uses: docker/setup-buildx-action@v3
|
||||
with:
|
||||
install: true
|
||||
platforms: |
|
||||
linux/${{ runner.arch == 'X64' && 'amd64' || 'arm64' }}
|
||||
- name: Build
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
push: false
|
||||
target: ${{ inputs.only-zig && 'build_release_obj' || 'artifact' }}
|
||||
cache-from: |
|
||||
type=local,src=${{ runner.temp }}/dockercache
|
||||
cache-to: |
|
||||
type=local,dest=${{ runner.temp }}/dockercache,mode=max
|
||||
outputs: |
|
||||
type=local,dest=${{ runner.temp }}/release
|
||||
platforms: |
|
||||
linux/${{ runner.arch == 'X64' && 'amd64' || 'arm64' }}
|
||||
build-args: |
|
||||
GIT_SHA=${{ github.event.workflow_run.head_sha || github.sha }}
|
||||
TRIPLET=${{ inputs.os == 'darwin' && format('{0}-macos-none', inputs.arch == 'x64' && 'x86_64' || 'aarch64') || inputs.os == 'windows' && format('{0}-windows-msvc', inputs.arch == 'x64' && 'x86_64' || 'aarch64') || format('{0}-linux-gnu', inputs.arch == 'x64' && 'x86_64' || 'aarch64') }}
|
||||
ARCH=${{ inputs.arch == 'x64' && 'x86_64' || 'aarch64' }}
|
||||
BUILDARCH=${{ inputs.arch == 'x64' && 'amd64' || 'arm64' }}
|
||||
BUILD_MACHINE_ARCH=${{ inputs.arch == 'x64' && 'x86_64' || 'aarch64' }}
|
||||
CPU_TARGET=${{ inputs.arch == 'x64' && inputs.cpu || 'native' }}
|
||||
ASSERTIONS=${{ inputs.assertions && 'ON' || 'OFF' }}
|
||||
ZIG_OPTIMIZE=${{ inputs.zig-optimize }}
|
||||
CANARY=${{ inputs.canary && '1' || '0' }}
|
||||
- if: ${{ inputs.only-zig }}
|
||||
name: Upload bun-${{ inputs.tag }}-zig
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}-zig
|
||||
path: ${{ runner.temp }}/release/bun-zig.o
|
||||
if-no-files-found: error
|
||||
- if: ${{ !inputs.only-zig }}
|
||||
name: Prepare
|
||||
run: |
|
||||
cd ${{ runner.temp }}/release
|
||||
chmod +x bun-profile bun
|
||||
mkdir bun-${{ inputs.tag }}-profile
|
||||
mkdir bun-${{ inputs.tag }}
|
||||
strip bun
|
||||
mv bun-profile bun-${{ inputs.tag }}-profile/bun-profile
|
||||
mv bun bun-${{ inputs.tag }}/bun
|
||||
zip -r bun-${{ inputs.tag }}-profile.zip bun-${{ inputs.tag }}-profile
|
||||
zip -r bun-${{ inputs.tag }}.zip bun-${{ inputs.tag }}
|
||||
- if: ${{ !inputs.only-zig }}
|
||||
name: Upload bun-${{ inputs.tag }}
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}
|
||||
path: ${{ runner.temp }}/release/bun-${{ inputs.tag }}.zip
|
||||
if-no-files-found: error
|
||||
- if: ${{ !inputs.only-zig }}
|
||||
name: Upload bun-${{ inputs.tag }}-profile
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}-profile
|
||||
path: ${{ runner.temp }}/release/bun-${{ inputs.tag }}-profile.zip
|
||||
if-no-files-found: error
|
||||
245
.github/workflows/ci.yml
vendored
@@ -1,245 +0,0 @@
|
||||
name: CI
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
actions: write
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event_name == 'workflow_dispatch' && inputs.run-id || github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
run-id:
|
||||
type: string
|
||||
description: The workflow ID to download artifacts (skips the build step)
|
||||
pull_request:
|
||||
paths-ignore:
|
||||
- .vscode/**/*
|
||||
- docs/**/*
|
||||
- examples/**/*
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
paths-ignore:
|
||||
- .vscode/**/*
|
||||
- docs/**/*
|
||||
- examples/**/*
|
||||
|
||||
jobs:
|
||||
format:
|
||||
if: ${{ !inputs.run-id }}
|
||||
name: Format
|
||||
uses: ./.github/workflows/run-format.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
zig-version: 0.13.0
|
||||
permissions:
|
||||
contents: write
|
||||
lint:
|
||||
if: ${{ !inputs.run-id }}
|
||||
name: Lint
|
||||
uses: ./.github/workflows/run-lint.yml
|
||||
secrets: inherit
|
||||
linux-x64:
|
||||
if: ${{ !inputs.run-id }}
|
||||
name: Build linux-x64
|
||||
uses: ./.github/workflows/build-linux.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }}
|
||||
tag: linux-x64
|
||||
arch: x64
|
||||
cpu: haswell
|
||||
canary: true
|
||||
no-cache: true
|
||||
linux-x64-baseline:
|
||||
if: ${{ !inputs.run-id }}
|
||||
name: Build linux-x64-baseline
|
||||
uses: ./.github/workflows/build-linux.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }}
|
||||
tag: linux-x64-baseline
|
||||
arch: x64
|
||||
cpu: nehalem
|
||||
canary: true
|
||||
no-cache: true
|
||||
linux-aarch64:
|
||||
if: ${{ !inputs.run-id && github.repository_owner == 'oven-sh' }}
|
||||
name: Build linux-aarch64
|
||||
uses: ./.github/workflows/build-linux.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: namespace-profile-bun-ci-linux-aarch64
|
||||
tag: linux-aarch64
|
||||
arch: aarch64
|
||||
cpu: native
|
||||
canary: true
|
||||
no-cache: true
|
||||
darwin-x64:
|
||||
if: ${{ !inputs.run-id }}
|
||||
name: Build darwin-x64
|
||||
uses: ./.github/workflows/build-darwin.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-13-large' || 'macos-13' }}
|
||||
tag: darwin-x64
|
||||
arch: x64
|
||||
cpu: haswell
|
||||
canary: true
|
||||
darwin-x64-baseline:
|
||||
if: ${{ !inputs.run-id }}
|
||||
name: Build darwin-x64-baseline
|
||||
uses: ./.github/workflows/build-darwin.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-13-large' || 'macos-13' }}
|
||||
tag: darwin-x64-baseline
|
||||
arch: x64
|
||||
cpu: nehalem
|
||||
canary: true
|
||||
darwin-aarch64:
|
||||
if: ${{ !inputs.run-id }}
|
||||
name: Build darwin-aarch64
|
||||
uses: ./.github/workflows/build-darwin.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-darwin-aarch64' || 'macos-13' }}
|
||||
tag: darwin-aarch64
|
||||
arch: aarch64
|
||||
cpu: native
|
||||
canary: true
|
||||
windows-x64:
|
||||
if: ${{ !inputs.run-id }}
|
||||
name: Build windows-x64
|
||||
uses: ./.github/workflows/build-windows.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: windows
|
||||
tag: windows-x64
|
||||
arch: x64
|
||||
cpu: haswell
|
||||
canary: true
|
||||
windows-x64-baseline:
|
||||
if: ${{ !inputs.run-id }}
|
||||
name: Build windows-x64-baseline
|
||||
uses: ./.github/workflows/build-windows.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: windows
|
||||
tag: windows-x64-baseline
|
||||
arch: x64
|
||||
cpu: nehalem
|
||||
canary: true
|
||||
linux-x64-test:
|
||||
if: ${{ inputs.run-id || github.event_name == 'pull_request' }}
|
||||
name: Test linux-x64
|
||||
needs: linux-x64
|
||||
uses: ./.github/workflows/run-test.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
run-id: ${{ inputs.run-id }}
|
||||
pr-number: ${{ github.event.number }}
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }}
|
||||
tag: linux-x64
|
||||
linux-x64-baseline-test:
|
||||
if: ${{ inputs.run-id || github.event_name == 'pull_request' }}
|
||||
name: Test linux-x64-baseline
|
||||
needs: linux-x64-baseline
|
||||
uses: ./.github/workflows/run-test.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
run-id: ${{ inputs.run-id }}
|
||||
pr-number: ${{ github.event.number }}
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }}
|
||||
tag: linux-x64-baseline
|
||||
linux-aarch64-test:
|
||||
if: ${{ inputs.run-id || github.event_name == 'pull_request' && github.repository_owner == 'oven-sh'}}
|
||||
name: Test linux-aarch64
|
||||
needs: linux-aarch64
|
||||
uses: ./.github/workflows/run-test.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
run-id: ${{ inputs.run-id }}
|
||||
pr-number: ${{ github.event.number }}
|
||||
runs-on: namespace-profile-bun-ci-linux-aarch64
|
||||
tag: linux-aarch64
|
||||
darwin-x64-test:
|
||||
if: ${{ inputs.run-id || github.event_name == 'pull_request' }}
|
||||
name: Test darwin-x64
|
||||
needs: darwin-x64
|
||||
uses: ./.github/workflows/run-test.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
run-id: ${{ inputs.run-id }}
|
||||
pr-number: ${{ github.event.number }}
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-13-large' || 'macos-13' }}
|
||||
tag: darwin-x64
|
||||
darwin-x64-baseline-test:
|
||||
if: ${{ inputs.run-id || github.event_name == 'pull_request' }}
|
||||
name: Test darwin-x64-baseline
|
||||
needs: darwin-x64-baseline
|
||||
uses: ./.github/workflows/run-test.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
run-id: ${{ inputs.run-id }}
|
||||
pr-number: ${{ github.event.number }}
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-13-large' || 'macos-13' }}
|
||||
tag: darwin-x64-baseline
|
||||
darwin-aarch64-test:
|
||||
if: ${{ inputs.run-id || github.event_name == 'pull_request' }}
|
||||
name: Test darwin-aarch64
|
||||
needs: darwin-aarch64
|
||||
uses: ./.github/workflows/run-test.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
run-id: ${{ inputs.run-id }}
|
||||
pr-number: ${{ github.event.number }}
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-darwin-aarch64' || 'macos-13' }}
|
||||
tag: darwin-aarch64
|
||||
windows-x64-test:
|
||||
if: ${{ inputs.run-id || github.event_name == 'pull_request' }}
|
||||
name: Test windows-x64
|
||||
needs: windows-x64
|
||||
uses: ./.github/workflows/run-test.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
run-id: ${{ inputs.run-id }}
|
||||
pr-number: ${{ github.event.number }}
|
||||
runs-on: windows
|
||||
tag: windows-x64
|
||||
windows-x64-baseline-test:
|
||||
if: ${{ inputs.run-id || github.event_name == 'pull_request' }}
|
||||
name: Test windows-x64-baseline
|
||||
needs: windows-x64-baseline
|
||||
uses: ./.github/workflows/run-test.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
run-id: ${{ inputs.run-id }}
|
||||
pr-number: ${{ github.event.number }}
|
||||
runs-on: windows
|
||||
tag: windows-x64-baseline
|
||||
cleanup:
|
||||
if: ${{ always() }}
|
||||
name: Cleanup
|
||||
needs:
|
||||
- linux-x64
|
||||
- linux-x64-baseline
|
||||
- linux-aarch64
|
||||
- darwin-x64
|
||||
- darwin-x64-baseline
|
||||
- darwin-aarch64
|
||||
- windows-x64
|
||||
- windows-x64-baseline
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Cleanup Artifacts
|
||||
uses: geekyeggo/delete-artifact@v5
|
||||
with:
|
||||
name: |
|
||||
bun-*-cpp
|
||||
bun-*-zig
|
||||
bun-*-deps
|
||||
bun-*-codegen
|
||||
55
.github/workflows/comment.yml
vendored
@@ -1,55 +0,0 @@
|
||||
name: Comment
|
||||
|
||||
permissions:
|
||||
actions: read
|
||||
pull-requests: write
|
||||
|
||||
on:
|
||||
workflow_run:
|
||||
workflows:
|
||||
- CI
|
||||
types:
|
||||
- completed
|
||||
|
||||
jobs:
|
||||
comment:
|
||||
if: ${{ github.repository_owner == 'oven-sh' }}
|
||||
name: Comment
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Download Tests
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
path: bun
|
||||
pattern: bun-*-tests
|
||||
github-token: ${{ github.token }}
|
||||
run-id: ${{ github.event.workflow_run.id }}
|
||||
- name: Setup Environment
|
||||
id: env
|
||||
shell: bash
|
||||
run: |
|
||||
echo "pr-number=$(<bun/bun-linux-x64-tests/pr-number.txt)" >> $GITHUB_OUTPUT
|
||||
- name: Generate Comment
|
||||
run: |
|
||||
cat bun/bun-*-tests/comment.md > comment.md
|
||||
if [ -s comment.md ]; then
|
||||
echo -e "❌ @${{ github.actor }}, your commit has failing tests :(\n\n$(cat comment.md)" > comment.md
|
||||
else
|
||||
echo -e "✅ @${{ github.actor }}, all tests passed!" > comment.md
|
||||
fi
|
||||
echo -e "\n**[View logs](https://github.com/${{ github.repository }}/actions/runs/${{ github.event.workflow_run.id }})**" >> comment.md
|
||||
echo -e "<!-- generated-comment workflow=${{ github.workflow }} -->" >> comment.md
|
||||
- name: Find Comment
|
||||
id: comment
|
||||
uses: peter-evans/find-comment@v3
|
||||
with:
|
||||
issue-number: ${{ steps.env.outputs.pr-number }}
|
||||
comment-author: github-actions[bot]
|
||||
body-includes: <!-- generated-comment workflow=${{ github.workflow }} -->
|
||||
- name: Write Comment
|
||||
uses: peter-evans/create-or-update-comment@v4
|
||||
with:
|
||||
comment-id: ${{ steps.comment.outputs.comment-id }}
|
||||
issue-number: ${{ steps.env.outputs.pr-number }}
|
||||
body-path: comment.md
|
||||
edit-mode: replace
|
||||
183
.github/workflows/create-release-build.yml
vendored
@@ -1,183 +0,0 @@
|
||||
name: Create Release Build
|
||||
run-name: Compile Bun v${{ inputs.version }} by ${{ github.actor }}
|
||||
|
||||
concurrency:
|
||||
group: release
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
actions: write
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
version:
|
||||
type: string
|
||||
required: true
|
||||
description: "Release version. Example: 1.1.4. Exclude the 'v' prefix."
|
||||
tag:
|
||||
type: string
|
||||
required: true
|
||||
description: "GitHub tag to use"
|
||||
clobber:
|
||||
type: boolean
|
||||
required: false
|
||||
default: false
|
||||
description: "Overwrite existing release artifacts?"
|
||||
release:
|
||||
types:
|
||||
- created
|
||||
|
||||
jobs:
|
||||
notify-start:
|
||||
if: ${{ github.repository_owner == 'oven-sh' }}
|
||||
name: Notify Start
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Send Message
|
||||
uses: sarisia/actions-status-discord@v1
|
||||
with:
|
||||
webhook: ${{ secrets.DISCORD_WEBHOOK_PUBLIC }}
|
||||
nodetail: true
|
||||
color: "#1F6FEB"
|
||||
title: "Bun v${{ inputs.version }} is compiling"
|
||||
description: |
|
||||
### @${{ github.actor }} started compiling Bun v${{inputs.version}}
|
||||
- name: Send Message
|
||||
uses: sarisia/actions-status-discord@v1
|
||||
with:
|
||||
webhook: ${{ secrets.BUN_DISCORD_GITHUB_CHANNEL_WEBHOOK }}
|
||||
nodetail: true
|
||||
color: "#1F6FEB"
|
||||
title: "Bun v${{ inputs.version }} is compiling"
|
||||
description: |
|
||||
### @${{ github.actor }} started compiling Bun v${{inputs.version}}
|
||||
|
||||
**[View logs](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})**
|
||||
linux-x64:
|
||||
name: Build linux-x64
|
||||
uses: ./.github/workflows/build-linux.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }}
|
||||
tag: linux-x64
|
||||
arch: x64
|
||||
cpu: haswell
|
||||
canary: false
|
||||
linux-x64-baseline:
|
||||
name: Build linux-x64-baseline
|
||||
uses: ./.github/workflows/build-linux.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }}
|
||||
tag: linux-x64-baseline
|
||||
arch: x64
|
||||
cpu: nehalem
|
||||
canary: false
|
||||
linux-aarch64:
|
||||
name: Build linux-aarch64
|
||||
uses: ./.github/workflows/build-linux.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: namespace-profile-bun-ci-linux-aarch64
|
||||
tag: linux-aarch64
|
||||
arch: aarch64
|
||||
cpu: native
|
||||
canary: false
|
||||
darwin-x64:
|
||||
name: Build darwin-x64
|
||||
uses: ./.github/workflows/build-darwin.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-13-large' || 'macos-13' }}
|
||||
tag: darwin-x64
|
||||
arch: x64
|
||||
cpu: haswell
|
||||
canary: false
|
||||
darwin-x64-baseline:
|
||||
name: Build darwin-x64-baseline
|
||||
uses: ./.github/workflows/build-darwin.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-13-large' || 'macos-13' }}
|
||||
tag: darwin-x64-baseline
|
||||
arch: x64
|
||||
cpu: nehalem
|
||||
canary: false
|
||||
darwin-aarch64:
|
||||
name: Build darwin-aarch64
|
||||
uses: ./.github/workflows/build-darwin.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-darwin-aarch64' || 'macos-13' }}
|
||||
tag: darwin-aarch64
|
||||
arch: aarch64
|
||||
cpu: native
|
||||
canary: false
|
||||
windows-x64:
|
||||
name: Build windows-x64
|
||||
uses: ./.github/workflows/build-windows.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: windows
|
||||
tag: windows-x64
|
||||
arch: x64
|
||||
cpu: haswell
|
||||
canary: false
|
||||
windows-x64-baseline:
|
||||
name: Build windows-x64-baseline
|
||||
uses: ./.github/workflows/build-windows.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: windows
|
||||
tag: windows-x64-baseline
|
||||
arch: x64
|
||||
cpu: nehalem
|
||||
canary: false
|
||||
|
||||
upload-artifacts:
|
||||
needs:
|
||||
- linux-x64
|
||||
- linux-x64-baseline
|
||||
- linux-aarch64
|
||||
- darwin-x64
|
||||
- darwin-x64-baseline
|
||||
- darwin-aarch64
|
||||
- windows-x64
|
||||
- windows-x64-baseline
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Download Artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
path: bun-releases
|
||||
pattern: bun-*
|
||||
merge-multiple: true
|
||||
github-token: ${{ github.token }}
|
||||
- name: Check for Artifacts
|
||||
run: |
|
||||
if [ ! -d "bun-releases" ] || [ -z "$(ls -A bun-releases)" ]; then
|
||||
echo "Error: No artifacts were downloaded or 'bun-releases' directory does not exist."
|
||||
exit 1 # Fail the job if the condition is met
|
||||
else
|
||||
echo "Artifacts downloaded successfully."
|
||||
fi
|
||||
- name: Send Message
|
||||
uses: sarisia/actions-status-discord@v1
|
||||
with:
|
||||
webhook: ${{ secrets.DISCORD_WEBHOOK }}
|
||||
nodetail: true
|
||||
color: "#FF0000"
|
||||
title: "Bun v${{ inputs.version }} release artifacts uploaded"
|
||||
- name: "Upload Artifacts"
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
run: |
|
||||
# Unzip one level deep each artifact
|
||||
cd bun-releases
|
||||
for f in *.zip; do
|
||||
unzip -o $f
|
||||
done
|
||||
cd ..
|
||||
gh release upload --repo=${{ github.repository }} ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.tag || github.event.release.id }} ${{ inputs.clobber && '--clobber' || '' }} bun-releases/*.zip
|
||||
29
.github/workflows/format.yml
vendored
Normal file
@@ -0,0 +1,29 @@
|
||||
name: format
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event_name == 'workflow_dispatch' && inputs.run-id || github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
run-id:
|
||||
type: string
|
||||
description: The workflow ID to download artifacts (skips the build step)
|
||||
pull_request:
|
||||
types: [opened, synchronize, reopened]
|
||||
paths:
|
||||
- "**.{ts,js,json,md,toml,yml,yaml,tsx,jsx,zig,cjs,mjs}"
|
||||
|
||||
jobs:
|
||||
format:
|
||||
name: Run format
|
||||
uses: ./.github/workflows/run-format.yml
|
||||
secrets: inherit
|
||||
permissions:
|
||||
contents: write
|
||||
with:
|
||||
zig-version: 0.13.0
|
||||
36
.github/workflows/labeled.yml
vendored
@@ -7,6 +7,42 @@ on:
|
||||
types: [labeled]
|
||||
|
||||
jobs:
|
||||
# on-bug:
|
||||
# runs-on: ubuntu-latest
|
||||
# if: github.event.label.name == 'bug' || github.event.label.name == 'crash'
|
||||
# permissions:
|
||||
# issues: write
|
||||
# steps:
|
||||
# - name: Checkout
|
||||
# uses: actions/checkout@v4
|
||||
# with:
|
||||
# sparse-checkout: |
|
||||
# scripts
|
||||
# .github
|
||||
# CMakeLists.txt
|
||||
# - name: Setup Bun
|
||||
# uses: ./.github/actions/setup-bun
|
||||
# with:
|
||||
# bun-version: "1.1.24"
|
||||
# - name: "categorize bug"
|
||||
# id: add-labels
|
||||
# env:
|
||||
# GITHUB_ISSUE_BODY: ${{ github.event.issue.body }}
|
||||
# GITHUB_ISSUE_TITLE: ${{ github.event.issue.title }}
|
||||
# ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
|
||||
# shell: bash
|
||||
# run: |
|
||||
# echo '{"dependencies": { "@anthropic-ai/sdk": "latest" }}' > scripts/package.json && bun install --cwd=./scripts
|
||||
# LABELS=$(bun scripts/label-issue.ts)
|
||||
# echo "labels=$LABELS" >> $GITHUB_OUTPUT
|
||||
# - name: Add labels
|
||||
# uses: actions-cool/issues-helper@v3
|
||||
# if: steps.add-labels.outputs.labels != ''
|
||||
# with:
|
||||
# actions: "add-labels"
|
||||
# token: ${{ secrets.GITHUB_TOKEN }}
|
||||
# issue-number: ${{ github.event.issue.number }}
|
||||
# labels: ${{ steps.add-labels.outputs.labels }}
|
||||
on-labeled:
|
||||
runs-on: ubuntu-latest
|
||||
if: github.event.label.name == 'crash' || github.event.label.name == 'needs repro'
|
||||
|
||||
6
.github/workflows/release.yml
vendored
@@ -1,3 +1,6 @@
|
||||
# TODO: Move this to bash scripts instead of GitHub Actions
|
||||
# so it can be run from Buildkite, see: .buildkite/scripts/release.sh
|
||||
|
||||
name: Release
|
||||
concurrency: release
|
||||
|
||||
@@ -85,6 +88,9 @@ jobs:
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
# To workaround issue
|
||||
ref: main
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
|
||||
12
.github/workflows/run-format.yml
vendored
@@ -14,24 +14,27 @@ jobs:
|
||||
format:
|
||||
name: Format
|
||||
runs-on: ubuntu-latest
|
||||
if: ${{ github.ref != 'refs/heads/main' }}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
sparse-checkout: |
|
||||
.prettierrc-ci
|
||||
.github
|
||||
src
|
||||
scripts
|
||||
packages
|
||||
test
|
||||
bench
|
||||
package.json
|
||||
bun.lockb
|
||||
.vscode
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: "1.1.20"
|
||||
bun-version: "1.1.25"
|
||||
- name: Setup Zig
|
||||
uses: goto-bus-stop/setup-zig@c7b6cdd3adba8f8b96984640ff172c37c93f73ee
|
||||
uses: mlugg/setup-zig@v1
|
||||
with:
|
||||
version: ${{ inputs.zig-version }}
|
||||
- name: Install Dependencies
|
||||
@@ -43,9 +46,6 @@ jobs:
|
||||
- name: Format Zig
|
||||
run: |
|
||||
bun fmt:zig
|
||||
- name: Generate submodule versions
|
||||
run: |
|
||||
bash ./scripts/write-versions.sh
|
||||
- name: Commit
|
||||
uses: stefanzweifel/git-auto-commit-action@v5
|
||||
with:
|
||||
|
||||
4
.github/workflows/run-lint-cpp.yml
vendored
@@ -3,7 +3,7 @@ name: lint-cpp
|
||||
permissions:
|
||||
contents: read
|
||||
env:
|
||||
LLVM_VERSION: 16
|
||||
LLVM_VERSION: 18
|
||||
LC_CTYPE: "en_US.UTF-8"
|
||||
LC_ALL: "en_US.UTF-8"
|
||||
|
||||
@@ -26,7 +26,7 @@ jobs:
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: latest
|
||||
bun-version: 1.1.23
|
||||
- name: Install Dependencies
|
||||
env:
|
||||
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
|
||||
|
||||
224
.github/workflows/run-test.yml
vendored
@@ -1,224 +0,0 @@
|
||||
name: Test
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
actions: read
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
runs-on:
|
||||
type: string
|
||||
required: true
|
||||
tag:
|
||||
type: string
|
||||
required: true
|
||||
pr-number:
|
||||
type: string
|
||||
required: true
|
||||
run-id:
|
||||
type: string
|
||||
default: ${{ github.run_id }}
|
||||
|
||||
jobs:
|
||||
test:
|
||||
name: Tests
|
||||
runs-on: ${{ inputs.runs-on }}
|
||||
steps:
|
||||
- if: ${{ runner.os == 'Windows' }}
|
||||
name: Setup Git
|
||||
run: |
|
||||
git config --global core.autocrlf false
|
||||
git config --global core.eol lf
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
sparse-checkout: |
|
||||
package.json
|
||||
bun.lockb
|
||||
test
|
||||
packages/bun-internal-test
|
||||
packages/bun-types
|
||||
- name: Setup Environment
|
||||
shell: bash
|
||||
run: |
|
||||
echo "${{ inputs.pr-number }}" > pr-number.txt
|
||||
- name: Download Bun
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}
|
||||
path: bun
|
||||
github-token: ${{ github.token }}
|
||||
run-id: ${{ inputs.run-id || github.run_id }}
|
||||
- name: Download pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
with:
|
||||
version: 8
|
||||
- if: ${{ runner.os != 'Windows' }}
|
||||
name: Setup Bun
|
||||
shell: bash
|
||||
run: |
|
||||
unzip bun/bun-*.zip
|
||||
cd bun-*
|
||||
pwd >> $GITHUB_PATH
|
||||
- if: ${{ runner.os == 'Windows' }}
|
||||
name: Setup Cygwin
|
||||
uses: secondlife/setup-cygwin@v3
|
||||
with:
|
||||
packages: bash
|
||||
- if: ${{ runner.os == 'Windows' }}
|
||||
name: Setup Bun (Windows)
|
||||
run: |
|
||||
unzip bun/bun-*.zip
|
||||
cd bun-*
|
||||
pwd >> $env:GITHUB_PATH
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
- name: Install Dependencies
|
||||
timeout-minutes: 5
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
bun install
|
||||
- name: Install Dependencies (test)
|
||||
timeout-minutes: 5
|
||||
run: |
|
||||
bun install --cwd test
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: Install Dependencies (runner)
|
||||
timeout-minutes: 5
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
bun install --cwd packages/bun-internal-test
|
||||
- name: Run Tests
|
||||
id: test
|
||||
timeout-minutes: 90
|
||||
shell: bash
|
||||
env:
|
||||
IS_BUN_CI: 1
|
||||
TMPDIR: ${{ runner.temp }}
|
||||
BUN_TAG: ${{ inputs.tag }}
|
||||
BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: "true"
|
||||
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
|
||||
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
|
||||
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
|
||||
TEST_INFO_STRIPE: ${{ secrets.TEST_INFO_STRIPE }}
|
||||
TEST_INFO_AZURE_SERVICE_BUS: ${{ secrets.TEST_INFO_AZURE_SERVICE_BUS }}
|
||||
SHELLOPTS: igncr
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
node packages/bun-internal-test/src/runner.node.mjs $(which bun)
|
||||
- if: ${{ always() }}
|
||||
name: Upload Results
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}-tests
|
||||
path: |
|
||||
test-report.*
|
||||
comment.md
|
||||
pr-number.txt
|
||||
if-no-files-found: error
|
||||
overwrite: true
|
||||
- if: ${{ always() && steps.test.outputs.failing_tests != '' && github.event.pull_request && github.repository_owner == 'oven-sh' }}
|
||||
name: Send Message
|
||||
uses: sarisia/actions-status-discord@v1
|
||||
with:
|
||||
webhook: ${{ secrets.DISCORD_WEBHOOK }}
|
||||
nodetail: true
|
||||
color: "#FF0000"
|
||||
title: ""
|
||||
description: |
|
||||
### ❌ [${{ github.event.pull_request.title }}](${{ github.event.pull_request.html_url }})
|
||||
|
||||
@${{ github.actor }}, there are ${{ steps.test.outputs.failing_tests_count || 'some' }} failing tests on bun-${{ inputs.tag }}.
|
||||
|
||||
${{ steps.test.outputs.failing_tests }}
|
||||
|
||||
**[View logs](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})**
|
||||
- name: Fail
|
||||
if: ${{ failure() || always() && steps.test.outputs.failing_tests != '' }}
|
||||
run: |
|
||||
echo "There are ${{ steps.test.outputs.failing_tests_count || 'some' }} failing tests on bun-${{ inputs.tag }}."
|
||||
exit 1
|
||||
test-node:
|
||||
name: Node.js Tests
|
||||
# TODO: enable when we start paying attention to the results. In the meantime, this causes CI to queue jobs, wasting developer time.
|
||||
if: 0
|
||||
runs-on: ${{ inputs.runs-on }}
|
||||
steps:
|
||||
- if: ${{ runner.os == 'Windows' }}
|
||||
name: Setup Git
|
||||
run: |
|
||||
git config --global core.autocrlf false
|
||||
git config --global core.eol lf
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
sparse-checkout: |
|
||||
test/node.js
|
||||
- name: Setup Environment
|
||||
shell: bash
|
||||
run: |
|
||||
echo "${{ inputs.pr-number }}" > pr-number.txt
|
||||
- name: Download Bun
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}
|
||||
path: bun
|
||||
github-token: ${{ github.token }}
|
||||
run-id: ${{ inputs.run-id || github.run_id }}
|
||||
- if: ${{ runner.os != 'Windows' }}
|
||||
name: Setup Bun
|
||||
shell: bash
|
||||
run: |
|
||||
unzip bun/bun-*.zip
|
||||
cd bun-*
|
||||
pwd >> $GITHUB_PATH
|
||||
- if: ${{ runner.os == 'Windows' }}
|
||||
name: Setup Cygwin
|
||||
uses: secondlife/setup-cygwin@v3
|
||||
with:
|
||||
packages: bash
|
||||
- if: ${{ runner.os == 'Windows' }}
|
||||
name: Setup Bun (Windows)
|
||||
run: |
|
||||
unzip bun/bun-*.zip
|
||||
cd bun-*
|
||||
pwd >> $env:GITHUB_PATH
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
- name: Checkout Tests
|
||||
shell: bash
|
||||
working-directory: test/node.js
|
||||
run: |
|
||||
node runner.mjs --pull
|
||||
- name: Install Dependencies
|
||||
timeout-minutes: 5
|
||||
shell: bash
|
||||
working-directory: test/node.js
|
||||
run: |
|
||||
bun install
|
||||
- name: Run Tests
|
||||
timeout-minutes: 10 # Increase when more tests are added
|
||||
shell: bash
|
||||
working-directory: test/node.js
|
||||
env:
|
||||
TMPDIR: ${{ runner.temp }}
|
||||
BUN_GARBAGE_COLLECTOR_LEVEL: "0"
|
||||
BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: "true"
|
||||
run: |
|
||||
node runner.mjs
|
||||
- name: Upload Results
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}-node-tests
|
||||
path: |
|
||||
test/node.js/summary/*.json
|
||||
if-no-files-found: error
|
||||
overwrite: true
|
||||
30
.github/workflows/stale.yaml
vendored
Normal file
@@ -0,0 +1,30 @@
|
||||
name: Close inactive issues
|
||||
on:
|
||||
# schedule:
|
||||
# - cron: "15 * * * *"
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
close-issues:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
issues: write
|
||||
pull-requests: write
|
||||
steps:
|
||||
- uses: actions/stale@v5
|
||||
with:
|
||||
days-before-issue-close: 5
|
||||
any-of-issue-labels: "needs repro,waiting-for-author"
|
||||
exempt-issue-labels: "neverstale"
|
||||
exempt-pr-labels: "neverstale"
|
||||
remove-stale-when-updated: true
|
||||
stale-issue-label: "stale"
|
||||
stale-pr-label: "stale"
|
||||
stale-issue-message: "This issue is stale and may be closed due to inactivity. If you're still running into this, please leave a comment."
|
||||
close-issue-message: "This issue was closed because it has been inactive for 5 days since being marked as stale."
|
||||
days-before-pr-stale: 30
|
||||
days-before-pr-close: 14
|
||||
stale-pr-message: "This pull request is stale and may be closed due to inactivity."
|
||||
close-pr-message: "This pull request has been closed due to inactivity."
|
||||
repo-token: ${{ github.token }}
|
||||
operations-per-run: 1000
|
||||
94
.github/workflows/upload.yml
vendored
@@ -1,94 +0,0 @@
|
||||
name: Upload Artifacts
|
||||
run-name: Canary release ${{github.sha}} upload
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
on:
|
||||
workflow_run:
|
||||
workflows:
|
||||
- CI
|
||||
types:
|
||||
- completed
|
||||
branches:
|
||||
- main
|
||||
|
||||
jobs:
|
||||
upload:
|
||||
if: ${{ github.repository_owner == 'oven-sh' }}
|
||||
name: Upload Artifacts
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Download Artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
path: bun
|
||||
pattern: bun-*
|
||||
merge-multiple: true
|
||||
github-token: ${{ github.token }}
|
||||
run-id: ${{ github.event.workflow_run.id }}
|
||||
- name: Check for Artifacts
|
||||
run: |
|
||||
if [ ! -d "bun" ] || [ -z "$(ls -A bun)" ]; then
|
||||
echo "Error: No artifacts were downloaded or 'bun' directory does not exist."
|
||||
exit 1 # Fail the job if the condition is met
|
||||
else
|
||||
echo "Artifacts downloaded successfully."
|
||||
fi
|
||||
- name: Upload to GitHub Releases
|
||||
uses: ncipollo/release-action@v1
|
||||
with:
|
||||
tag: canary
|
||||
name: Canary (${{ github.sha }})
|
||||
prerelease: true
|
||||
body: This canary release of Bun corresponds to the commit [${{ github.sha }}]
|
||||
allowUpdates: true
|
||||
replacesArtifacts: true
|
||||
generateReleaseNotes: true
|
||||
artifactErrorsFailBuild: true
|
||||
artifacts: bun/**/bun-*.zip
|
||||
token: ${{ github.token }}
|
||||
- name: Upload to S3 (using SHA)
|
||||
uses: shallwefootball/s3-upload-action@4350529f410221787ccf424e50133cbc1b52704e
|
||||
with:
|
||||
endpoint: ${{ secrets.AWS_ENDPOINT }}
|
||||
aws_key_id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
aws_secret_access_key: ${{ secrets.AWS_SECRET_ACCESS_KEY}}
|
||||
aws_bucket: ${{ secrets.AWS_BUCKET }}
|
||||
source_dir: bun
|
||||
destination_dir: releases/${{ github.event.workflow_run.head_sha || github.sha }}-canary
|
||||
- name: Upload to S3 (using tag)
|
||||
uses: shallwefootball/s3-upload-action@4350529f410221787ccf424e50133cbc1b52704e
|
||||
with:
|
||||
endpoint: ${{ secrets.AWS_ENDPOINT }}
|
||||
aws_key_id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
aws_secret_access_key: ${{ secrets.AWS_SECRET_ACCESS_KEY}}
|
||||
aws_bucket: ${{ secrets.AWS_BUCKET }}
|
||||
source_dir: bun
|
||||
destination_dir: releases/canary
|
||||
- name: Announce on Discord
|
||||
uses: sarisia/actions-status-discord@v1
|
||||
with:
|
||||
webhook: ${{ secrets.BUN_DISCORD_GITHUB_CHANNEL_WEBHOOK }}
|
||||
nodetail: true
|
||||
color: "#1F6FEB"
|
||||
title: "New Bun Canary available"
|
||||
url: https://github.com/oven-sh/bun/commit/${{ github.sha }}
|
||||
description: |
|
||||
A new canary build of Bun has been automatically uploaded. To upgrade, run:
|
||||
```sh
|
||||
bun upgrade --canary
|
||||
# bun upgrade --stable <- to downgrade
|
||||
```
|
||||
# If notifying sentry fails, don't fail the rest of the build.
|
||||
- name: Notify Sentry
|
||||
uses: getsentry/action-release@v1.7.0
|
||||
env:
|
||||
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
|
||||
SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
|
||||
with:
|
||||
ignore_missing: true
|
||||
ignore_empty: true
|
||||
version: ${{ github.event.workflow_run.head_sha || github.sha }}-canary
|
||||
environment: canary
|
||||
@@ -1,2 +1,4 @@
|
||||
command script import src/deps/zig/tools/lldb_pretty_printers.py
|
||||
command script import src/bun.js/WebKit/Tools/lldb/lldb_webkit.py
|
||||
|
||||
# type summary add --summary-string "${var} | inner=${var[0-30]}, source=${var[33-64]}, tag=${var[31-32]}" "unsigned long"
|
||||
|
||||
@@ -5,3 +5,6 @@ test/js/deno
|
||||
test/node.js
|
||||
src/react-refresh.js
|
||||
*.min.js
|
||||
test/js/node/test/fixtures
|
||||
test/js/node/test/common
|
||||
test/snippets
|
||||
|
||||
31
.prettierrc-ci
Normal file
@@ -0,0 +1,31 @@
|
||||
{
|
||||
"arrowParens": "avoid",
|
||||
"printWidth": 120,
|
||||
"trailingComma": "all",
|
||||
"useTabs": false,
|
||||
"quoteProps": "preserve",
|
||||
"plugins": [
|
||||
"prettier-plugin-organize-imports"
|
||||
],
|
||||
"overrides": [
|
||||
{
|
||||
"files": [
|
||||
".vscode/*.json"
|
||||
],
|
||||
"options": {
|
||||
"parser": "jsonc",
|
||||
"quoteProps": "preserve",
|
||||
"singleQuote": false,
|
||||
"trailingComma": "all"
|
||||
}
|
||||
},
|
||||
{
|
||||
"files": [
|
||||
"*.md"
|
||||
],
|
||||
"options": {
|
||||
"printWidth": 80
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
125
.vscode/launch.json
generated
vendored
@@ -17,6 +17,8 @@
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_DEBUG_jest": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "1",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
@@ -32,7 +34,6 @@
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "1",
|
||||
"BUN_DEBUG_FileReader": "1",
|
||||
"BUN_DEBUG_jest": "1",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
@@ -53,6 +54,7 @@
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_DEBUG_jest": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
@@ -67,6 +69,7 @@
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "0",
|
||||
"BUN_DEBUG_jest": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
@@ -81,6 +84,7 @@
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_DEBUG_jest": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
@@ -95,6 +99,7 @@
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_DEBUG_jest": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
@@ -109,6 +114,7 @@
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_DEBUG_jest": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
"BUN_INSPECT": "ws://localhost:0/?wait=1",
|
||||
},
|
||||
@@ -129,6 +135,7 @@
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_DEBUG_jest": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
"BUN_INSPECT": "ws://localhost:0/?break=1",
|
||||
},
|
||||
@@ -145,14 +152,12 @@
|
||||
"request": "launch",
|
||||
"name": "bun run [file]",
|
||||
"program": "${workspaceFolder}/build/bun-debug",
|
||||
"args": ["run", "${file}"],
|
||||
"args": ["run", "${fileBasename}"],
|
||||
"cwd": "${fileDirname}",
|
||||
"env": {
|
||||
"FORCE_COLOR": "0",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_DEBUG_EventLoop": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
"BUN_DEBUG_ALL": "1",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
},
|
||||
@@ -263,6 +268,7 @@
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_DEBUG_jest": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
@@ -277,6 +283,7 @@
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_DEBUG_jest": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
@@ -290,7 +297,8 @@
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "0",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_DEBUG_jest": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
@@ -305,6 +313,7 @@
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_DEBUG_jest": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
@@ -319,6 +328,7 @@
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_DEBUG_jest": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
@@ -333,6 +343,7 @@
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_DEBUG_jest": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
"BUN_INSPECT": "ws://localhost:0/?wait=1",
|
||||
},
|
||||
@@ -353,6 +364,7 @@
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_DEBUG_jest": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
"BUN_INSPECT": "ws://localhost:0/?break=1",
|
||||
},
|
||||
@@ -448,6 +460,11 @@
|
||||
"program": "node",
|
||||
"args": ["test/runner.node.mjs"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
},
|
||||
// Windows: bun test [file]
|
||||
@@ -474,7 +491,6 @@
|
||||
"name": "BUN_DEBUG_jest",
|
||||
"value": "1",
|
||||
},
|
||||
|
||||
{
|
||||
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
|
||||
"value": "1",
|
||||
@@ -501,19 +517,7 @@
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_EventLoop",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_uv",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_SYS",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_PipeWriter",
|
||||
"name": "BUN_DEBUG_jest",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
@@ -541,6 +545,10 @@
|
||||
"name": "BUN_DEBUG_QUIET_LOGS",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_jest",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
|
||||
"value": "0",
|
||||
@@ -566,6 +574,10 @@
|
||||
"name": "BUN_DEBUG_QUIET_LOGS",
|
||||
"value": "0",
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_jest",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
|
||||
"value": "2",
|
||||
@@ -591,6 +603,10 @@
|
||||
"name": "BUN_DEBUG_QUIET_LOGS",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_jest",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
|
||||
"value": "2",
|
||||
@@ -625,6 +641,10 @@
|
||||
"name": "BUN_DEBUG_QUIET_LOGS",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_jest",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
|
||||
"value": "2",
|
||||
@@ -660,6 +680,10 @@
|
||||
"name": "BUN_DEBUG_QUIET_LOGS",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_jest",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
|
||||
"value": "2",
|
||||
@@ -681,7 +705,10 @@
|
||||
"name": "FORCE_COLOR",
|
||||
"value": "1",
|
||||
},
|
||||
|
||||
{
|
||||
"name": "BUN_DEBUG_QUIET_LOGS",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
|
||||
"value": "0",
|
||||
@@ -705,7 +732,7 @@
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_QUIET_LOGS",
|
||||
"value": "0",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
|
||||
@@ -801,6 +828,10 @@
|
||||
"name": "BUN_DEBUG_QUIET_LOGS",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_jest",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
|
||||
"value": "2",
|
||||
@@ -826,6 +857,10 @@
|
||||
"name": "BUN_DEBUG_QUIET_LOGS",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_jest",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
|
||||
"value": "0",
|
||||
@@ -851,6 +886,10 @@
|
||||
"name": "BUN_DEBUG_QUIET_LOGS",
|
||||
"value": "0",
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_jest",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
|
||||
"value": "2",
|
||||
@@ -876,6 +915,10 @@
|
||||
"name": "BUN_DEBUG_QUIET_LOGS",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_jest",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
|
||||
"value": "2",
|
||||
@@ -901,6 +944,10 @@
|
||||
"name": "BUN_DEBUG_QUIET_LOGS",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_jest",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
|
||||
"value": "2",
|
||||
@@ -926,6 +973,10 @@
|
||||
"name": "BUN_DEBUG_QUIET_LOGS",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_jest",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
|
||||
"value": "2",
|
||||
@@ -960,6 +1011,10 @@
|
||||
"name": "BUN_DEBUG_QUIET_LOGS",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_jest",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
|
||||
"value": "2",
|
||||
@@ -1046,6 +1101,10 @@
|
||||
"name": "BUN_DEBUG_QUIET_LOGS",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_jest",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
|
||||
"value": "0",
|
||||
@@ -1069,7 +1128,11 @@
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_QUIET_LOGS",
|
||||
"value": "0",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_jest",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
|
||||
@@ -1096,6 +1159,24 @@
|
||||
"program": "node",
|
||||
"args": ["test/runner.node.mjs"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"environment": [
|
||||
{
|
||||
"name": "FORCE_COLOR",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_QUIET_LOGS",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_jest",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
|
||||
"value": "2",
|
||||
},
|
||||
],
|
||||
"console": "internalConsole",
|
||||
},
|
||||
],
|
||||
|
||||
4
.vscode/settings.json
vendored
@@ -15,6 +15,9 @@
|
||||
"src/bun.js/WebKit": true,
|
||||
"src/deps/*/**": true,
|
||||
"test/node.js/upstream": true,
|
||||
// This will fill up your whole search history.
|
||||
"test/js/node/test/fixtures": true,
|
||||
"test/js/node/test/common": true,
|
||||
},
|
||||
"search.followSymlinks": false,
|
||||
"search.useIgnoreFiles": true,
|
||||
@@ -135,6 +138,7 @@
|
||||
},
|
||||
"files.associations": {
|
||||
"*.idl": "cpp",
|
||||
"array": "cpp",
|
||||
},
|
||||
"C_Cpp.files.exclude": {
|
||||
"**/.vscode": true,
|
||||
|
||||
107
CMakeLists.txt
@@ -3,8 +3,8 @@ cmake_policy(SET CMP0091 NEW)
cmake_policy(SET CMP0067 NEW)

set(CMAKE_POLICY_DEFAULT_CMP0069 NEW)
set(Bun_VERSION "1.1.21")
set(WEBKIT_TAG 49907bff8781719bc2ded068b0c934f6d0074d1e)
set(Bun_VERSION "1.1.27")
set(WEBKIT_TAG 147ed53838e21525677492c27099567a6cd19c6b)

set(BUN_WORKDIR "${CMAKE_CURRENT_BINARY_DIR}")
message(STATUS "Configuring Bun ${Bun_VERSION} in ${BUN_WORKDIR}")
@@ -15,8 +15,24 @@ set(CMAKE_C_STANDARD 17)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
set(CMAKE_C_STANDARD_REQUIRED ON)

option(ZIG_CACHE_DIR "Path to the Zig cache directory" "")

if(NOT ZIG_CACHE_DIR)
SET(ZIG_CACHE_DIR "${BUN_WORKDIR}")
cmake_path(APPEND ZIG_CACHE_DIR "zig-cache")
endif()

set(LOCAL_ZIG_CACHE_DIR "${ZIG_CACHE_DIR}")
set(GLOBAL_ZIG_CACHE_DIR "${ZIG_CACHE_DIR}")

cmake_path(APPEND LOCAL_ZIG_CACHE_DIR "local")
cmake_path(APPEND GLOBAL_ZIG_CACHE_DIR "global")

# Used in process.version, process.versions.node, napi, and elsewhere
set(REPORTED_NODEJS_VERSION "22.3.0")
set(REPORTED_NODEJS_VERSION "22.6.0")

# Used in process.versions.modules and compared while loading V8 modules
set(REPORTED_NODEJS_ABI_VERSION "127")

# WebKit uses -std=gnu++20 on non-macOS non-Windows
# If we do not set this, it will crash at startup on the first memory allocation.
@@ -334,7 +350,7 @@ option(USE_LTO "Enable Link-Time Optimization" ${DEFAULT_LTO})

if(APPLE AND USE_LTO)
set(USE_LTO OFF)
message(WARNING "Link-Time Optimization is not supported on macOS because it requires -fuse-ld=lld and lld causes many segfaults on macOS (likely related to stack size)")
message(FATAL_ERROR "Link-Time Optimization is not supported on macOS because it requires -fuse-ld=lld and lld causes many segfaults on macOS (likely related to stack size)")
endif()

if(WIN32 AND USE_LTO)
@@ -463,6 +479,8 @@ elseif(NOT BUN_CPP_ONLY AND NOT BUN_LINK_ONLY AND NOT BUN_TIDY_ONLY AND NOT BUN_

message(STATUS "Installed Zig Compiler: ${ZIG_COMPILER}")
set_property(DIRECTORY APPEND PROPERTY CMAKE_CONFIGURE_DEPENDS "build.zig")

message(STATUS "Using zig cache directory: ${ZIG_CACHE_DIR}")
endif()

# Bun
@@ -487,7 +505,7 @@ if(USE_UNIFIED_SOURCES)
endif()

# CCache
find_program(CCACHE_PROGRAM sccache)
# find_program(CCACHE_PROGRAM sccache)
find_program(CCACHE_PROGRAM ccache)

if(CCACHE_PROGRAM)
@@ -649,6 +667,7 @@ file(GLOB BUN_CPP ${CONFIGURE_DEPENDS}
"${BUN_SRC}/bun.js/bindings/sqlite/*.cpp"
"${BUN_SRC}/bun.js/bindings/webcrypto/*.cpp"
"${BUN_SRC}/bun.js/bindings/webcrypto/*/*.cpp"
"${BUN_SRC}/bun.js/bindings/v8/*.cpp"
"${BUN_SRC}/deps/picohttpparser/picohttpparser.c"
)
list(APPEND BUN_RAW_SOURCES ${BUN_CPP})
@@ -689,6 +708,32 @@ add_custom_command(
)
list(APPEND BUN_RAW_SOURCES "${BUN_WORKDIR}/codegen/ZigGeneratedClasses.cpp")

if(NOT NO_CODEGEN)
# --- ErrorCode Generator ---
file(GLOB NODE_ERRORS_TS ${CONFIGURE_DEPENDS}
"${BUN_SRC}/bun.js/bindings/ErrorCode.ts"
)
add_custom_command(
OUTPUT "${BUN_WORKDIR}/codegen/ErrorCode+List.h" "${BUN_WORKDIR}/codegen/ErrorCode+Data.h" "${BUN_WORKDIR}/codegen/ErrorCode.zig"
COMMAND ${BUN_EXECUTABLE} run "${BUN_CODEGEN_SRC}/generate-node-errors.ts" "${BUN_WORKDIR}/codegen"
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
MAIN_DEPENDENCY "${BUN_CODEGEN_SRC}/generate-node-errors.ts"
DEPENDS ${NODE_ERRORS_TS}
VERBATIM
COMMENT "Generating ErrorCode.zig"
)

# This needs something to force it to be regenerated
WEBKIT_ADD_SOURCE_DEPENDENCIES(
"${BUN_SRC}/bun.js/bindings/ErrorCode.cpp"
"${BUN_WORKDIR}/codegen/ErrorCode+List.h"
)
WEBKIT_ADD_SOURCE_DEPENDENCIES(
"${BUN_SRC}/bun.js/bindings/ErrorCode.h"
"${BUN_WORKDIR}/codegen/ErrorCode+Data.h"
)
endif()

# --- JSSink Generator ---
add_custom_command(
OUTPUT "${BUN_WORKDIR}/codegen/JSSink.cpp"
@@ -762,7 +807,7 @@ if(NOT NO_CODEGEN)
OUTPUT ${BUN_IDENTIFIER_CACHE_OUT}
MAIN_DEPENDENCY "${BUN_SRC}/js_lexer/identifier_data.zig"
DEPENDS "${BUN_SRC}/js_lexer/identifier_cache.zig"
COMMAND ${ZIG_COMPILER} run "--zig-lib-dir" "${ZIG_LIB_DIR}" "${BUN_SRC}/js_lexer/identifier_data.zig"
COMMAND ${ZIG_COMPILER} run "--zig-lib-dir" "${ZIG_LIB_DIR}" "--cache-dir" "${LOCAL_ZIG_CACHE_DIR}" "--global-cache-dir" "${GLOBAL_ZIG_CACHE_DIR}" "${BUN_SRC}/js_lexer/identifier_data.zig"
VERBATIM
COMMENT "Building Identifier Cache"
)
@@ -783,6 +828,7 @@ if(NOT NO_CODEGEN)
"${BUN_SRC}/js/thirdparty/*.ts"
"${BUN_SRC}/js/internal/*.js"
"${BUN_SRC}/js/internal/*.ts"
"${BUN_SRC}/js/internal/cluster/*.ts"
"${BUN_SRC}/js/internal/util/*.js"
"${BUN_SRC}/js/internal/fs/*.ts"
"${BUN_SRC}/js/node/*.js"
@@ -911,10 +957,13 @@ if(NOT BUN_LINK_ONLY AND NOT BUN_CPP_ONLY)
"-Denable_logs=${ENABLE_LOGS}"
"-Dreported_nodejs_version=${REPORTED_NODEJS_VERSION}"
"-Dobj_format=${BUN_ZIG_OBJ_FORMAT}"
"--cache-dir" "${LOCAL_ZIG_CACHE_DIR}"
"--global-cache-dir" "${GLOBAL_ZIG_CACHE_DIR}"
DEPENDS
"${CMAKE_CURRENT_SOURCE_DIR}/build.zig"
"${ZIG_FILES}"
"${BUN_WORKDIR}/codegen/ZigGeneratedClasses.zig"
"${BUN_WORKDIR}/codegen/ErrorCode.zig"
"${BUN_WORKDIR}/codegen/ResolvedSourceTag.zig"
"${BUN_IDENTIFIER_CACHE_OUT}"
"${BUN_SRC}/api/schema.zig"
@@ -986,7 +1035,6 @@ add_compile_definitions(
"LIBUS_USE_BORINGSSL=1"
"WITH_BORINGSSL=1"
"STATICALLY_LINKED_WITH_JavaScriptCore=1"
"STATICALLY_LINKED_WITH_WTF=1"
"STATICALLY_LINKED_WITH_BMALLOC=1"
"BUILDING_WITH_CMAKE=1"
"JSC_OBJC_API_ENABLED=0"
@@ -997,11 +1045,24 @@ add_compile_definitions(
"BUILDING_JSCONLY__"
"BUN_DYNAMIC_JS_LOAD_PATH=\"${BUN_WORKDIR}/js\""
"REPORTED_NODEJS_VERSION=\"${REPORTED_NODEJS_VERSION}\""
"REPORTED_NODEJS_ABI_VERSION=${REPORTED_NODEJS_ABI_VERSION}"
)

if(NOT ASSERT_ENABLED)
if(APPLE)
add_compile_definitions("_LIBCXX_ENABLE_ASSERTIONS=0")
add_compile_definitions("_LIBCPP_HARDENING_MODE=_LIBCPP_HARDENING_MODE_NONE")
endif()

add_compile_definitions("NDEBUG=1")
else()
if(APPLE)
add_compile_definitions("_LIBCXX_ENABLE_ASSERTIONS=1")
add_compile_definitions("_LIBCPP_HARDENING_MODE=_LIBCPP_HARDENING_MODE_DEBUG")
elseif(CMAKE_SYSTEM_NAME STREQUAL "Linux")
add_compile_definitions("_GLIBCXX_ASSERTIONS=1")
endif()

add_compile_definitions("ASSERT_ENABLED=1")
endif()

@@ -1071,12 +1132,25 @@ if(CMAKE_BUILD_TYPE STREQUAL "Debug")
-Werror=uninitialized
-Werror=conditional-uninitialized
-Werror=suspicious-memaccess
-Werror=int-conversion
-Werror=nonnull
-Werror=move
-Werror=sometimes-uninitialized
-Werror=unused
-Wno-unused-function
-Wno-nullability-completeness
-Werror
-fsanitize=null
-fsanitize-recover=all
-fsanitize=bounds
-fsanitize=return
-fsanitize=nullability-arg
-fsanitize=nullability-assign
-fsanitize=nullability-return
-fsanitize=returns-nonnull-attribute
-fsanitize=unreachable
)
target_link_libraries(${bun} PRIVATE -fsanitize=null)
else()
target_compile_options(${bun} PUBLIC /Od /Z7)
endif()
@@ -1098,8 +1172,11 @@ elseif(CMAKE_BUILD_TYPE STREQUAL "Release")
-Werror=uninitialized
-Werror=conditional-uninitialized
-Werror=suspicious-memaccess
-Werror=int-conversion
-Werror=nonnull
-Werror=move
-Werror=sometimes-uninitialized
-Wno-nullability-completeness
-Werror
)
else()
@@ -1196,6 +1273,9 @@ else()
-fno-pic
-fno-pie
-faddrsig
-ffile-prefix-map="${CMAKE_CURRENT_SOURCE_DIR}"=.
-ffile-prefix-map="${BUN_DEPS_DIR}"=src/deps
-ffile-prefix-map="${BUN_DEPS_OUT_DIR}"=src/deps
)
endif()

@@ -1211,7 +1291,7 @@ endif()

if(UNIX AND NOT APPLE)
target_link_options(${bun} PUBLIC
-fuse-ld=lld
-fuse-ld=lld-${LLVM_VERSION}
-fno-pic
-static-libstdc++
-static-libgcc
@@ -1518,7 +1598,10 @@ endif()
if(NOT WIN32)
target_link_libraries(${bun} PRIVATE "${WEBKIT_LIB_DIR}/libWTF.a")
target_link_libraries(${bun} PRIVATE "${WEBKIT_LIB_DIR}/libJavaScriptCore.a")
target_link_libraries(${bun} PRIVATE "${WEBKIT_LIB_DIR}/libbmalloc.a")

if(NOT APPLE OR EXISTS "${WEBKIT_LIB_DIR}/libbmalloc.a")
target_link_libraries(${bun} PRIVATE "${WEBKIT_LIB_DIR}/libbmalloc.a")
endif()
else()
target_link_libraries(${bun} PRIVATE
"${WEBKIT_LIB_DIR}/WTF.lib"
@@ -1550,12 +1633,14 @@ endif()

if(BUN_TIDY_ONLY)
find_program(CLANG_TIDY_EXE NAMES "clang-tidy")
set(CLANG_TIDY_COMMAND "${CLANG_TIDY_EXE}" "-checks=-*,clang-analyzer-*,-clang-analyzer-webkit.UncountedLambdaCapturesChecker" "--fix" "--fix-errors" "--format-style=webkit" "--warnings-as-errors=*")

# webkit ones are disabled disabled because it's noisy, e.g. for JavaScriptCore/Lookup.h
set(CLANG_TIDY_COMMAND "${CLANG_TIDY_EXE}" "-checks=-*,clang-analyzer-*,-clang-analyzer-webkit.UncountedLambdaCapturesChecker,-clang-analyzer-optin.core.EnumCastOutOfRange,-clang-analyzer-webkit.RefCntblBaseVirtualDtor" "--fix" "--fix-errors" "--format-style=webkit" "--warnings-as-errors=*")
set_target_properties(${bun} PROPERTIES CXX_CLANG_TIDY "${CLANG_TIDY_COMMAND}")
endif()

if(BUN_TIDY_ONLY_EXTRA)
find_program(CLANG_TIDY_EXE NAMES "clang-tidy")
set(CLANG_TIDY_COMMAND "${CLANG_TIDY_EXE}" "-checks=-*,clang-analyzer-*,performance-*,-clang-analyzer-webkit.UncountedLambdaCapturesChecker" "--fix" "--fix-errors" "--format-style=webkit" "--warnings-as-errors=*")
set(CLANG_TIDY_COMMAND "${CLANG_TIDY_EXE}" "-checks=-*,clang-analyzer-*,performance-*,-clang-analyzer-webkit.UncountedLambdaCapturesChecker,-clang-analyzer-optin.core.EnumCastOutOfRange,-clang-analyzer-webkit.RefCntblBaseVirtualDtor" "--fix" "--fix-errors" "--format-style=webkit" "--warnings-as-errors=*")
set_target_properties(${bun} PROPERTIES CXX_CLANG_TIDY "${CLANG_TIDY_COMMAND}")
endif()
@@ -63,7 +63,7 @@ Bun requires LLVM 16 (`clang` is part of LLVM). This version requirement is to m
{% codetabs %}

```bash#macOS (Homebrew)
$ brew install llvm@16
$ brew install llvm@18
```

```bash#Ubuntu/Debian
@@ -52,7 +52,7 @@ ENV CI 1
ENV CPU_TARGET=${CPU_TARGET}
ENV BUILDARCH=${BUILDARCH}
ENV BUN_DEPS_OUT_DIR=${BUN_DEPS_OUT_DIR}
ENV BUN_ENABLE_LTO 1
ENV USE_LTO 1

ENV LC_CTYPE=en_US.UTF-8
ENV LC_ALL=en_US.UTF-8
Makefile (3 changes)
@@ -366,7 +366,7 @@ ifeq ($(OS_NAME),linux)
endif

ifeq ($(OS_NAME),darwin)
MACOS_MIN_FLAG=-mmacosx-version-min=$(MIN_MACOS_VERSION)
MACOS_MIN_FLAG=-mmacos-version-min=$(MIN_MACOS_VERSION)
POSIX_PKG_MANAGER=brew
INCLUDE_DIRS += $(MAC_INCLUDE_DIRS)
endif
@@ -1309,6 +1309,7 @@ jsc-build-mac-compile-debug:
-DCMAKE_BUILD_TYPE=Debug \
-DUSE_THIN_ARCHIVES=OFF \
-DENABLE_FTL_JIT=ON \
-DENABLE_MALLOC_HEAP_BREAKDOWN=ON \
-DCMAKE_EXPORT_COMPILE_COMMANDS=ON \
-DUSE_BUN_JSC_ADDITIONS=ON \
-DENABLE_BUN_SKIP_FAILING_ASSERTIONS=ON \
README.md (47 changes)
@@ -24,8 +24,6 @@

## What is Bun?

> **Bun is under active development.** Use it to speed up your development workflows or run simpler production code in resource-constrained environments like serverless functions. We're working on more complete Node.js compatibility and integration with existing frameworks. Join the [Discord](https://bun.sh/discord) and watch the [GitHub repository](https://github.com/oven-sh/bun) to keep tabs on future releases.

Bun is an all-in-one toolkit for JavaScript and TypeScript apps. It ships as a single executable called `bun`.

At its core is the _Bun runtime_, a fast JavaScript runtime designed as a drop-in replacement for Node.js. It's written in Zig and powered by JavaScriptCore under the hood, dramatically reducing startup times and memory usage.
@@ -87,16 +85,19 @@ bun upgrade --canary
## Quick links

- Intro

- [What is Bun?](https://bun.sh/docs/index)
- [Installation](https://bun.sh/docs/installation)
- [Quickstart](https://bun.sh/docs/quickstart)
- [TypeScript](https://bun.sh/docs/typescript)

- Templating

- [`bun init`](https://bun.sh/docs/cli/init)
- [`bun create`](https://bun.sh/docs/cli/bun-create)

- Runtime

- [`bun run`](https://bun.sh/docs/cli/run)
- [File types](https://bun.sh/docs/runtime/loaders)
- [TypeScript](https://bun.sh/docs/runtime/typescript)
@@ -115,6 +116,7 @@ bun upgrade --canary
- [Framework API](https://bun.sh/docs/runtime/framework)

- Package manager

- [`bun install`](https://bun.sh/docs/cli/install)
- [`bun add`](https://bun.sh/docs/cli/add)
- [`bun remove`](https://bun.sh/docs/cli/remove)
@@ -130,6 +132,7 @@ bun upgrade --canary
- [Overrides and resolutions](https://bun.sh/docs/install/overrides)

- Bundler

- [`Bun.build`](https://bun.sh/docs/bundler)
- [Loaders](https://bun.sh/docs/bundler/loaders)
- [Plugins](https://bun.sh/docs/bundler/plugins)
@@ -137,6 +140,7 @@ bun upgrade --canary
- [vs esbuild](https://bun.sh/docs/bundler/vs-esbuild)

- Test runner

- [`bun test`](https://bun.sh/docs/cli/test)
- [Writing tests](https://bun.sh/docs/test/writing)
- [Watch mode](https://bun.sh/docs/test/hot)
@@ -148,9 +152,11 @@ bun upgrade --canary
- [Code coverage](https://bun.sh/docs/test/coverage)

- Package runner

- [`bunx`](https://bun.sh/docs/cli/bunx)

- API

- [HTTP server](https://bun.sh/docs/api/http)
- [WebSockets](https://bun.sh/docs/api/websockets)
- [Workers](https://bun.sh/docs/api/workers)
@@ -183,9 +189,10 @@ bun upgrade --canary
- [Building Windows](https://bun.sh/docs/project/building-windows)
- [License](https://bun.sh/docs/project/licensing)

## Guides
## Guides

- Binary

- Binary
- [Convert a Blob to a DataView](https://bun.sh/guides/binary/blob-to-dataview)
- [Convert a Blob to a ReadableStream](https://bun.sh/guides/binary/blob-to-stream)
- [Convert a Blob to a string](https://bun.sh/guides/binary/blob-to-string)
@@ -209,7 +216,8 @@ bun upgrade --canary
- [Convert an ArrayBuffer to a Uint8Array](https://bun.sh/guides/binary/arraybuffer-to-typedarray)
- [Convert an ArrayBuffer to an array of numbers](https://bun.sh/guides/binary/arraybuffer-to-array)

- Ecosystem
- Ecosystem

- [Build a frontend using Vite and Bun](https://bun.sh/guides/ecosystem/vite)
- [Build an app with Astro and Bun](https://bun.sh/guides/ecosystem/astro)
- [Build an app with Next.js and Bun](https://bun.sh/guides/ecosystem/nextjs)
@@ -236,7 +244,8 @@ bun upgrade --canary
- [Use React and JSX](https://bun.sh/guides/ecosystem/react)
- [Add Sentry to a Bun app](https://bun.sh/guides/ecosystem/sentry)

- HTTP
- HTTP

- [Common HTTP server usage](https://bun.sh/guides/http/server)
- [Configure TLS on an HTTP server](https://bun.sh/guides/http/tls)
- [fetch with unix domain sockets in Bun](https://bun.sh/guides/http/fetch-unix)
@@ -250,7 +259,8 @@ bun upgrade --canary
- [Upload files via HTTP using FormData](https://bun.sh/guides/http/file-uploads)
- [Write a simple HTTP server](https://bun.sh/guides/http/simple)

- Install
- Install

- [Add a dependency](https://bun.sh/guides/install/add)
- [Add a development dependency](https://bun.sh/guides/install/add-dev)
- [Add a Git dependency](https://bun.sh/guides/install/add-git)
@@ -268,7 +278,8 @@ bun upgrade --canary
- [Using bun install with an Azure Artifacts npm registry](https://bun.sh/guides/install/azure-artifacts)
- [Using bun install with Artifactory](https://bun.sh/guides/install/jfrog-artifactory)

- Process
- Process

- [Get the process uptime in nanoseconds](https://bun.sh/guides/process/nanoseconds)
- [Listen for CTRL+C](https://bun.sh/guides/process/ctrl-c)
- [Listen to OS signals](https://bun.sh/guides/process/os-signals)
@@ -279,7 +290,8 @@ bun upgrade --canary
- [Spawn a child process](https://bun.sh/guides/process/spawn)
- [Spawn a child process and communicate using IPC](https://bun.sh/guides/process/ipc)

- Read file
- Read file

- [Check if a file exists](https://bun.sh/guides/read-file/exists)
- [Get the MIME type of a file](https://bun.sh/guides/read-file/mime)
- [Read a file as a ReadableStream](https://bun.sh/guides/read-file/stream)
@@ -290,7 +302,8 @@ bun upgrade --canary
- [Read a JSON file](https://bun.sh/guides/read-file/json)
- [Watch a directory for changes](https://bun.sh/guides/read-file/watch)

- Runtime
- Runtime

- [Debugging Bun with the VS Code extension](https://bun.sh/guides/runtime/vscode-debugger)
- [Debugging Bun with the web debugger](https://bun.sh/guides/runtime/web-debugger)
- [Define and replace static globals & constants](https://bun.sh/guides/runtime/define-constant)
@@ -305,7 +318,8 @@ bun upgrade --canary
- [Set a time zone in Bun](https://bun.sh/guides/runtime/timezone)
- [Set environment variables](https://bun.sh/guides/runtime/set-env)

- Streams
- Streams

- [Convert a Node.js Readable to a Blob](https://bun.sh/guides/streams/node-readable-to-blob)
- [Convert a Node.js Readable to a string](https://bun.sh/guides/streams/node-readable-to-string)
- [Convert a Node.js Readable to an ArrayBuffer](https://bun.sh/guides/streams/node-readable-to-arraybuffer)
@@ -318,7 +332,8 @@ bun upgrade --canary
- [Convert a ReadableStream to an ArrayBuffer](https://bun.sh/guides/streams/to-arraybuffer)
- [Convert a ReadableStream to JSON](https://bun.sh/guides/streams/to-json)

- Test
- Test

- [Bail early with the Bun test runner](https://bun.sh/guides/test/bail)
- [Generate code coverage reports with the Bun test runner](https://bun.sh/guides/test/coverage)
- [Mark a test as a "todo" with the Bun test runner](https://bun.sh/guides/test/todo-tests)
@@ -336,7 +351,8 @@ bun upgrade --canary
- [Use snapshot testing in `bun test`](https://bun.sh/guides/test/snapshot)
- [Write browser DOM tests with Bun and happy-dom](https://bun.sh/guides/test/happy-dom)

- Util
- Util

- [Check if the current file is the entrypoint](https://bun.sh/guides/util/entrypoint)
- [Check if two objects are deeply equal](https://bun.sh/guides/util/deep-equals)
- [Compress and decompress data with DEFLATE](https://bun.sh/guides/util/deflate)
@@ -355,13 +371,14 @@ bun upgrade --canary
- [Hash a password](https://bun.sh/guides/util/hash-a-password)
- [Sleep for a fixed number of milliseconds](https://bun.sh/guides/util/sleep)

- WebSocket
- WebSocket

- [Build a publish-subscribe WebSocket server](https://bun.sh/guides/websocket/pubsub)
- [Build a simple WebSocket server](https://bun.sh/guides/websocket/simple)
- [Enable compression for WebSocket messages](https://bun.sh/guides/websocket/compression)
- [Set per-socket contextual data on a WebSocket](https://bun.sh/guides/websocket/context)

- Write file
- Write file
- [Append content to a file](https://bun.sh/guides/write-file/append)
- [Copy a file to another location](https://bun.sh/guides/write-file/file-cp)
- [Delete a file](https://bun.sh/guides/write-file/unlink)
@@ -1,4 +1,4 @@
import { run, bench } from "mitata";
import { bench, run } from "mitata";

bench("sync", () => {});
bench("async", async () => {});

@@ -1,4 +1,4 @@
import { run, bench } from "../node_modules/mitata/src/cli.mjs";
import { bench, run } from "../node_modules/mitata/src/cli.mjs";

bench("sync", () => {});
bench("async", async () => {});

@@ -1,4 +1,4 @@
import { run, bench } from "mitata";
import { bench, run } from "mitata";

bench("sync", () => {});
bench("async", async () => {});

@@ -1,5 +1,5 @@
import { copyFileSync, writeFileSync, readFileSync, statSync } from "node:fs";
import { bench, run } from "mitata";
import { copyFileSync, statSync, writeFileSync } from "node:fs";

function runner(ready) {
for (let size of [1, 10, 100, 1000, 10000, 100000, 1000000, 10000000]) {
bench/deepEqual/map.js (new file, 27 lines)
@@ -0,0 +1,27 @@
import { expect } from "bun:test";
import { bench, run } from "mitata";

const MAP_SIZE = 10_000;

function* genPairs(count) {
for (let i = 0; i < MAP_SIZE; i++) {
yield ["k" + i, "v" + i];
}
}

class CustomMap extends Map {
abc = 123;
constructor(iterable) {
super(iterable);
}
}

const a = new Map(genPairs());
const b = new Map(genPairs());
bench("deepEqual Map", () => expect(a).toEqual(b));

const x = new CustomMap(genPairs());
const y = new CustomMap(genPairs());
bench("deepEqual CustomMap", () => expect(x).toEqual(y));

await run();
bench/deepEqual/set.js (new file, 27 lines)
@@ -0,0 +1,27 @@
import { expect } from "bun:test";
import { bench, run } from "mitata";

const SET_SIZE = 10_000;

function* genValues(count) {
for (let i = 0; i < SET_SIZE; i++) {
yield "v" + i;
}
}

class CustomSet extends Set {
abc = 123;
constructor(iterable) {
super(iterable);
}
}

const a = new Set(genValues());
const b = new Set(genValues());
bench("deepEqual Set", () => expect(a).toEqual(b));

const x = new CustomSet(genValues());
const y = new CustomSet(genValues());
bench("deepEqual CustomSet", () => expect(x).toEqual(y));

await run();
@@ -1,4 +1,3 @@
import EventEmitter3 from "eventemitter3";
import { group } from "mitata";
import EventEmitterNative from "node:events";

@@ -1,5 +1,5 @@
// bun:test automatically rewrites this import to bun:test when run in bun
import { test, expect } from "@jest/globals";
import { expect, test } from "@jest/globals";

const N = parseInt(process.env.RUN_COUNT || "10000", 10);
if (!Number.isSafeInteger(N)) {
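The comment at the top of the hunk above notes that imports from `@jest/globals` are rewritten to `bun:test` when the file runs under Bun, so the same test file works with both runners. A minimal illustrative test file relying on that behavior (hypothetical file, not part of this diff):

```js
// example.test.js — written against @jest/globals; under `bun test` the import
// resolves to bun:test, so the file runs unchanged with Bun's test runner.
import { expect, test } from "@jest/globals";

test("addition", () => {
  expect(1 + 1).toBe(2);
});
```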
@@ -1,4 +1,4 @@
import { test, expect } from "vitest";
import { expect, test } from "vitest";

const N = parseInt(process.env.RUN_COUNT || "10000", 10);
if (!Number.isSafeInteger(N)) {

@@ -1,5 +1,5 @@
import { ptr, dlopen, CString, toBuffer } from "bun:ffi";
import { run, bench, group } from "mitata";
import { CString, dlopen, ptr } from "bun:ffi";
import { bench, group, run } from "mitata";

const { napiNoop, napiHash, napiString } = require(import.meta.dir + "/src/ffi_napi_bench.node");

@@ -1,4 +1,4 @@
import { run, bench, group } from "../node_modules/mitata/src/cli.mjs";
import { bench, group, run } from "../node_modules/mitata/src/cli.mjs";

const extension = "darwin" !== Deno.build.os ? "so" : "dylib";
const path = new URL("src/target/release/libffi_napi_bench." + extension, import.meta.url).pathname;

@@ -1,4 +1,4 @@
import { run, bench, group } from "mitata";
import { bench, group, run } from "mitata";
import { createRequire } from "node:module";

const require = createRequire(import.meta.url);

@@ -1,5 +1,5 @@
import braces from "braces";
import { group, bench, run } from "mitata";
import { bench, group, run } from "mitata";

// const iterations = 1000;
const iterations = 100;

@@ -1,6 +1,6 @@
import { run, bench, group } from "mitata";
import fg from "fast-glob";
import { fdir } from "fdir";
import { bench, group, run } from "mitata";

const normalPattern = "*.ts";
const recursivePattern = "**/*.ts";

@@ -1,5 +1,5 @@
import { run, bench, group } from "mitata";
import { gzipSync, gunzipSync } from "bun";
import { gunzipSync, gzipSync } from "bun";
import { bench, group, run } from "mitata";

const data = await Bun.file(require.resolve("@babel/standalone/babel.min.js")).arrayBuffer();

@@ -1,4 +1,4 @@
import { run, bench } from "../node_modules/mitata/src/cli.mjs";
import { bench, run } from "../node_modules/mitata/src/cli.mjs";

const data = new TextEncoder().encode("Hello World!".repeat(9999));

@@ -1,7 +1,7 @@
import { run, bench } from "mitata";
import { gzipSync, gunzipSync } from "zlib";
import { createRequire } from "module";
import { readFileSync } from "fs";
import { bench, run } from "mitata";
import { createRequire } from "module";
import { gunzipSync, gzipSync } from "zlib";

const require = createRequire(import.meta.url);
const data = readFileSync(require.resolve("@babel/standalone/babel.min.js"));

@@ -1,6 +1,5 @@
import { Main } from "./main";
import classNames from "classnames";
import ReactDOM from "react-dom";
import { Main } from "./main";

const Base = ({}) => {
const name = typeof location !== "undefined" ? decodeURIComponent(location.search.substring(1)) : null;

@@ -4,11 +4,11 @@
* For more information, see https://remix.run/docs/en/main/file-conventions/entry.server
*/

import { PassThrough } from "node:stream";
import type { EntryContext } from "@remix-run/node";
import { Response } from "@remix-run/node";
import { RemixServer } from "@remix-run/react";
import isbot from "isbot";
import { PassThrough } from "node:stream";
import { renderToPipeableStream } from "react-dom/server";

const ABORT_DELAY = 5_000;

@@ -1,21 +1,21 @@
import { bench, run } from "mitata";
import {
arch,
cpus,
endianness,
arch,
uptime,
networkInterfaces,
getPriority,
totalmem,
freemem,
getPriority,
homedir,
hostname,
loadavg,
networkInterfaces,
platform,
release,
setPriority,
tmpdir,
totalmem,
type,
uptime,
userInfo,
version,
} from "node:os";

@@ -1,21 +1,21 @@
import { bench, run } from "mitata";
import {
arch,
cpus,
endianness,
arch,
uptime,
networkInterfaces,
getPriority,
totalmem,
freemem,
getPriority,
homedir,
hostname,
loadavg,
networkInterfaces,
platform,
release,
setPriority,
tmpdir,
totalmem,
type,
uptime,
userInfo,
version,
} from "node:os";

@@ -1,5 +1,5 @@
import { renderToReadableStream } from "https://esm.run/react-dom/server";
import * as React from "https://esm.run/react";
import { renderToReadableStream } from "https://esm.run/react-dom/server";

const App = () => (
<html>

@@ -1,6 +1,6 @@
// react-ssr.tsx
import { renderToPipeableStream } from "react-dom/server.node";
import React from "react";
import { renderToPipeableStream } from "react-dom/server.node";
const http = require("http");
const App = () => (
<html>

@@ -1,5 +1,5 @@
import type { ActionFunction, LoaderFunction } from "remix";
import { useParams } from "remix";
import type { LoaderFunction, ActionFunction } from "remix";

export const loader: LoaderFunction = async ({ params }) => {
console.log(params.postId);

@@ -1,5 +1,5 @@
import { bench, group, run } from "./runner.mjs";
import * as assert from "assert";
import { bench, run } from "./runner.mjs";

bench("deepEqual", () => {
assert.deepEqual({ foo: "123", bar: "baz" }, { foo: "123", bar: "baz" });

@@ -1,7 +1,7 @@
// @runtime bun,node,deno
import { bench, run } from "./runner.mjs";
import process from "node:process";
import { Buffer } from "node:buffer";
import process from "node:process";
import { bench, run } from "./runner.mjs";

const N = parseInt(process.env.RUN_COUNTER ?? "10000", 10);
var isBuffer = new Buffer(0);

@@ -1,6 +1,6 @@
import { bench, run } from "./runner.mjs";
import { Buffer } from "node:buffer";
import crypto from "node:crypto";
import { bench, run } from "./runner.mjs";

const bigBuffer = Buffer.from("hello world".repeat(10000));
const converted = bigBuffer.toString("base64");

@@ -1,6 +1,6 @@
import { bench, group, run } from "./runner.mjs";
import { readFileSync } from "fs";
import { allocUnsafe } from "bun";
import { readFileSync } from "fs";
import { bench, group, run } from "./runner.mjs";

function polyfill(chunks) {
var size = 0;

@@ -1,6 +1,6 @@
// so it can run in environments without node module resolution
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
import crypto from "node:crypto";
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
var foo = new Uint8Array(65536);
bench("crypto.getRandomValues(65536)", () => {
crypto.getRandomValues(foo);

@@ -1,5 +1,5 @@
import { bench, group, run } from "./runner.mjs";
import fastDeepEquals from "fast-deep-equal/es6/index";
import { bench, group, run } from "./runner.mjs";
// const Date = globalThis.Date;

function func1() {}

@@ -1,5 +1,5 @@
import { dns } from "bun";
import { bench, run, group } from "./runner.mjs";
import { bench, group, run } from "./runner.mjs";

async function forEachBackend(name, fn) {
group(name, () => {

@@ -1,4 +1,4 @@
import { run, bench } from "../node_modules/mitata/src/cli.mjs";
import { bench, run } from "../node_modules/mitata/src/cli.mjs";

const encoder = new TextEncoder();

@@ -1,7 +1,4 @@
import { group } from "./runner.mjs";
import { bench, run } from "./runner.mjs";
import { encode as htmlEntityEncode } from "html-entities";
import { escape as heEscape } from "he";
import { bench, group, run } from "./runner.mjs";

var bunEscapeHTML = globalThis.escapeHTML || Bun.escapeHTML;

@@ -1,4 +1,4 @@
import { viewSource, dlopen, CString, ptr, toBuffer, toArrayBuffer, FFIType, callback } from "bun:ffi";
import { dlopen } from "bun:ffi";
import { bench, group, run } from "./runner.mjs";

const types = {

@@ -1,5 +1,5 @@
import { bench, run } from "./runner.mjs";
import { IncomingMessage } from "node:http";
import { bench, run } from "./runner.mjs";

const headers = {
date: "Mon, 06 Nov 2023 05:12:49 GMT",

@@ -1,6 +1,6 @@
// @runtime node, bun
import { bench, run } from "./runner.mjs";
import * as vm from "node:vm";
import { bench, run } from "./runner.mjs";

const context = {
animal: "cat",

@@ -24,7 +24,7 @@ const obj = {
w: 23,
};

import { bench, group, run } from "./runner.mjs";
import { bench, run } from "./runner.mjs";

var val = 0;
bench("Object.values(literal)", () => {

@@ -1,4 +1,4 @@
import { pbkdf2, pbkdf2Sync } from "node:crypto";
import { pbkdf2 } from "node:crypto";

import { bench, run } from "./runner.mjs";

@@ -1,5 +1,5 @@
import { bench, run } from "mitata";
import { peek } from "bun";
import { bench, run } from "mitata";

let pending = Bun.sleep(1000);
let resolved = Promise.resolve(1);

@@ -1,4 +1,3 @@
import { group } from "./runner.mjs";
import { bench, run } from "./runner.mjs";
bench("performance.now x 1000", () => {
for (let i = 0; i < 1000; i++) {

@@ -1,5 +1,5 @@
import { bench, run } from "./runner.mjs";
import { performance } from "perf_hooks";
import { bench, run } from "./runner.mjs";

bench("process.memoryUsage()", () => {
process.memoryUsage();

@@ -1,6 +1,6 @@
import { bench, group, run } from "./runner.mjs";
import { renderToReadableStream } from "react-dom/server.browser";
import { renderToReadableStream as renderToReadableStreamBun } from "react-dom/server";
import { renderToReadableStream } from "react-dom/server.browser";
import { bench, group, run } from "./runner.mjs";

const App = () => (
<div>

@@ -1,7 +1,7 @@
import { tmpdir } from "node:os";
import { bench, group, run } from "./runner.mjs";
import { createReadStream, writeFileSync } from "node:fs";
import { tmpdir } from "node:os";
import { sep } from "node:path";
import { bench, run } from "./runner.mjs";

if (!Promise.withResolvers) {
Promise.withResolvers = function () {
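The hunk above ends at a `Promise.withResolvers` polyfill whose body the diff view truncates. For context, a minimal sketch of the usual shape of such a polyfill (illustrative only, not necessarily the repository's exact code):

```js
// Illustrative Promise.withResolvers polyfill: expose resolve/reject alongside the promise.
if (!Promise.withResolvers) {
  Promise.withResolvers = function () {
    let resolve, reject;
    const promise = new Promise((res, rej) => {
      resolve = res;
      reject = rej;
    });
    return { promise, resolve, reject };
  };
}
```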
@@ -1,10 +1,10 @@
import { readdirSync, readdir as readdirCb } from "fs";
import { createHash } from "crypto";
import { readdirSync } from "fs";
import { readdir } from "fs/promises";
import { bench, run } from "./runner.mjs";
import { relative, resolve } from "path";
import { argv } from "process";
import { fileURLToPath } from "url";
import { relative, resolve } from "path";
import { createHash } from "crypto";
import { bench, run } from "./runner.mjs";

let dir = resolve(argv.length > 2 ? argv[2] : fileURLToPath(new URL("../../node_modules", import.meta.url)));
if (dir.includes(process.cwd())) {

@@ -1,6 +1,6 @@
import { bench, run } from "./runner.mjs";
import { readFileSync, existsSync } from "node:fs";
import { readFileSync } from "node:fs";
import { readFile } from "node:fs/promises";
import { bench, run } from "./runner.mjs";

bench(`readFileSync(/tmp/404-not-found)`, () => {
try {

@@ -1,7 +1,7 @@
import { realpathSync } from "node:fs";
import { bench, run } from "./runner.mjs";
const count = parseInt(process.env.ITERATIONS || "1", 10) || 1;
const arg = process.argv[process.argv.length - 1];
import { bench, run } from "./runner.mjs";

bench("realpathSync x " + count, () => {
for (let i = 0; i < count; i++) realpathSync(arg, "utf-8");

@@ -1,4 +1,4 @@
import decoding from "./jsx-entity-decoding";
import ReactDOMServer from "react-dom/server.browser";
import decoding from "./jsx-entity-decoding";

console.log(ReactDOMServer.renderToString(decoding));

@@ -1,7 +1,6 @@
import { bench, run } from "./runner.mjs";
import { builtinModules } from "node:module";
import { writeFile } from "node:fs/promises";
import { spawnSync } from "child_process";
import { writeFile } from "node:fs/promises";
import { builtinModules } from "node:module";

for (let builtin of builtinModules) {
const path = `/tmp/require.${builtin.replaceAll("/", "_")}.cjs`;

@@ -1,5 +1,5 @@
import { existsSync, mkdirSync, promises } from "node:fs";
import { tmpdir } from "node:os";
import { promises, existsSync, mkdirSync } from "node:fs";
const count = 1024 * 12;

var queue = new Array(count);

@@ -1,9 +1,9 @@
// note: this isn't done yet
// we look for `// @runtime` in the file to determine which runtimes to run the benchmark in
import { spawnSync } from "bun";
import { readdirSync, readFileSync } from "node:fs";
import { Database } from "bun:sqlite";
import { extname, basename } from "path";
import { readdirSync, readFileSync } from "node:fs";
import { basename, extname } from "path";

const exts = [".js", ".ts", ".mjs", ".tsx"];
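The comment in the hunk above describes looking for a `// @runtime` pragma in each benchmark file to decide which runtimes it should run in (and notes the script is unfinished). A rough sketch of that idea, using hypothetical helper names rather than the repository's actual code:

```js
// Illustrative only: scan benchmark files for a "// @runtime bun,node,deno" pragma.
import { readdirSync, readFileSync } from "node:fs";
import { extname, join } from "node:path";

const exts = [".js", ".ts", ".mjs", ".tsx"];

function runtimesFor(file) {
  const source = readFileSync(file, "utf8");
  const match = source.match(/\/\/\s*@runtime\s+(.+)/);
  return match ? match[1].split(",").map(s => s.trim()) : [];
}

for (const name of readdirSync(".")) {
  if (exts.includes(extname(name))) {
    console.log(name, runtimesFor(join(".", name)));
  }
}
```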
@@ -1,5 +1,5 @@
import * as Mitata from "../node_modules/mitata/src/cli.mjs";
import process from "node:process";
import * as Mitata from "../node_modules/mitata/src/cli.mjs";

const asJSON = !!process?.env?.BENCHMARK_RUNNER;
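Many of the benchmarks in this diff import `bench`, `group`, and `run` from a local `./runner.mjs`; the hunk above suggests that wrapper re-exports mitata and checks `BENCHMARK_RUNNER` to decide on JSON output. A hedged sketch of such a wrapper (assumed shape; the actual file in the repository may differ):

```js
// Illustrative runner wrapper around mitata; not the repository's exact runner.mjs.
import * as Mitata from "mitata";
import process from "node:process";

const asJSON = !!process?.env?.BENCHMARK_RUNNER;

export const bench = Mitata.bench;
export const group = Mitata.group;

export function run(opts = {}) {
  // Emit machine-readable JSON when driven by an external benchmark harness.
  return Mitata.run({ json: asJSON, ...opts });
}
```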
@@ -1,4 +1,4 @@
import { serialize, deserialize } from "node:v8";
import { deserialize, serialize } from "node:v8";
import { bench, run } from "./runner.mjs";
const obj = {
"id": 1296269,

@@ -1,5 +1,3 @@
import { bench, run } from "../node_modules/mitata/src/cli.mjs";

let count = 20_000_000;
const batchSize = 1_000_000;
console.time("Run");

@@ -1,5 +1,5 @@
import { bench, run } from "./runner.mjs";
import { SHA512 } from "bun";
import { bench, run } from "./runner.mjs";

bench('SHA512.hash("hello world")', () => {
SHA512.hash("hello world");

@@ -1,5 +1,5 @@
import { bench, run } from "./runner.mjs";
import { createHash } from "crypto";
import { bench, run } from "./runner.mjs";

bench('createHash("sha256").update("hello world").digest()', () => {
createHash("sha256").update("hello world").digest();

@@ -1,6 +1,6 @@
import { $ as zx } from "zx";
import { $ as execa$ } from "execa";
import { bench, run, group } from "./runner.mjs";
import { $ as zx } from "zx";
import { bench, group, run } from "./runner.mjs";

const execa = execa$({ stdio: "ignore", cwd: import.meta.dirname });

@@ -1,6 +1,6 @@
import { readdirSync, statSync } from "fs";
import { bench, run } from "./runner.mjs";
import { statSync } from "fs";
import { argv } from "process";
import { bench, run } from "./runner.mjs";

const dir = argv.length > 2 ? argv[2] : "/tmp";
Some files were not shown because too many files have changed in this diff.