mirror of https://github.com/oven-sh/bun (synced 2026-02-03 07:28:53 +00:00)

Compare commits: ciro/fix-p ... fix-node-h (459 commits)
@@ -1,12 +1,12 @@
-ARG LLVM_VERSION="18"
-ARG REPORTED_LLVM_VERSION="18.1.8"
+ARG LLVM_VERSION="19"
+ARG REPORTED_LLVM_VERSION="19.1.7"
 ARG OLD_BUN_VERSION="1.1.38"
 ARG DEFAULT_CFLAGS="-mno-omit-leaf-frame-pointer -fno-omit-frame-pointer -ffunction-sections -fdata-sections -faddrsig -fno-unwind-tables -fno-asynchronous-unwind-tables"
 ARG DEFAULT_CXXFLAGS="-flto=full -fwhole-program-vtables -fforce-emit-vtables"
 ARG BUILDKITE_AGENT_TAGS="queue=linux,os=linux,arch=${TARGETARCH}"
 
 FROM --platform=$BUILDPLATFORM ubuntu:20.04 as base-arm64
-FROM --platform=$BUILDPLATFORM ubuntu:18.04 as base-amd64
+FROM --platform=$BUILDPLATFORM ubuntu:20.04 as base-amd64
 FROM base-$TARGETARCH as base
 
 ARG LLVM_VERSION
@@ -107,9 +107,9 @@ const buildPlatforms = [
   { os: "linux", arch: "aarch64", distro: "amazonlinux", release: "2023", features: ["docker"] },
   { os: "linux", arch: "x64", distro: "amazonlinux", release: "2023", features: ["docker"] },
   { os: "linux", arch: "x64", baseline: true, distro: "amazonlinux", release: "2023", features: ["docker"] },
-  { os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.20" },
-  { os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.20" },
-  { os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.20" },
+  { os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.21" },
+  { os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.21" },
+  { os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.21" },
   { os: "windows", arch: "x64", release: "2019" },
   { os: "windows", arch: "x64", baseline: true, release: "2019" },
 ];

@@ -134,9 +134,9 @@ const testPlatforms = [
   { os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "24.04", tier: "latest" },
   { os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "22.04", tier: "previous" },
   { os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "20.04", tier: "oldest" },
-  { os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.20", tier: "latest" },
-  { os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.20", tier: "latest" },
-  { os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.20", tier: "latest" },
+  { os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.21", tier: "latest" },
+  { os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.21", tier: "latest" },
+  { os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.21", tier: "latest" },
   { os: "windows", arch: "x64", release: "2019", tier: "oldest" },
   { os: "windows", arch: "x64", release: "2019", baseline: true, tier: "oldest" },
 ];

@@ -323,6 +323,7 @@ function getCppAgent(platform, options) {
  */
 function getZigAgent(platform, options) {
   const { arch } = platform;
 
   return {
     queue: "build-zig",
   };

@@ -383,14 +384,22 @@ function getBuildEnv(target, options) {
   const { canary } = options;
   const revision = typeof canary === "number" ? canary : 1;
 
+  const isMusl = abi === "musl";
+
+  let CMAKE_BUILD_TYPE = release ? "Release" : profile === "debug" ? "Debug" : "RelWithDebInfo";
+  if (isMusl && release) {
+    CMAKE_BUILD_TYPE = "MinSizeRel";
+  }
+
   return {
-    CMAKE_BUILD_TYPE: release ? "Release" : profile === "debug" ? "Debug" : "RelWithDebInfo",
+    CMAKE_BUILD_TYPE,
     ENABLE_BASELINE: baseline ? "ON" : "OFF",
     ENABLE_CANARY: revision > 0 ? "ON" : "OFF",
     CANARY_REVISION: revision,
     ENABLE_ASSERTIONS: release ? "OFF" : "ON",
     ENABLE_LOGS: release ? "OFF" : "ON",
-    ABI: abi === "musl" ? "musl" : undefined,
+    ABI: isMusl ? "musl" : undefined,
     CMAKE_TLS_VERIFY: "0",
   };
 }

@@ -462,7 +471,7 @@ function getBuildZigStep(platform, options) {
     cancel_on_build_failing: isMergeQueue(),
     env: getBuildEnv(platform, options),
     command: `bun run build:ci --target bun-zig --toolchain ${toolchain}`,
-    timeout_in_minutes: 25,
+    timeout_in_minutes: 35,
   };
 }

@@ -619,6 +628,21 @@ function getReleaseStep(buildPlatforms, options) {
   };
 }
 
+/**
+ * @returns {Step}
+ */
+function getBenchmarkStep() {
+  return {
+    key: "benchmark",
+    label: "📊",
+    agents: {
+      queue: "build-zig",
+    },
+    command: "bun .buildkite/scripts/upload-benchmark.ts",
+    depends_on: [`linux-x64-build-bun`],
+  };
+}
+
 /**
  * @typedef {Object} Pipeline
  * @property {Step[]} [steps]

@@ -1097,6 +1121,8 @@ async function getPipeline(options = {}) {
     steps.push(getReleaseStep(buildPlatforms, options));
   }
 
+  steps.push(getBenchmarkStep());
+
   /** @type {Map<string, GroupStep>} */
   const stepsByGroup = new Map();
.buildkite/scripts/upload-benchmark.ts (new file, 7 lines)

import { getCommit, getSecret } from "../../scripts/utils.mjs";

console.log("Submitting...");
const response = await fetch(getSecret("BENCHMARK_URL") + "?tag=_&commit=" + getCommit() + "&artifact_url=_", {
  method: "POST",
});
console.log("Got status " + response.status);
@@ -158,25 +158,36 @@ function upload_s3_file() {
   run_command aws --endpoint-url="$AWS_ENDPOINT" s3 cp "$file" "s3://$AWS_BUCKET/$folder/$file"
 }
 
-function send_bench_webhook() {
-  if [ -z "$BENCHMARK_URL" ]; then
-    echo "error: \$BENCHMARK_URL is not set"
-    # exit 1 # TODO: this isn't live yet
+function send_discord_announcement() {
+  local value=$(buildkite-agent secret get "BUN_ANNOUNCE_CANARY_WEBHOOK_URL")
+  if [ -z "$value" ]; then
+    echo "warn: BUN_ANNOUNCE_CANARY_WEBHOOK_URL not set, skipping Discord announcement"
     return
   fi
 
-  local tag="$1"
+  local version="$1"
   local commit="$BUILDKITE_COMMIT"
-  local artifact_path="${commit}"
+  local short_sha="${commit:0:7}"
+  local commit_url="https://github.com/oven-sh/bun/commit/$commit"
 
-  if [ "$tag" == "canary" ]; then
-    artifact_path="${commit}-canary"
+  if [ "$version" == "canary" ]; then
+    local json_payload=$(cat <<EOF
+{
+  "embeds": [{
+    "title": "New Bun Canary now available",
+    "description": "A new canary build of Bun has been automatically uploaded ([${short_sha}](${commit_url})). To upgrade, run:\n\n\`\`\`shell\nbun upgrade --canary\n\`\`\`\nCommit: \`${commit}\`",
+    "color": 16023551,
+    "timestamp": "$(date -u +%Y-%m-%dT%H:%M:%SZ)"
+  }]
+}
+EOF
+)
+
+    curl -H "Content-Type: application/json" \
+      -d "$json_payload" \
+      -sf \
+      "$value" >/dev/null
   fi
-
-  local artifact_url="https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/$artifact_path/bun-linux-x64.zip"
-  local webhook_url="$BENCHMARK_URL?tag=$tag&commit=$commit&artifact_url=$artifact_url"
-
-  curl -X POST "$webhook_url"
 }
 
 function create_release() {

@@ -190,6 +201,8 @@ function create_release() {
   local artifacts=(
     bun-darwin-aarch64.zip
     bun-darwin-aarch64-profile.zip
     bun-darwin-x64.zip
     bun-darwin-x64-profile.zip
     bun-linux-aarch64.zip
     bun-linux-aarch64-profile.zip
     bun-linux-x64.zip

@@ -227,7 +240,7 @@ function create_release() {
 
   update_github_release "$tag"
   create_sentry_release "$tag"
-  send_bench_webhook "$tag"
+  send_discord_announcement "$tag"
 }
 
 function assert_canary() {
.clangd (3 lines changed)

@@ -6,6 +6,3 @@ CompileFlags:
 
 Diagnostics:
   UnusedIncludes: None
-
-HeaderInsertion:
-  IncludeBlocks: Preserve # Do not auto-include headers.
.cursor/rules/building-bun.mdc (new file, 27 lines)

---
description: How to build Bun
globs:
---
# How to build Bun

## CMake

Bun is built using CMake, which you can find in `CMakeLists.txt` and in the `cmake/` directory.

* `CMakeLists.txt`
* `cmake/`
  * `Globals.cmake` - macros and functions used by all the other files
  * `Options.cmake` - build options for configuring the build (e.g. debug/release mode)
  * `CompilerFlags.cmake` - compiler and linker flags used by all the targets
  * `tools/` - setup scripts for various build tools (e.g. llvm, zig, webkit, rust, etc.)
  * `targets/` - targets for bun and its dependencies (e.g. brotli, boringssl, libuv, etc.)

## How to

There are `package.json` scripts that make it easy to build Bun without calling CMake directly, for example:

```sh
bun run build # builds a debug build: `build/debug/bun-debug`
bun run build:release # builds a release build: `build/release/bun`
bun run build:assert # builds a release build with debug assertions: `build/assert/bun`
```
.cursor/rules/dev-server-tests.mdc (new file, 139 lines)

---
description: Writing HMR/Dev Server tests
globs: test/bake/*
---

# Writing HMR/Dev Server tests

Dev server tests validate that hot-reloading is robust, correct, and reliable. Remember to write thorough, yet concise tests.

## File Structure

- `test/bake/bake-harness.ts` - shared utilities and test harness
  - primary test functions `devTest` / `prodTest` / `devAndProductionTest`
  - class `Dev` (controls the subprocess for the dev server)
  - class `Client` (controls a happy-dom subprocess that keeps the page open)
  - more helpers
- `test/bake/client-fixture.mjs` - the subprocess that `Client` controls. It loads a page and uses IPC to query parts of the page, run JavaScript, and much more.
- `test/bake/dev/*.test.ts` - these call `devTest` to test the dev server and hot reloading
- `test/bake/dev-and-prod.ts` - these use `devAndProductionTest` to run the same test in dev and production mode. These tests cannot really test hot reloading, for obvious reasons.

## Categories

bundle.test.ts - Bundle tests concern bundling bugs that only occur in DevServer.
css.test.ts - CSS tests concern bundling bugs with CSS files.
plugins.test.ts - Plugin tests concern plugins in development mode.
ecosystem.test.ts - These tests ensure certain libraries work correctly. It is preferred to test concrete bugs rather than entire packages.
esm.test.ts - ESM tests are about various ESM features in development mode.
html.test.ts - HTML tests relate to HTML files themselves.
react-spa.test.ts - Tests relating to React, our react-refresh transform, and basic server component transforms.
sourcemap.test.ts - Tests verifying source maps are correct.

## `devTest` Basics

A test takes in two primary inputs: `files` and `async test(dev) {`

```ts
import { devTest, emptyHtmlFile } from "../bake-harness";

devTest("html file is watched", {
  files: {
    "index.html": emptyHtmlFile({
      scripts: ["/script.ts"],
      body: "<h1>Hello</h1>",
    }),
    "script.ts": `
      console.log("hello");
    `,
  },
  async test(dev) {
    await dev.fetch("/").expect.toInclude("<h1>Hello</h1>");
    await dev.fetch("/").expect.toInclude("<h1>Hello</h1>");
    await dev.patch("index.html", {
      find: "Hello",
      replace: "World",
    });
    await dev.fetch("/").expect.toInclude("<h1>World</h1>");

    // Works
    await using c = await dev.client("/");
    await c.expectMessage("hello");

    // Editing HTML reloads
    await c.expectReload(async () => {
      await dev.patch("index.html", {
        find: "World",
        replace: "Hello",
      });
      await dev.fetch("/").expect.toInclude("<h1>Hello</h1>");
    });
    await c.expectMessage("hello");

    await c.expectReload(async () => {
      await dev.patch("index.html", {
        find: "Hello",
        replace: "Bar",
      });
      await dev.fetch("/").expect.toInclude("<h1>Bar</h1>");
    });
    await c.expectMessage("hello");

    await c.expectReload(async () => {
      await dev.patch("script.ts", {
        find: "hello",
        replace: "world",
      });
    });
    await c.expectMessage("world");
  },
});
```

`files` holds the initial state, and the callback runs with the server running. `dev.fetch()` runs HTTP requests, while `dev.client()` opens a browser instance pointed at the page.

The functions `dev.write`, `dev.patch`, and `dev.delete` mutate the filesystem. Do not use `node:fs` APIs: the dev server helpers are hooked to wait for the hot reload to finish and for all connected clients to receive the changes.

When a change performs a hard reload, that must be explicitly annotated with `expectReload`. This tells `client-fixture.mjs` that the test is meant to reload the page once; all other hard reloads automatically fail the test.

Clients have `console.log` instrumented, so any unasserted logs fail the test. This makes it more obvious when an extra reload or re-evaluation happens. Messages are awaited via `c.expectMessage("log")`, or with multiple arguments if there are multiple logs.
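To make those harness semantics concrete, here is one more minimal sketch in the same style. It reuses only names shown in this file (`devTest`, `emptyHtmlFile`, `dev.fetch`, `dev.client`, `dev.patch`, `expectReload`, `expectMessage`); the test name, the file contents, and the assumption that this particular script edit triggers exactly one hard reload are illustrative rather than taken from the harness.

```ts
import { devTest, emptyHtmlFile } from "../bake-harness";

devTest("multiple logs are asserted in one call", {
  files: {
    "index.html": emptyHtmlFile({
      scripts: ["/index.ts"],
      body: "<p>ok</p>",
    }),
    "index.ts": `
      console.log("first");
      console.log("second");
    `,
  },
  async test(dev) {
    // Plain HTTP assertion, no browser involved.
    await dev.fetch("/").expect.toInclude("<p>ok</p>");

    // Open a happy-dom client; both logs from the initial evaluation must be
    // asserted, otherwise the unasserted-log check fails the test.
    await using c = await dev.client("/");
    await c.expectMessage("first", "second");

    // File edits go through the harness so it can wait for the hot update to
    // reach the dev server and every connected client.
    await c.expectReload(async () => {
      await dev.patch("index.ts", { find: "second", replace: "third" });
    });
    await c.expectMessage("first", "third");
  },
});
```

The multi-argument `expectMessage` call asserts both console logs produced by a single evaluation, which keeps the unasserted-log check quiet without needing two separate awaits.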

## Testing for bundling errors

By default, a client opening a page that has an error will fail the test. This makes testing errors explicit.

```ts
devTest("import then create", {
  files: {
    "index.html": `
      <!DOCTYPE html>
      <html>
      <head></head>
      <body>
        <script type="module" src="/script.ts"></script>
      </body>
      </html>
    `,
    "script.ts": `
      import data from "./data";
      console.log(data);
    `,
  },
  async test(dev) {
    const c = await dev.client("/", {
      errors: ['script.ts:1:18: error: Could not resolve: "./data"'],
    });
    await c.expectReload(async () => {
      await dev.write("data.ts", "export default 'data';");
    });
    await c.expectMessage("data");
  },
});
```

Many functions take an options value that lets you specify the errors they are expected to produce. For example, this delete is going to cause a resolution failure:

```ts
await dev.delete("other.ts", {
  errors: ['index.ts:1:16: error: Could not resolve: "./other"'],
});
```
.cursor/rules/javascriptcore-class.mdc (new file, 408 lines)

---
description: JavaScript class implemented in C++
globs: *.cpp
---

# Implementing JavaScript classes in C++

If there is a publicly accessible Constructor and Prototype, then there are 3 classes:

- If there are C++ class members we need a destructor, so `class Foo : public JSC::DestructibleObject`. If there are no C++ class fields (only JS properties), then we usually don't need a class at all. We can instead use JSC::constructEmptyObject(vm, structure) and `putDirectOffset` like in [NodeFSBinding.cpp](mdc:src/bun.js/bindings/NodeFSBinding.cpp).
- class FooPrototype : public JSC::JSNonFinalObject
- class FooConstructor : public JSC::InternalFunction

If there is no publicly accessible Constructor, only the Prototype and the class are necessary. In some cases, we can avoid the prototype entirely (but that's rare).

If there are C++ fields on the Foo class, the Foo class will need an iso subspace added to [DOMClientIsoSubspaces.h](mdc:src/bun.js/bindings/webcore/DOMClientIsoSubspaces.h) and [DOMIsoSubspaces.h](mdc:src/bun.js/bindings/webcore/DOMIsoSubspaces.h). Prototype and Constructor do not need subspaces.

Usually you'll need to #include "root.h" at the top of C++ files or you'll get lint errors.

Generally, defining the subspace looks like this:

```c++
class Foo : public JSC::DestructibleObject {

    // ...

    template<typename MyClassT, JSC::SubspaceAccess mode>
    static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm)
    {
        if constexpr (mode == JSC::SubspaceAccess::Concurrently)
            return nullptr;
        return WebCore::subspaceForImpl<MyClassT, WebCore::UseCustomHeapCellType::No>(
            vm,
            [](auto& spaces) { return spaces.m_clientSubspaceFor${MyClassT}.get(); },
            [](auto& spaces, auto&& space) { spaces.m_clientSubspaceFor${MyClassT} = std::forward<decltype(space)>(space); },
            [](auto& spaces) { return spaces.m_subspaceFor${MyClassT}.get(); },
            [](auto& spaces, auto&& space) { spaces.m_subspaceFor${MyClassT} = std::forward<decltype(space)>(space); });
    }
```

It's better to put it in the .cpp file instead of the .h file, when possible.

## Defining properties

Define properties on the prototype. Use a const HashTableValue array like this:

```C++
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckEmail);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckHost);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckIP);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckIssued);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckPrivateKey);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncToJSON);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncToLegacyObject);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncToString);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncVerify);

static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_ca);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_fingerprint);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_fingerprint256);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_fingerprint512);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_subject);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_subjectAltName);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_infoAccess);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_keyUsage);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_issuer);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_issuerCertificate);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_publicKey);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_raw);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_serialNumber);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_validFrom);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_validTo);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_validFromDate);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_validToDate);

static const HashTableValue JSX509CertificatePrototypeTableValues[] = {
    { "ca"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_ca, 0 } },
    { "checkEmail"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncCheckEmail, 2 } },
    { "checkHost"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncCheckHost, 2 } },
    { "checkIP"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncCheckIP, 1 } },
    { "checkIssued"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncCheckIssued, 1 } },
    { "checkPrivateKey"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncCheckPrivateKey, 1 } },
    { "fingerprint"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_fingerprint, 0 } },
    { "fingerprint256"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_fingerprint256, 0 } },
    { "fingerprint512"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_fingerprint512, 0 } },
    { "infoAccess"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_infoAccess, 0 } },
    { "issuer"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_issuer, 0 } },
    { "issuerCertificate"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_issuerCertificate, 0 } },
    { "keyUsage"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_keyUsage, 0 } },
    { "publicKey"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_publicKey, 0 } },
    { "raw"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_raw, 0 } },
    { "serialNumber"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_serialNumber, 0 } },
    { "subject"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_subject, 0 } },
    { "subjectAltName"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_subjectAltName, 0 } },
    { "toJSON"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncToJSON, 0 } },
    { "toLegacyObject"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncToLegacyObject, 0 } },
    { "toString"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncToString, 0 } },
    { "validFrom"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_validFrom, 0 } },
    { "validFromDate"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessorOrValue), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_validFromDate, 0 } },
    { "validTo"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_validTo, 0 } },
    { "validToDate"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessorOrValue), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_validToDate, 0 } },
    { "verify"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncVerify, 1 } },
};
```

### Creating a prototype class

Follow a pattern like this:

```c++
class JSX509CertificatePrototype final : public JSC::JSNonFinalObject {
public:
    using Base = JSC::JSNonFinalObject;
    static constexpr unsigned StructureFlags = Base::StructureFlags;

    static JSX509CertificatePrototype* create(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::Structure* structure)
    {
        JSX509CertificatePrototype* prototype = new (NotNull, allocateCell<JSX509CertificatePrototype>(vm)) JSX509CertificatePrototype(vm, structure);
        prototype->finishCreation(vm);
        return prototype;
    }

    template<typename, JSC::SubspaceAccess>
    static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm)
    {
        return &vm.plainObjectSpace();
    }

    DECLARE_INFO;

    static JSC::Structure* createStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::JSValue prototype)
    {
        auto* structure = JSC::Structure::create(vm, globalObject, prototype, JSC::TypeInfo(JSC::ObjectType, StructureFlags), info());
        structure->setMayBePrototype(true);
        return structure;
    }

private:
    JSX509CertificatePrototype(JSC::VM& vm, JSC::Structure* structure)
        : Base(vm, structure)
    {
    }

    void finishCreation(JSC::VM& vm);
};

const ClassInfo JSX509CertificatePrototype::s_info = { "X509Certificate"_s, &Base::s_info, nullptr, nullptr, CREATE_METHOD_TABLE(JSX509CertificatePrototype) };

void JSX509CertificatePrototype::finishCreation(VM& vm)
{
    Base::finishCreation(vm);
    reifyStaticProperties(vm, JSX509Certificate::info(), JSX509CertificatePrototypeTableValues, *this);
    JSC_TO_STRING_TAG_WITHOUT_TRANSITION();
}

} // namespace Bun
```

### Getter definition

```C++
JSC_DEFINE_CUSTOM_GETTER(jsX509CertificateGetter_ca, (JSGlobalObject * globalObject, EncodedJSValue thisValue, PropertyName))
{
    VM& vm = globalObject->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);

    JSX509Certificate* thisObject = jsDynamicCast<JSX509Certificate*>(JSValue::decode(thisValue));
    if (UNLIKELY(!thisObject)) {
        Bun::throwThisTypeError(*globalObject, scope, "JSX509Certificate"_s, "ca"_s);
        return {};
    }

    return JSValue::encode(jsBoolean(thisObject->view().isCA()));
}
```

### Setter definition

```C++
JSC_DEFINE_CUSTOM_SETTER(jsImportMetaObjectSetter_require, (JSGlobalObject * jsGlobalObject, JSC::EncodedJSValue thisValue, JSC::EncodedJSValue encodedValue, PropertyName propertyName))
{
    ImportMetaObject* thisObject = jsDynamicCast<ImportMetaObject*>(JSValue::decode(thisValue));
    if (UNLIKELY(!thisObject))
        return false;

    JSValue value = JSValue::decode(encodedValue);
    if (!value.isCell()) {
        // TODO:
        return true;
    }

    thisObject->requireProperty.set(thisObject->vm(), thisObject, value.asCell());
    return true;
}
```

### Function definition

```C++
JSC_DEFINE_HOST_FUNCTION(jsX509CertificateProtoFuncToJSON, (JSGlobalObject * globalObject, CallFrame* callFrame))
{
    VM& vm = globalObject->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    auto *thisObject = jsDynamicCast<MyClassT*>(callFrame->thisValue());
    if (UNLIKELY(!thisObject)) {
        Bun::throwThisTypeError(*globalObject, scope, "MyClass"_s, "myFunctionName"_s);
        return {};
    }

    return JSValue::encode(functionThatReturnsJSValue(vm, globalObject, thisObject));
}
```

### Constructor definition

```C++
JSC_DECLARE_HOST_FUNCTION(callStats);
JSC_DECLARE_HOST_FUNCTION(constructStats);

class JSStatsConstructor final : public JSC::InternalFunction {
public:
    using Base = JSC::InternalFunction;
    static constexpr unsigned StructureFlags = Base::StructureFlags;

    static JSStatsConstructor* create(JSC::VM& vm, JSC::Structure* structure, JSC::JSObject* prototype)
    {
        JSStatsConstructor* constructor = new (NotNull, JSC::allocateCell<JSStatsConstructor>(vm)) JSStatsConstructor(vm, structure);
        constructor->finishCreation(vm, prototype);
        return constructor;
    }

    DECLARE_INFO;

    template<typename CellType, JSC::SubspaceAccess>
    static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm)
    {
        return &vm.internalFunctionSpace();
    }

    static JSC::Structure* createStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::JSValue prototype)
    {
        return JSC::Structure::create(vm, globalObject, prototype, JSC::TypeInfo(JSC::InternalFunctionType, StructureFlags), info());
    }

private:
    JSStatsConstructor(JSC::VM& vm, JSC::Structure* structure)
        : Base(vm, structure, callStats, constructStats)
    {
    }

    void finishCreation(JSC::VM& vm, JSC::JSObject* prototype)
    {
        Base::finishCreation(vm, 0, "Stats"_s);
        putDirectWithoutTransition(vm, vm.propertyNames->prototype, prototype, JSC::PropertyAttribute::DontEnum | JSC::PropertyAttribute::DontDelete | JSC::PropertyAttribute::ReadOnly);
    }
};
```

### Structure caching

If there's a class, prototype, and constructor:

1. Add the `JSC::LazyClassStructure` to [ZigGlobalObject.h](mdc:src/bun.js/bindings/ZigGlobalObject.h)
2. Initialize the class structure in [ZigGlobalObject.cpp](mdc:src/bun.js/bindings/ZigGlobalObject.cpp) in `void GlobalObject::finishCreation(VM& vm)`
3. Visit the class structure in visitChildren in [ZigGlobalObject.cpp](mdc:src/bun.js/bindings/ZigGlobalObject.cpp) in `void GlobalObject::visitChildrenImpl`

```c++#ZigGlobalObject.cpp
void GlobalObject::finishCreation(VM& vm) {
    // ...
    m_JSStatsBigIntClassStructure.initLater(
        [](LazyClassStructure::Initializer& init) {
            // Call the function to initialize our class structure.
            Bun::initJSBigIntStatsClassStructure(init);
        });
```

Then, implement the function that creates the structure:

```c++
void setupX509CertificateClassStructure(LazyClassStructure::Initializer& init)
{
    auto* prototypeStructure = JSX509CertificatePrototype::createStructure(init.vm, init.global, init.global->objectPrototype());
    auto* prototype = JSX509CertificatePrototype::create(init.vm, init.global, prototypeStructure);

    auto* constructorStructure = JSX509CertificateConstructor::createStructure(init.vm, init.global, init.global->functionPrototype());

    auto* constructor = JSX509CertificateConstructor::create(init.vm, init.global, constructorStructure, prototype);

    auto* structure = JSX509Certificate::createStructure(init.vm, init.global, prototype);
    init.setPrototype(prototype);
    init.setStructure(structure);
    init.setConstructor(constructor);
}
```

If there's only a class, use `JSC::LazyProperty<JSGlobalObject, Structure>` instead of `JSC::LazyClassStructure`:

1. Add the `JSC::LazyProperty<JSGlobalObject, Structure>` to @ZigGlobalObject.h
2. Initialize the class structure in @ZigGlobalObject.cpp in `void GlobalObject::finishCreation(VM& vm)`
3. Visit the lazy property in visitChildren in @ZigGlobalObject.cpp in `void GlobalObject::visitChildrenImpl`

```c++
void GlobalObject::finishCreation(VM& vm) {
    // ...
    this->m_myLazyProperty.initLater([](const JSC::LazyProperty<JSC::JSGlobalObject, JSC::Structure>::Initializer& init) {
        init.set(Bun::initMyStructure(init.vm, reinterpret_cast<Zig::GlobalObject*>(init.owner)));
    });
```

Then, implement the function that creates the structure:

```c++
Structure* setupX509CertificateStructure(JSC::VM &vm, Zig::GlobalObject* globalObject)
{
    // If there is a prototype:
    auto* prototypeStructure = JSX509CertificatePrototype::createStructure(init.vm, init.global, init.global->objectPrototype());
    auto* prototype = JSX509CertificatePrototype::create(init.vm, init.global, prototypeStructure);

    // If there is no prototype or it only has

    auto* structure = JSX509Certificate::createStructure(init.vm, init.global, prototype);
    init.setPrototype(prototype);
    init.setStructure(structure);
    init.setConstructor(constructor);
}
```

Then, use the structure by calling `globalObject->m_myStructureName.get(globalObject)`:

```C++
JSC_DEFINE_HOST_FUNCTION(x509CertificateConstructorConstruct, (JSGlobalObject * globalObject, CallFrame* callFrame))
{
    VM& vm = globalObject->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);

    if (!callFrame->argumentCount()) {
        Bun::throwError(globalObject, scope, ErrorCode::ERR_MISSING_ARGS, "X509Certificate constructor requires at least one argument"_s);
        return {};
    }

    JSValue arg = callFrame->uncheckedArgument(0);
    if (!arg.isCell()) {
        Bun::throwError(globalObject, scope, ErrorCode::ERR_INVALID_ARG_TYPE, "X509Certificate constructor argument must be a Buffer, TypedArray, or string"_s);
        return {};
    }

    auto* zigGlobalObject = defaultGlobalObject(globalObject);
    Structure* structure = zigGlobalObject->m_JSX509CertificateClassStructure.get(zigGlobalObject);
    JSValue newTarget = callFrame->newTarget();
    if (UNLIKELY(zigGlobalObject->m_JSX509CertificateClassStructure.constructor(zigGlobalObject) != newTarget)) {
        auto scope = DECLARE_THROW_SCOPE(vm);
        if (!newTarget) {
            throwTypeError(globalObject, scope, "Class constructor X509Certificate cannot be invoked without 'new'"_s);
            return {};
        }

        auto* functionGlobalObject = defaultGlobalObject(getFunctionRealm(globalObject, newTarget.getObject()));
        RETURN_IF_EXCEPTION(scope, {});
        structure = InternalFunction::createSubclassStructure(
            globalObject, newTarget.getObject(), functionGlobalObject->NodeVMScriptStructure());
        scope.release();
    }

    return JSValue::encode(createX509Certificate(vm, globalObject, structure, arg));
}
```

### Expose to Zig

To expose the constructor to Zig:

```c++
extern "C" JSC::EncodedJSValue Bun__JSBigIntStatsObjectConstructor(Zig::GlobalObject* globalobject)
{
    return JSValue::encode(globalobject->m_JSStatsBigIntClassStructure.constructor(globalobject));
}
```

Zig:

```zig
extern "c" fn Bun__JSBigIntStatsObjectConstructor(*JSC.JSGlobalObject) JSC.JSValue;
pub const getBigIntStatsConstructor = Bun__JSBigIntStatsObjectConstructor;
```

To create an object (instance) of a JS class defined in C++ from Zig, follow the __toJS convention like this:

```c++
// X509* is whatever we need to create the object
extern "C" EncodedJSValue Bun__X509__toJS(Zig::GlobalObject* globalObject, X509* cert)
{
    // ... implementation details
    auto* structure = globalObject->m_JSX509CertificateClassStructure.get(globalObject);
    return JSValue::encode(JSX509Certificate::create(globalObject->vm(), structure, globalObject, WTFMove(cert)));
}
```

And from Zig:

```zig
const X509 = opaque {
    // ... class

    extern fn Bun__X509__toJS(*JSC.JSGlobalObject, *X509) JSC.JSValue;

    pub fn toJS(this: *X509, globalObject: *JSC.JSGlobalObject) JSC.JSValue {
        return Bun__X509__toJS(globalObject, this);
    }
};
```
.cursor/rules/writing-tests.mdc (new file, 91 lines)

---
description: Writing tests for Bun
globs:
---
# Writing tests for Bun

## Where tests are found

You'll find all of Bun's tests in the `test/` directory.

* `test/`
  * `cli/` - CLI command tests, like `bun install` or `bun init`
  * `js/` - JavaScript & TypeScript tests
    * `bun/` - `Bun` APIs tests, separated by category, for example: `glob/` for `Bun.Glob` tests
    * `node/` - Node.js module tests, separated by module, for example: `assert/` for `node:assert` tests
      * `test/` - Vendored Node.js tests, taken from the Node.js repository (does not conform to Bun's test style)
    * `web/` - Web API tests, separated by category, for example: `fetch/` for `Request` and `Response` tests
  * `third_party/` - npm package tests, to validate that basic usage works in Bun
  * `napi/` - N-API tests
  * `v8/` - V8 C++ API tests
  * `bundler/` - Bundler, transpiler, CSS, and `bun build` tests
  * `regression/issue/[number]` - Regression tests; always make one when fixing a particular issue

## How tests are written

Bun's tests are written as JavaScript and TypeScript files with the Jest-style APIs, like `test`, `describe`, and `expect`. They are run using Bun's own test runner, `bun test`.

```js
import { describe, test, expect } from "bun:test";
import assert, { AssertionError } from "assert";

describe("assert(expr)", () => {
  test.each([true, 1, "foo"])(`assert(%p) does not throw`, expr => {
    expect(() => assert(expr)).not.toThrow();
  });

  test.each([false, 0, "", null, undefined])(`assert(%p) throws`, expr => {
    expect(() => assert(expr)).toThrow(AssertionError);
  });
});
```

## Testing conventions

* See `test/harness.ts` for common test utilities and helpers
* Be rigorous and test for edge cases and unexpected inputs
* Use data-driven tests, e.g. `test.each`, to reduce boilerplate when possible
* When you need to test Bun as a CLI, use the following pattern:

```js
import { test, expect } from "bun:test";
import { spawn } from "bun";
import { bunExe, bunEnv } from "harness";

test("bun --version", async () => {
  const { exited, stdout: stdoutStream, stderr: stderrStream } = spawn({
    cmd: [bunExe(), "--version"],
    env: bunEnv,
    stdout: "pipe",
    stderr: "pipe",
  });
  const [exitCode, stdout, stderr] = await Promise.all([
    exited,
    new Response(stdoutStream).text(),
    new Response(stderrStream).text(),
  ]);
  expect({ exitCode, stdout, stderr }).toMatchObject({
    exitCode: 0,
    stdout: expect.stringContaining(Bun.version),
    stderr: "",
  });
});
```

## Before writing a test

* If you are fixing a bug, write the test first and make sure it fails (as expected) with the canary version of Bun
* If you are fixing a Node.js compatibility bug, create a throw-away snippet of code and test that it works as you expect in Node.js, then that it fails (as expected) with the canary version of Bun
* When the expected behaviour is ambiguous, defer to matching what happens in Node.js
* Always attempt to find related tests in an existing test file before creating a new test file
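To tie those conventions together, here is a minimal sketch of a regression test in the location named above. The issue number, file path, and the specific behavior being reproduced are hypothetical placeholders; `bunExe`, `bunEnv`, and the spawn pattern follow the CLI example earlier, and `bun -e` is the eval flag used elsewhere in this diff.

```ts
// test/regression/issue/12345.test.ts (hypothetical issue number and bug)
import { test, expect } from "bun:test";
import { spawn } from "bun";
import { bunExe, bunEnv } from "harness";

test("child process exit code is propagated", async () => {
  // Reproduce the reported behavior in the smallest possible way.
  const { exited } = spawn({
    cmd: [bunExe(), "-e", "process.exit(7)"],
    env: bunEnv,
    stdout: "ignore",
    stderr: "ignore",
  });
  expect(await exited).toBe(7);
});
```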
.gitattributes (1 line changed)

@@ -15,6 +15,7 @@
 *.lock text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
 *.map text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
 *.md text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
+*.mdc text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
 *.mjs text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
 *.mts text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
.github/actions/setup-bun/action.yml (2 lines changed)

@@ -4,7 +4,7 @@ description: An internal version of the 'oven-sh/setup-bun' action.
 inputs:
   bun-version:
     type: string
-    description: "The version of bun to install: 'latest', 'canary', 'bun-v1.0.0', etc."
+    description: "The version of bun to install: 'latest', 'canary', 'bun-v1.2.0', etc."
     default: latest
     required: false
   baseline:
.github/workflows/clang-format.yml (6 lines changed)

@@ -10,9 +10,9 @@ on:
   merge_group:
 
 env:
-  BUN_VERSION: "1.1.44"
-  LLVM_VERSION: "18.1.8"
-  LLVM_VERSION_MAJOR: "18"
+  BUN_VERSION: "1.2.0"
+  LLVM_VERSION: "19.1.7"
+  LLVM_VERSION_MAJOR: "19"
 
 jobs:
   clang-format:
.github/workflows/clang-tidy.yml (6 lines changed)

@@ -10,9 +10,9 @@ on:
   merge_group:
 
 env:
-  BUN_VERSION: "1.1.44"
-  LLVM_VERSION: "18.1.8"
-  LLVM_VERSION_MAJOR: "18"
+  BUN_VERSION: "1.2.0"
+  LLVM_VERSION: "19.1.7"
+  LLVM_VERSION_MAJOR: "19"
 
 jobs:
   clang-tidy:
.github/workflows/lint.yml (2 lines changed)

@@ -5,7 +5,7 @@ on:
   workflow_dispatch:
 
 env:
-  BUN_VERSION: "1.1.44"
+  BUN_VERSION: "1.2.0"
   OXLINT_VERSION: "0.15.0"
 
 jobs:
.github/workflows/packages-ci.yml (new file, 55 lines)

name: Packages CI

on:
  push:
    branches:
      - main
    paths:
      - "packages/**"
      - .prettierrc
      - .prettierignore
      - tsconfig.json
      - oxlint.json
      - "!**/*.md"
  pull_request:
    branches:
      - main
    paths:
      - "packages/**"
      - .prettierrc
      - .prettierignore
      - tsconfig.json
      - oxlint.json
      - "!**/*.md"

env:
  BUN_VERSION: "canary"

jobs:
  bun-plugin-svelte:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Setup Bun
        uses: ./.github/actions/setup-bun
        with:
          bun-version: ${{ env.BUN_VERSION }}

      - name: Install dependencies
        run: |
          bun install
          pushd ./packages/bun-plugin-svelte && bun install

      - name: Lint
        run: |
          bunx oxlint@0.15 --format github --deny-warnings
          bunx prettier --config ../../.prettierrc --check .
        working-directory: ./packages/bun-plugin-svelte

      - name: Check types
        run: bun check:types
        working-directory: ./packages/bun-plugin-svelte

      - name: Test
        run: bun test
        working-directory: ./packages/bun-plugin-svelte
.github/workflows/prettier-format.yml (2 lines changed)

@@ -10,7 +10,7 @@ on:
   merge_group:
 
 env:
-  BUN_VERSION: "1.1.44"
+  BUN_VERSION: "1.2.0"
 
 jobs:
   prettier-format:
.github/workflows/release.yml (66 changes, vendored)

@@ -44,6 +44,10 @@ on:
        description: Should types be released to npm?
        type: boolean
        default: false
      use-definitelytyped:
        description: "Should types be PR'd to DefinitelyTyped?"
        type: boolean
        default: false

jobs:
  sign:

@@ -66,7 +70,7 @@ jobs:
      - name: Setup Bun
        uses: ./.github/actions/setup-bun
        with:
-          bun-version: "1.1.44"
+          bun-version: "1.2.3"
      - name: Install Dependencies
        run: bun install
      - name: Sign Release

@@ -94,7 +98,7 @@ jobs:
      - name: Setup Bun
        uses: ./.github/actions/setup-bun
        with:
-          bun-version: "1.1.44"
+          bun-version: "1.2.3"
      - name: Install Dependencies
        run: bun install
      - name: Release

@@ -123,7 +127,7 @@ jobs:
        if: ${{ env.BUN_VERSION != 'canary' }}
        uses: ./.github/actions/setup-bun
        with:
-          bun-version: "1.1.44"
+          bun-version: "1.2.3"
      - name: Setup Bun
        if: ${{ env.BUN_VERSION == 'canary' }}
        uses: ./.github/actions/setup-bun

@@ -155,6 +159,52 @@ jobs:
        with:
          package: packages/bun-types/package.json
          token: ${{ secrets.NPM_TOKEN }}
  definitelytyped:
    name: Make pr to DefinitelyTyped to update `bun-types` version
    runs-on: ubuntu-latest
    needs: npm-types
    if: ${{ github.event_name == 'release' || github.event.inputs.use-definitelytyped == 'true' }}
    permissions:
      contents: read
    steps:
      - name: Checkout (DefinitelyTyped)
        uses: actions/checkout@v4
        with:
          repository: DefinitelyTyped/DefinitelyTyped
      - name: Checkout (bun)
        uses: actions/checkout@v4
        with:
          path: bun
      - name: Setup Bun
        uses: ./bun/.github/actions/setup-bun
        with:
          bun-version: "1.2.0"
      - id: bun-version
        run: echo "BUN_VERSION=${BUN_VERSION#bun-v}" >> "$GITHUB_OUTPUT"
      - name: Update bun-types version in package.json
        run: |
          bun -e '
            const file = Bun.file("./types/bun/package.json");
            const json = await file.json();
            const version = "${{ steps.bun-version.outputs.BUN_VERSION }}";
            json.dependencies["bun-types"] = version;
            json.version = version.slice(0, version.lastIndexOf(".")) + ".9999";
            await file.write(JSON.stringify(json, null, 4) + "\n");
          '
      - name: Create Pull Request
        uses: peter-evans/create-pull-request@v7
        if: ${{ env.BUN_LATEST == 'true' && env.BUN_VERSION != 'canary'}}
        with:
          token: ${{ secrets.ROBOBUN_TOKEN }}
          add-paths: ./types/bun/package.json
          title: "[bun] update to ${{ steps.bun-version.outputs.BUN_VERSION }}"
          commit-message: "[bun] update to ${{ steps.bun-version.outputs.BUN_VERSION }}"
          body: |
            Update `bun-types` version to ${{ steps.bun-version.outputs.BUN_VERSION }}

            https://bun.sh/blog/${{ env.BUN_VERSION }}
          push-to-fork: oven-sh/DefinitelyTyped
          branch: ${{env.BUN_VERSION}}
  docker:
    name: Release to Dockerhub
    runs-on: ubuntu-latest

@@ -181,7 +231,7 @@ jobs:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Setup Docker emulator
-        uses: docker/setup-qemu-action@v2
+        uses: docker/setup-qemu-action@v3
      - id: buildx
        name: Setup Docker buildx
        uses: docker/setup-buildx-action@v3

@@ -189,7 +239,7 @@ jobs:
          platforms: linux/amd64,linux/arm64
      - id: metadata
        name: Setup Docker metadata
-        uses: docker/metadata-action@v4
+        uses: docker/metadata-action@v5
        with:
          images: oven/bun
          flavor: |

@@ -206,7 +256,7 @@ jobs:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}
      - name: Push to Docker
-        uses: docker/build-push-action@v5
+        uses: docker/build-push-action@v6
        with:
          context: ./dockerhub/${{ matrix.dir || matrix.variant }}
          platforms: linux/amd64,linux/arm64

@@ -265,7 +315,7 @@ jobs:
      - name: Setup Bun
        uses: ./.github/actions/setup-bun
        with:
-          bun-version: "1.1.44"
+          bun-version: "1.2.0"
      - name: Install Dependencies
        run: bun install
      - name: Release

@@ -309,7 +359,7 @@ jobs:
        uses: ./.github/actions/setup-bun
        if: ${{ env.BUN_LATEST == 'true' }}
        with:
-          bun-version: "1.1.44"
+          bun-version: "1.2.0"
      - name: Bump version
        uses: ./.github/actions/bump
        if: ${{ env.BUN_LATEST == 'true' }}
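The inline `bun -e` script in the new `definitelytyped` job rewrites `types/bun/package.json` in the DefinitelyTyped checkout: it pins the `bun-types` dependency to the released version and sets the package's own version to the same major.minor with a fixed `.9999` patch. A minimal sketch of that transformation; the `"1.2.3"` literal is a hypothetical stand-in for the `steps.bun-version` output:

```js
// Sketch of the version rewrite performed by the workflow's `bun -e` one-liner.
// "1.2.3" stands in for ${{ steps.bun-version.outputs.BUN_VERSION }}.
const version = "1.2.3";

const json = {
  version: "1.2.9999", // previous DefinitelyTyped package version (illustrative)
  dependencies: { "bun-types": "1.2.2" },
};

json.dependencies["bun-types"] = version; // track the new Bun release exactly
json.version = version.slice(0, version.lastIndexOf(".")) + ".9999"; // keep major.minor, force patch 9999

console.log(json); // { version: "1.2.9999", dependencies: { "bun-types": "1.2.3" } }
```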
.github/workflows/run-lint.yml (3 changes, vendored)

@@ -3,8 +3,7 @@ name: Lint
permissions:
  contents: read
env:
  LLVM_VERSION: 16
-  BUN_VERSION: "1.1.44"
+  BUN_VERSION: "1.2.0"

on:
  workflow_call:
.github/workflows/test-bump.yml (2 changes, vendored)

@@ -9,7 +9,7 @@ on:
        required: true

env:
-  BUN_VERSION: "1.1.44"
+  BUN_VERSION: "1.2.0"

jobs:
  bump:
.github/workflows/update-cares.yml (2 changes, vendored)

@@ -89,4 +89,6 @@ jobs:

            Updates c-ares to version ${{ steps.check-version.outputs.tag }}

            Compare: https://github.com/c-ares/c-ares/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}

            Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-cares.yml)
.github/workflows/update-libarchive.yml (2 changes, vendored)

@@ -89,4 +89,6 @@ jobs:

            Updates libarchive to version ${{ steps.check-version.outputs.tag }}

            Compare: https://github.com/libarchive/libarchive/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}

            Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-libarchive.yml)
.github/workflows/update-libdeflate.yml (2 changes, vendored)

@@ -89,4 +89,6 @@ jobs:

            Updates libdeflate to version ${{ steps.check-version.outputs.tag }}

            Compare: https://github.com/ebiggers/libdeflate/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}

            Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-libdeflate.yml)
.github/workflows/update-lolhtml.yml (2 changes, vendored)

@@ -89,4 +89,6 @@ jobs:

            Updates lolhtml to version ${{ steps.check-version.outputs.tag }}

            Compare: https://github.com/cloudflare/lol-html/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}

            Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-lolhtml.yml)
.github/workflows/update-lshpack.yml (2 changes, vendored)

@@ -89,4 +89,6 @@ jobs:

            Updates lshpack to version ${{ steps.check-version.outputs.tag }}

            Compare: https://github.com/litespeedtech/ls-hpack/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}

            Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-lshpack.yml)
.github/workflows/update-root-certs.yml (new file, 82 lines, vendored)

@@ -0,0 +1,82 @@
name: Daily Root Certs Update Check

on:
  schedule:
    - cron: "0 0 * * *" # Runs at 00:00 UTC every day
  workflow_dispatch: # Allows manual trigger

jobs:
  check-and-update:
    runs-on: ubuntu-latest
    permissions:
      contents: write
      pull-requests: write

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Setup Bun
        uses: oven-sh/setup-bun@v1
        with:
          bun-version: latest

      - name: Generate root certs and capture output
        id: generate-certs
        run: |
          cd packages/bun-usockets/
          OUTPUT=$(bun generate-root-certs.mjs -v)
          echo "cert_output<<EOF" >> $GITHUB_ENV
          echo "$OUTPUT" >> $GITHUB_ENV
          echo "EOF" >> $GITHUB_ENV

      - name: Check for changes and stage files
        id: check-changes
        run: |
          if [[ -n "$(git status --porcelain)" ]]; then
            echo "Found changes, staging modified files..."
            git config --global user.name "github-actions[bot]"
            git config --global user.email "github-actions[bot]@users.noreply.github.com"

            # Get list of modified files and add them
            git status --porcelain | while read -r status file; do
              # Remove leading status and whitespace
              file=$(echo "$file" | sed 's/^.* //')
              echo "Adding changed file: $file"
              git add "$file"
            done

            echo "changes=true" >> $GITHUB_OUTPUT

            # Store the list of changed files
            CHANGED_FILES=$(git status --porcelain)
            echo "changed_files<<EOF" >> $GITHUB_ENV
            echo "$CHANGED_FILES" >> $GITHUB_ENV
            echo "EOF" >> $GITHUB_ENV
          else
            echo "No changes detected"
            echo "changes=false" >> $GITHUB_OUTPUT
          fi

      - name: Create Pull Request
        if: steps.check-changes.outputs.changes == 'true'
        uses: peter-evans/create-pull-request@v5
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          commit-message: "update(root_certs): Update root certificates $(date +'%Y-%m-%d')"
          title: "update(root_certs) $(date +'%Y-%m-%d')"
          body: |
            Automated root certificates update

            ${{ env.cert_output }}

            ## Changed Files:
            ```
            ${{ env.changed_files }}
            ```
          branch: certs/update-root-certs-${{ github.run_number }}
          base: main
          delete-branch: true
          labels:
            - "automation"
            - "root-certs"
.github/workflows/update-sqlite3.yml (4 changes, vendored)

@@ -55,7 +55,7 @@ jobs:

          # Convert numeric version to semantic version for display
          LATEST_MAJOR=$((10#$LATEST_VERSION_NUM / 1000000))
-          LATEST_MINOR=$((($LATEST_VERSION_NUM / 1000) % 1000))
+          LATEST_MINOR=$((($LATEST_VERSION_NUM / 10000) % 100))
          LATEST_PATCH=$((10#$LATEST_VERSION_NUM % 1000))
          LATEST_VERSION="$LATEST_MAJOR.$LATEST_MINOR.$LATEST_PATCH"

@@ -106,4 +106,6 @@ jobs:

            Updates SQLite to version ${{ steps.check-version.outputs.latest }}

            Compare: https://sqlite.org/src/vdiff?from=${{ steps.check-version.outputs.current }}&to=${{ steps.check-version.outputs.latest }}

            Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-sqlite3.yml)
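The changed `LATEST_MINOR` line alters how the minor number is pulled out of the packed numeric version before it is displayed as `MAJOR.MINOR.PATCH`. A small sketch of the two expressions side by side, using a hypothetical packed value, to show how their results differ:

```js
// Hypothetical packed version number of the form used by the workflow above.
const LATEST_VERSION_NUM = 3480000;

const major = Math.floor(LATEST_VERSION_NUM / 1000000); // 3
const minorOld = Math.floor(LATEST_VERSION_NUM / 1000) % 1000; // 480 (old expression)
const minorNew = Math.floor(LATEST_VERSION_NUM / 10000) % 100; // 48  (new expression)

console.log(`${major}.${minorOld}`, "vs", `${major}.${minorNew}`); // "3.480 vs 3.48"
```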
.github/workflows/zig-format.yml (2 changes, vendored)

@@ -10,7 +10,7 @@ on:
  merge_group:

env:
-  BUN_VERSION: "1.1.44"
+  BUN_VERSION: "1.2.0"

jobs:
  zig-format:
.gitignore (6 changes, vendored)

@@ -16,6 +16,7 @@
.vscode/clang*
.vscode/cpp*
.zig-cache
+ .bake-debug
*.a
*.bc
*.big

@@ -151,6 +152,7 @@ src/bake/generated.ts
test/cli/install/registry/packages/publish-pkg-*
test/cli/install/registry/packages/@secret/publish-pkg-8
test/js/third_party/prisma/prisma/sqlite/dev.db-journal
+ tmp

# Dependencies
/vendor

@@ -178,4 +180,6 @@ test/js/third_party/prisma/prisma/sqlite/dev.db-journal

.buildkite/ci.yml
*.sock
- scratch*.{js,ts,tsx,cjs,mjs}
+ scratch*.{js,ts,tsx,cjs,mjs}

+ *.bun-build
.lldbinit (18 changes)

@@ -1,4 +1,16 @@
# command script import vendor/zig/tools/lldb_pretty_printers.py
command script import vendor/WebKit/Tools/lldb/lldb_webkit.py
# Tell LLDB what to do when the debugged process receives SIGPWR: pass it through to the process
# (-p), but do not stop the process (-s) or notify the user (-n).
#
# JSC's garbage collector sends this signal (as configured by Bun WebKit in
# Thread::initializePlatformThreading() in ThreadingPOSIX.cpp) to the JS thread to suspend or resume
# it. So stopping the process would just create noise when debugging any long-running script.
process handle -p true -s false -n false SIGPWR

# type summary add --summary-string "${var} | inner=${var[0-30]}, source=${var[33-64]}, tag=${var[31-32]}" "unsigned long"
command script import misctools/lldb/lldb_pretty_printers.py
type category enable zig.lang
type category enable zig.std

command script import misctools/lldb/lldb_webkit.py

command script delete btjs
command alias btjs p {printf("gathering btjs trace...\n");printf("%s\n", (char*)dumpBtjsTrace())}

@@ -7,3 +7,4 @@ src/react-refresh.js
*.min.js
test/snippets
test/js/node/test
bun.lock
.vscode/launch.json (47 changes, generated, vendored)
@@ -22,7 +22,6 @@
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
@@ -38,7 +37,6 @@
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
@@ -60,7 +58,6 @@
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
@@ -76,7 +73,6 @@
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
@@ -92,7 +88,6 @@
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
@@ -108,7 +103,6 @@
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
@@ -125,7 +119,6 @@
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
|
||||
"serverReadyAction": {
|
||||
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
|
||||
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
|
||||
@@ -147,7 +140,6 @@
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
|
||||
"serverReadyAction": {
|
||||
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
|
||||
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
|
||||
@@ -169,7 +161,6 @@
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
@@ -188,7 +179,6 @@
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
@@ -203,7 +193,6 @@
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
@@ -221,7 +210,6 @@
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
@@ -236,7 +224,6 @@
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
@@ -253,7 +240,6 @@
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
|
||||
"serverReadyAction": {
|
||||
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
|
||||
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
|
||||
@@ -275,7 +261,6 @@
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
|
||||
"serverReadyAction": {
|
||||
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
|
||||
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
|
||||
@@ -297,7 +282,6 @@
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
@@ -313,7 +297,6 @@
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
@@ -329,7 +312,6 @@
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
@@ -345,7 +327,6 @@
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
@@ -361,7 +342,6 @@
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
@@ -378,7 +358,6 @@
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
|
||||
"serverReadyAction": {
|
||||
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
|
||||
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
|
||||
@@ -400,7 +379,6 @@
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
|
||||
"serverReadyAction": {
|
||||
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
|
||||
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
|
||||
@@ -421,7 +399,6 @@
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
|
||||
},
|
||||
// bun test [*]
|
||||
{
|
||||
@@ -437,7 +414,6 @@
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
@@ -452,7 +428,6 @@
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
@@ -468,7 +443,6 @@
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
|
||||
"serverReadyAction": {
|
||||
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
|
||||
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
|
||||
@@ -488,7 +462,6 @@
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
@@ -503,7 +476,6 @@
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
|
||||
},
|
||||
// Windows: bun test [file]
|
||||
{
|
||||
@@ -1129,7 +1101,24 @@
|
||||
],
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
|
||||
},
|
||||
{
|
||||
"type": "bun",
|
||||
"name": "[JS] bun test [file]",
|
||||
"runtime": "${workspaceFolder}/build/debug/bun-debug",
|
||||
"runtimeArgs": ["test", "${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": {
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
},
|
||||
},
|
||||
{
|
||||
"type": "midas-rr",
|
||||
"request": "attach",
|
||||
"name": "rr",
|
||||
"trace": "Off",
|
||||
"setupCommands": ["handle SIGPWR nostop noprint pass"],
|
||||
},
|
||||
],
|
||||
"inputs": [
|
||||
|
||||
.vscode/settings.json (5 changes, vendored)
@@ -36,6 +36,7 @@
|
||||
// "zig.buildOnSave": true,
|
||||
"zig.buildFilePath": "${workspaceFolder}/build.zig",
|
||||
"zig.path": "${workspaceFolder}/vendor/zig/zig.exe",
|
||||
"zig.zls.path": "${workspaceFolder}/vendor/zig/zls.exe",
|
||||
"zig.formattingProvider": "zls",
|
||||
"zig.zls.enableInlayHints": false,
|
||||
"[zig]": {
|
||||
@@ -63,6 +64,7 @@
|
||||
"editor.tabSize": 4,
|
||||
"editor.defaultFormatter": "xaver.clang-format",
|
||||
},
|
||||
"clangd.arguments": ["--header-insertion=never"],
|
||||
|
||||
// JavaScript
|
||||
"prettier.enable": true,
|
||||
@@ -139,8 +141,11 @@
|
||||
"packages/bun-uws/fuzzing": true,
|
||||
},
|
||||
"files.associations": {
|
||||
"*.css": "tailwindcss",
|
||||
"*.idl": "cpp",
|
||||
"*.mdc": "markdown",
|
||||
"array": "cpp",
|
||||
"ios": "cpp",
|
||||
},
|
||||
"C_Cpp.files.exclude": {
|
||||
"**/.vscode": true,
|
||||
|
||||
@@ -12,6 +12,12 @@ list(APPEND CMAKE_MODULE_PATH
include(Policies)
include(Globals)

if (CMAKE_HOST_WIN32)
  # Workaround for TLS certificate verification issue on Windows when downloading from GitHub
  # Remove this once we've bumped the CI machines build image
  set(CMAKE_TLS_VERIFY 0)
endif()

# --- Compilers ---

if(CMAKE_HOST_APPLE)
@@ -67,7 +67,7 @@ $ wget https://apt.llvm.org/llvm.sh -O - | sudo bash -s -- 18 all
```

```bash#Arch
- $ sudo pacman -S llvm clang lld
+ $ sudo pacman -S llvm clang18 lld
```

```bash#Fedora

@@ -80,7 +80,7 @@ $ sudo zypper install clang18 lld18 llvm18

{% /codetabs %}

- If none of the above solutions apply, you will have to install it [manually](https://github.com/llvm/llvm-project/releases/tag/llvmorg-18.1.8).
+ If none of the above solutions apply, you will have to install it [manually](https://github.com/llvm/llvm-project/releases/tag/llvmorg-19.1.7).

Make sure Clang/LLVM 18 is in your path:

@@ -205,18 +205,30 @@ WebKit is not cloned by default (to save time and disk space). To clone and buil
# Clone WebKit into ./vendor/WebKit
$ git clone https://github.com/oven-sh/WebKit vendor/WebKit

# Check out the commit hash specified in `set(WEBKIT_VERSION <commit_hash>)` in cmake/tools/SetupWebKit.cmake
$ git -C vendor/WebKit checkout <commit_hash>

# Make a debug build of JSC. This will output build artifacts in ./vendor/WebKit/WebKitBuild/Debug
# Optionally, you can use `make jsc` for a release build
- $ make jsc-debug
+ $ make jsc-debug && rm vendor/WebKit/WebKitBuild/Debug/JavaScriptCore/DerivedSources/inspector/InspectorProtocolObjects.h

# Build bun with the local JSC build
$ bun run build:local
```

Using `bun run build:local` will build Bun in the `./build/debug-local` directory (instead of `./build/debug`). You'll have to update a few places to use this new directory:

- The first line in [`src/js/builtins.d.ts`](/src/js/builtins.d.ts)
- The `CompilationDatabase` line in [`.clangd` config](/.clangd) should be `CompilationDatabase: build/debug-local`
- In [`build.zig`](/build.zig), the `codegen_path` option should be `build/debug-local/codegen` (instead of `build/debug/codegen`)
- In [`.vscode/launch.json`](/.vscode/launch.json), many configurations use `./build/debug/`, change them as you see fit

Note that the WebKit folder, including build artifacts, is 8GB+ in size.

If you are using a JSC debug build and VS Code, make sure to run the `C/C++: Select a Configuration` command to configure IntelliSense to find the debug headers.

Note that if you make changes to our [WebKit fork](https://github.com/oven-sh/WebKit), you will also have to change [`SetupWebKit.cmake`](/cmake/tools/SetupWebKit.cmake) to point to the commit hash.

## Troubleshooting

### 'span' file not found on Ubuntu
Makefile (29 changes)
@@ -91,9 +91,9 @@ ZIG ?= $(shell which zig 2>/dev/null || echo -e "error: Missing zig. Please make
|
||||
# This is easier to happen than you'd expect.
|
||||
# Using realpath here causes issues because clang uses clang++ as a symlink
|
||||
# so if that's resolved, it won't build for C++
|
||||
REAL_CC = $(shell which clang-18 2>/dev/null || which clang 2>/dev/null)
|
||||
REAL_CXX = $(shell which clang++-18 2>/dev/null || which clang++ 2>/dev/null)
|
||||
CLANG_FORMAT = $(shell which clang-format-18 2>/dev/null || which clang-format 2>/dev/null)
|
||||
REAL_CC = $(shell which clang-19 2>/dev/null || which clang 2>/dev/null)
|
||||
REAL_CXX = $(shell which clang++-19 2>/dev/null || which clang++ 2>/dev/null)
|
||||
CLANG_FORMAT = $(shell which clang-format-19 2>/dev/null || which clang-format 2>/dev/null)
|
||||
|
||||
CC = $(REAL_CC)
|
||||
CXX = $(REAL_CXX)
|
||||
@@ -117,14 +117,14 @@ CC_WITH_CCACHE = $(CCACHE_PATH) $(CC)
|
||||
ifeq ($(OS_NAME),darwin)
|
||||
# Find LLVM
|
||||
ifeq ($(wildcard $(LLVM_PREFIX)),)
|
||||
LLVM_PREFIX = $(shell brew --prefix llvm@18)
|
||||
LLVM_PREFIX = $(shell brew --prefix llvm@19)
|
||||
endif
|
||||
ifeq ($(wildcard $(LLVM_PREFIX)),)
|
||||
LLVM_PREFIX = $(shell brew --prefix llvm)
|
||||
endif
|
||||
ifeq ($(wildcard $(LLVM_PREFIX)),)
|
||||
# This is kinda ugly, but I can't find a better way to error :(
|
||||
LLVM_PREFIX = $(shell echo -e "error: Unable to find llvm. Please run 'brew install llvm@18' or set LLVM_PREFIX=/path/to/llvm")
|
||||
LLVM_PREFIX = $(shell echo -e "error: Unable to find llvm. Please run 'brew install llvm@19' or set LLVM_PREFIX=/path/to/llvm")
|
||||
endif
|
||||
|
||||
LDFLAGS += -L$(LLVM_PREFIX)/lib
|
||||
@@ -164,7 +164,7 @@ CMAKE_FLAGS_WITHOUT_RELEASE = -DCMAKE_C_COMPILER=$(CC) \
|
||||
-DCMAKE_OSX_DEPLOYMENT_TARGET=$(MIN_MACOS_VERSION) \
|
||||
$(CMAKE_CXX_COMPILER_LAUNCHER_FLAG) \
|
||||
-DCMAKE_AR=$(AR) \
|
||||
-DCMAKE_RANLIB=$(which llvm-18-ranlib 2>/dev/null || which llvm-ranlib 2>/dev/null) \
|
||||
-DCMAKE_RANLIB=$(which llvm-19-ranlib 2>/dev/null || which llvm-ranlib 2>/dev/null) \
|
||||
-DCMAKE_CXX_STANDARD=20 \
|
||||
-DCMAKE_C_STANDARD=17 \
|
||||
-DCMAKE_CXX_STANDARD_REQUIRED=ON \
|
||||
@@ -191,7 +191,7 @@ endif
|
||||
|
||||
ifeq ($(OS_NAME),linux)
|
||||
LIBICONV_PATH =
|
||||
AR = $(shell which llvm-ar-18 2>/dev/null || which llvm-ar 2>/dev/null || which ar 2>/dev/null)
|
||||
AR = $(shell which llvm-ar-19 2>/dev/null || which llvm-ar 2>/dev/null || which ar 2>/dev/null)
|
||||
endif
|
||||
|
||||
OPTIMIZATION_LEVEL=-O3 $(MARCH_NATIVE)
|
||||
@@ -255,7 +255,7 @@ DEFAULT_LINKER_FLAGS= -pthread -ldl
|
||||
endif
|
||||
ifeq ($(OS_NAME),darwin)
|
||||
_MIMALLOC_OBJECT_FILE = 0
|
||||
JSC_BUILD_STEPS += jsc-build-mac jsc-copy-headers
|
||||
JSC_BUILD_STEPS += jsc-build-mac
|
||||
JSC_BUILD_STEPS_DEBUG += jsc-build-mac-debug
|
||||
_MIMALLOC_FILE = libmimalloc.a
|
||||
_MIMALLOC_INPUT_PATH = libmimalloc.a
|
||||
@@ -286,7 +286,7 @@ STRIP=/usr/bin/strip
|
||||
endif
|
||||
|
||||
ifeq ($(OS_NAME),linux)
|
||||
STRIP=$(shell which llvm-strip 2>/dev/null || which llvm-strip-18 2>/dev/null || which strip 2>/dev/null || echo "Missing strip")
|
||||
STRIP=$(shell which llvm-strip 2>/dev/null || which llvm-strip-19 2>/dev/null || which strip 2>/dev/null || echo "Missing strip")
|
||||
endif
|
||||
|
||||
|
||||
@@ -674,7 +674,7 @@ endif
|
||||
.PHONY: assert-deps
|
||||
assert-deps:
|
||||
@echo "Checking if the required utilities are available..."
|
||||
@if [ $(CLANG_VERSION) -lt "18" ]; then echo -e "ERROR: clang version >=18 required, found: $(CLANG_VERSION). Install with:\n\n $(POSIX_PKG_MANAGER) install llvm@18"; exit 1; fi
|
||||
@if [ $(CLANG_VERSION) -lt "19" ]; then echo -e "ERROR: clang version >=19 required, found: $(CLANG_VERSION). Install with:\n\n $(POSIX_PKG_MANAGER) install llvm@19"; exit 1; fi
|
||||
@cmake --version >/dev/null 2>&1 || (echo -e "ERROR: cmake is required."; exit 1)
|
||||
@$(PYTHON) --version >/dev/null 2>&1 || (echo -e "ERROR: python is required."; exit 1)
|
||||
@$(ESBUILD) --version >/dev/null 2>&1 || (echo -e "ERROR: esbuild is required."; exit 1)
|
||||
@@ -924,7 +924,7 @@ bun-codesign-release-local-debug:
|
||||
|
||||
|
||||
.PHONY: jsc
|
||||
jsc: jsc-build jsc-copy-headers jsc-bindings
|
||||
jsc: jsc-build
|
||||
.PHONY: jsc-debug
|
||||
jsc-debug: jsc-build-debug
|
||||
.PHONY: jsc-build
|
||||
@@ -1154,7 +1154,7 @@ jsc-copy-headers:
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/StubInfoSummary.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/StubInfoSummary.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/CommonSlowPaths.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/CommonSlowPaths.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/DirectArguments.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/DirectArguments.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/GenericArguments.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/GenericArguments.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/GenericArgumentsImpl.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/GenericArgumentsImpl.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/SamplingProfiler.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/SamplingProfiler.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/ScopedArguments.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/ScopedArguments.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/JSLexicalEnvironment.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/JSLexicalEnvironment.h
|
||||
@@ -1205,7 +1205,7 @@ jsc-copy-headers-debug:
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/StubInfoSummary.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/StubInfoSummary.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/CommonSlowPaths.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/CommonSlowPaths.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/DirectArguments.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/DirectArguments.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/GenericArguments.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/GenericArguments.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/GenericArgumentsImpl.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/GenericArgumentsImpl.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/SamplingProfiler.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/SamplingProfiler.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/ScopedArguments.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/ScopedArguments.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/JSLexicalEnvironment.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/JSLexicalEnvironment.h
|
||||
@@ -1301,6 +1301,7 @@ jsc-build-mac-compile-lto:
|
||||
.PHONY: jsc-build-mac-compile-debug
|
||||
jsc-build-mac-compile-debug:
|
||||
mkdir -p $(WEBKIT_DEBUG_DIR) $(WEBKIT_DIR);
|
||||
# to disable asan, remove -DENABLE_SANITIZERS=address and add -DENABLE_MALLOC_HEAP_BREAKDOWN=ON
|
||||
cd $(WEBKIT_DEBUG_DIR) && \
|
||||
ICU_INCLUDE_DIRS="$(HOMEBREW_PREFIX)opt/icu4c/include" \
|
||||
cmake \
|
||||
@@ -1309,7 +1310,6 @@ jsc-build-mac-compile-debug:
|
||||
-DCMAKE_BUILD_TYPE=Debug \
|
||||
-DUSE_THIN_ARCHIVES=OFF \
|
||||
-DENABLE_FTL_JIT=ON \
|
||||
-DENABLE_MALLOC_HEAP_BREAKDOWN=ON \
|
||||
-DCMAKE_EXPORT_COMPILE_COMMANDS=ON \
|
||||
-DUSE_BUN_JSC_ADDITIONS=ON \
|
||||
-DUSE_BUN_EVENT_LOOP=ON \
|
||||
@@ -1321,6 +1321,7 @@ jsc-build-mac-compile-debug:
|
||||
-DUSE_PTHREAD_JIT_PERMISSIONS_API=ON \
|
||||
-DENABLE_REMOTE_INSPECTOR=ON \
|
||||
-DUSE_VISIBILITY_ATTRIBUTE=1 \
|
||||
-DENABLE_SANITIZERS=address \
|
||||
$(WEBKIT_DIR) \
|
||||
$(WEBKIT_DEBUG_DIR) && \
|
||||
CFLAGS="$(CFLAGS) -ffat-lto-objects" CXXFLAGS="$(CXXFLAGS) -ffat-lto-objects" \
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"lockfileVersion": 0,
|
||||
"lockfileVersion": 1,
|
||||
"workspaces": {
|
||||
"": {
|
||||
"name": "bench",
|
||||
|
||||
bench/crypto/diffie-hellman.mjs (new file, 53 lines)
@@ -0,0 +1,53 @@
|
||||
import crypto from "node:crypto";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
// Pre-generate DH params to avoid including setup in benchmarks
|
||||
const dhSize = 1024; // Reduced from 2048 for faster testing
|
||||
const dh = crypto.createDiffieHellman(dhSize);
|
||||
const dhPrime = dh.getPrime();
|
||||
const dhGenerator = dh.getGenerator();
|
||||
|
||||
// Classical Diffie-Hellman
|
||||
bench("DH - generateKeys", () => {
|
||||
const alice = crypto.createDiffieHellman(dhPrime, dhGenerator);
|
||||
return alice.generateKeys();
|
||||
});
|
||||
|
||||
bench("DH - computeSecret", () => {
|
||||
// Setup
|
||||
const alice = crypto.createDiffieHellman(dhPrime, dhGenerator);
|
||||
const aliceKey = alice.generateKeys();
|
||||
const bob = crypto.createDiffieHellman(dhPrime, dhGenerator);
|
||||
const bobKey = bob.generateKeys();
|
||||
|
||||
// Benchmark just the secret computation
|
||||
return alice.computeSecret(bobKey);
|
||||
});
|
||||
|
||||
// ECDH with prime256v1 (P-256)
|
||||
bench("ECDH-P256 - generateKeys", () => {
|
||||
const ecdh = crypto.createECDH("prime256v1");
|
||||
return ecdh.generateKeys();
|
||||
});
|
||||
|
||||
bench("ECDH-P256 - computeSecret", () => {
|
||||
// Setup
|
||||
const alice = crypto.createECDH("prime256v1");
|
||||
const aliceKey = alice.generateKeys();
|
||||
const bob = crypto.createECDH("prime256v1");
|
||||
const bobKey = bob.generateKeys();
|
||||
|
||||
// Benchmark just the secret computation
|
||||
return alice.computeSecret(bobKey);
|
||||
});
|
||||
|
||||
// ECDH with secp384r1 (P-384)
|
||||
bench("ECDH-P384 - computeSecret", () => {
|
||||
const alice = crypto.createECDH("secp384r1");
|
||||
const aliceKey = alice.generateKeys();
|
||||
const bob = crypto.createECDH("secp384r1");
|
||||
const bobKey = bob.generateKeys();
|
||||
return alice.computeSecret(bobKey);
|
||||
});
|
||||
|
||||
await run();
|
||||
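As context for what `computeSecret` is doing in these benchmarks: two parties that share the same prime and generator derive identical secrets from each other's public keys. A small sketch (not part of the benchmark file) using the same `node:crypto` API:

```js
import crypto from "node:crypto";

// Both parties agree on the same prime and generator.
const alice = crypto.createDiffieHellman(1024);
const bob = crypto.createDiffieHellman(alice.getPrime(), alice.getGenerator());

const aliceKey = alice.generateKeys();
const bobKey = bob.generateKeys();

// Each side combines its private key with the other's public key.
const secretA = alice.computeSecret(bobKey);
const secretB = bob.computeSecret(aliceKey);

console.log(secretA.equals(secretB)); // true
```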
bench/crypto/ecdh-convert-key.mjs (new file, 44 lines)
@@ -0,0 +1,44 @@
|
||||
import crypto from "node:crypto";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
function generateTestKeyPairs() {
|
||||
const curves = crypto.getCurves();
|
||||
const keys = {};
|
||||
|
||||
for (const curve of curves) {
|
||||
const ecdh = crypto.createECDH(curve);
|
||||
ecdh.generateKeys();
|
||||
|
||||
keys[curve] = {
|
||||
compressed: ecdh.getPublicKey("hex", "compressed"),
|
||||
uncompressed: ecdh.getPublicKey("hex", "uncompressed"),
|
||||
instance: ecdh,
|
||||
};
|
||||
}
|
||||
|
||||
return keys;
|
||||
}
|
||||
|
||||
const testKeys = generateTestKeyPairs();
|
||||
|
||||
bench("ECDH key format - P256 compressed to uncompressed", () => {
|
||||
const publicKey = testKeys["prime256v1"].compressed;
|
||||
return crypto.ECDH.convertKey(publicKey, "prime256v1", "hex", "hex", "uncompressed");
|
||||
});
|
||||
|
||||
bench("ECDH key format - P256 uncompressed to compressed", () => {
|
||||
const publicKey = testKeys["prime256v1"].uncompressed;
|
||||
return crypto.ECDH.convertKey(publicKey, "prime256v1", "hex", "hex", "compressed");
|
||||
});
|
||||
|
||||
bench("ECDH key format - P384 compressed to uncompressed", () => {
|
||||
const publicKey = testKeys["secp384r1"].compressed;
|
||||
return crypto.ECDH.convertKey(publicKey, "secp384r1", "hex", "hex", "uncompressed");
|
||||
});
|
||||
|
||||
bench("ECDH key format - P384 uncompressed to compressed", () => {
|
||||
const publicKey = testKeys["secp384r1"].uncompressed;
|
||||
return crypto.ECDH.convertKey(publicKey, "secp384r1", "hex", "hex", "compressed");
|
||||
});
|
||||
|
||||
await run();
|
||||
bench/crypto/hkdf.mjs (new file, 50 lines)
@@ -0,0 +1,50 @@
|
||||
import crypto from "node:crypto";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
// Sample keys with different lengths
|
||||
const keys = {
|
||||
short: "secret",
|
||||
long: "this-is-a-much-longer-secret-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
|
||||
};
|
||||
|
||||
// Test parameters
|
||||
const salts = ["", "salt"];
|
||||
const infos = ["", "info"];
|
||||
const hashes = ["sha256", "sha512"];
|
||||
const sizes = [10, 1024];
|
||||
|
||||
// Benchmark sync HKDF
|
||||
for (const hash of hashes) {
|
||||
for (const keyName of Object.keys(keys)) {
|
||||
const key = keys[keyName];
|
||||
for (const size of sizes) {
|
||||
bench(`hkdfSync ${hash} ${keyName}-key ${size} bytes`, () => {
|
||||
return crypto.hkdfSync(hash, key, "salt", "info", size);
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Benchmark different combinations of salt and info
|
||||
for (const salt of salts) {
|
||||
for (const info of infos) {
|
||||
bench(`hkdfSync sha256 with ${salt ? "salt" : "no-salt"} and ${info ? "info" : "no-info"}`, () => {
|
||||
return crypto.hkdfSync("sha256", "secret", salt, info, 64);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Benchmark async HKDF (using promises for cleaner benchmark)
|
||||
// Note: async benchmarks in Mitata require returning a Promise
|
||||
for (const hash of hashes) {
|
||||
bench(`hkdf ${hash} async`, async () => {
|
||||
return new Promise((resolve, reject) => {
|
||||
crypto.hkdf(hash, "secret", "salt", "info", 64, (err, derivedKey) => {
|
||||
if (err) reject(err);
|
||||
else resolve(derivedKey);
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
await run();
|
||||
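The async cases above wrap the callback form of `crypto.hkdf` in a `new Promise` by hand; an equivalent approach, shown here only as an aside, is to promisify it once with `node:util`:

```js
import crypto from "node:crypto";
import { promisify } from "node:util";

const hkdf = promisify(crypto.hkdf);

// Same parameters as the benchmark: digest, key, salt, info, output length.
const derivedKey = await hkdf("sha256", "secret", "salt", "info", 64);
console.log(Buffer.from(derivedKey).byteLength); // 64
```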
bench/crypto/primes.mjs (new file, 43 lines)
@@ -0,0 +1,43 @@
|
||||
import { checkPrime, checkPrimeSync, generatePrime, generatePrimeSync } from "node:crypto";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
const prime512 = generatePrimeSync(512);
|
||||
const prime2048 = generatePrimeSync(2048);
|
||||
|
||||
bench("checkPrimeSync 512", () => {
|
||||
return checkPrimeSync(prime512);
|
||||
});
|
||||
|
||||
bench("checkPrimeSync 2048", () => {
|
||||
return checkPrimeSync(prime2048);
|
||||
});
|
||||
|
||||
bench("checkPrime 512", async () => {
|
||||
const promises = Array.from({ length: 10 }, () => new Promise(resolve => checkPrime(prime512, resolve)));
|
||||
await Promise.all(promises);
|
||||
});
|
||||
|
||||
bench("checkPrime 2048", async () => {
|
||||
const promises = Array.from({ length: 10 }, () => new Promise(resolve => checkPrime(prime2048, resolve)));
|
||||
await Promise.all(promises);
|
||||
});
|
||||
|
||||
bench("generatePrimeSync 512", () => {
|
||||
return generatePrimeSync(512);
|
||||
});
|
||||
|
||||
bench("generatePrimeSync 2048", () => {
|
||||
return generatePrimeSync(2048);
|
||||
});
|
||||
|
||||
bench("generatePrime 512", async () => {
|
||||
const promises = Array.from({ length: 10 }, () => new Promise(resolve => generatePrime(512, resolve)));
|
||||
await Promise.all(promises);
|
||||
});
|
||||
|
||||
bench("generatePrime 2048", async () => {
|
||||
const promises = Array.from({ length: 10 }, () => new Promise(resolve => generatePrime(2048, resolve)));
|
||||
await Promise.all(promises);
|
||||
});
|
||||
|
||||
await run();
|
||||
bench/crypto/random.mjs (new file, 50 lines)
@@ -0,0 +1,50 @@
|
||||
import crypto from "crypto";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
bench("randomInt - sync", () => {
|
||||
crypto.randomInt(1000);
|
||||
});
|
||||
|
||||
bench("randomInt - async", async () => {
|
||||
const { promise, resolve } = Promise.withResolvers();
|
||||
crypto.randomInt(1000, () => {
|
||||
resolve();
|
||||
});
|
||||
await promise;
|
||||
});
|
||||
|
||||
bench("randonBytes - 32", () => {
|
||||
crypto.randomBytes(32);
|
||||
});
|
||||
|
||||
bench("randomBytes - 256", () => {
|
||||
crypto.randomBytes(256);
|
||||
});
|
||||
|
||||
const buf = Buffer.alloc(256);
|
||||
|
||||
bench("randomFill - 32", async () => {
|
||||
const { promise, resolve } = Promise.withResolvers();
|
||||
crypto.randomFill(buf, 0, 32, () => {
|
||||
resolve();
|
||||
});
|
||||
await promise;
|
||||
});
|
||||
|
||||
bench("randomFill - 256", async () => {
|
||||
const { promise, resolve } = Promise.withResolvers();
|
||||
crypto.randomFill(buf, 0, 256, () => {
|
||||
resolve();
|
||||
});
|
||||
await promise;
|
||||
});
|
||||
|
||||
bench("randomFillSync - 32", () => {
|
||||
crypto.randomFillSync(buf, 0, 32);
|
||||
});
|
||||
|
||||
bench("randomFillSync - 256", () => {
|
||||
crypto.randomFillSync(buf, 0, 256);
|
||||
});
|
||||
|
||||
await run();
|
||||
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"lockfileVersion": 0,
|
||||
"lockfileVersion": 1,
|
||||
"workspaces": {
|
||||
"": {
|
||||
"name": "expect-to-equal",
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"lockfileVersion": 0,
|
||||
"lockfileVersion": 1,
|
||||
"workspaces": {
|
||||
"": {
|
||||
"name": "bench",
|
||||
|
||||
@@ -1,19 +1,27 @@
|
||||
import micromatch from "micromatch";
|
||||
import { Glob } from "bun";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
const Glob = typeof Bun !== "undefined" ? Bun.Glob : undefined;
|
||||
const doMatch = typeof Bun === "undefined" ? micromatch.isMatch : (a, b) => new Glob(b).match(a);
|
||||
|
||||
bench((Glob ? "Bun.Glob - " : "micromatch - ") + "**/*.js", () => {
|
||||
doMatch("foo/bar.js", "**/*.js");
|
||||
});
|
||||
function benchPattern(name, glob, pattern) {
|
||||
bench(name, () => {
|
||||
new Glob(glob).match(pattern);
|
||||
})
|
||||
}
|
||||
|
||||
benchPattern("max-depth" , "1{2,3{4,5{6,7{8,9{a,b{c,d{e,f{g,h{i,j{k,l}}}}}}}}}}m", "13579bdfhjlm");
|
||||
benchPattern("non-ascii", "😎/¢£.{ts,tsx,js,jsx}", "😎/¢£.jsx");
|
||||
benchPattern("utf8", "フォルダ/**/*", "フォルダ/aaa.js");
|
||||
benchPattern("non-ascii+max-depth" , "1{2,3{4,5{6,7{8,😎{a,b{c,d{e,f{g,h{i,j{k,l}}}}}}}}}}m", "1357😎bdfhjlm");
|
||||
benchPattern("pretty-average", "test/{foo/**,bar}/baz", "test/bar/baz");
|
||||
benchPattern("pretty-average-2", "a/**/c/*.md", "a/bb.bb/aa/b.b/aa/c/xyz.md");
|
||||
benchPattern("pretty-average-3", "a/b/**/c{d,e}/**/xyz.md", "a/b/cd/xyz.md");
|
||||
benchPattern("pretty-average-4", "foo/bar/**/one/**/*.*", "foo/bar/baz/one/two/three/image.png");
|
||||
benchPattern("long-pretty-average", "some/**/needle.{js,tsx,mdx,ts,jsx,txt}", "some/a/bigger/path/to/the/crazy/needle.txt");
|
||||
benchPattern("brackets-lots", "f[^eiu][^eiu][^eiu][^eiu][^eiu]r", "foo-bar");
|
||||
|
||||
bench((Glob ? "Bun.Glob - " : "micromatch - ") + "*.js", () => {
|
||||
doMatch("bar.js", "*.js");
|
||||
});
|
||||
|
||||
await run({
|
||||
avg: true,
|
||||
min_max: true,
|
||||
percentiles: true,
|
||||
});
|
||||
min_max: true,
|
||||
percentiles: true,
|
||||
avg: true,
|
||||
})
|
||||
|
||||
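For reference on what `benchPattern` exercises: `Bun.Glob` compiles a pattern once and `match()` tests a single path against it. A tiny usage sketch (assuming the Bun runtime):

```js
import { Glob } from "bun";

const glob = new Glob("**/*.js");
console.log(glob.match("foo/bar.js")); // true
console.log(glob.match("foo/bar.css")); // false
```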
bench/glob/micromatch.mjs (new file, 19 lines)
@@ -0,0 +1,19 @@
|
||||
import micromatch from "micromatch";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
const Glob = typeof Bun !== "undefined" ? Bun.Glob : undefined;
|
||||
const doMatch = typeof Bun === "undefined" ? micromatch.isMatch : (a, b) => new Glob(b).match(a);
|
||||
|
||||
bench((Glob ? "Bun.Glob - " : "micromatch - ") + "**/*.js", () => {
|
||||
doMatch("foo/bar.js", "**/*.js");
|
||||
});
|
||||
|
||||
bench((Glob ? "Bun.Glob - " : "micromatch - ") + "*.js", () => {
|
||||
doMatch("bar.js", "*.js");
|
||||
});
|
||||
|
||||
await run({
|
||||
avg: true,
|
||||
min_max: true,
|
||||
percentiles: true,
|
||||
});
|
||||
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"lockfileVersion": 0,
|
||||
"lockfileVersion": 1,
|
||||
"workspaces": {
|
||||
"": {
|
||||
"name": "bench",
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"lockfileVersion": 0,
|
||||
"lockfileVersion": 1,
|
||||
"workspaces": {
|
||||
"": {
|
||||
"name": "simple-react",
|
||||
|
||||
@@ -12,6 +12,7 @@
|
||||
"eventemitter3": "^5.0.0",
|
||||
"execa": "^8.0.1",
|
||||
"fast-glob": "3.3.1",
|
||||
"fastify": "^5.0.0",
|
||||
"fdir": "^6.1.0",
|
||||
"mitata": "^1.0.25",
|
||||
"react": "^18.3.1",
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"lockfileVersion": 0,
|
||||
"lockfileVersion": 1,
|
||||
"workspaces": {
|
||||
"": {
|
||||
"name": "react-hello-world",
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"lockfileVersion": 0,
|
||||
"lockfileVersion": 1,
|
||||
"workspaces": {
|
||||
"": {
|
||||
"name": "scan",
|
||||
|
||||
@@ -39,24 +39,24 @@ bench("Buffer.from(ArrayBuffer(100))", () => {
|
||||
return Buffer.from(hundred);
|
||||
});
|
||||
|
||||
bench("new Buffer(ArrayBuffer(100))", () => {
|
||||
return new Buffer(hundred);
|
||||
});
|
||||
|
||||
var hundredArray = new Uint8Array(100);
|
||||
bench("Buffer.from(Uint8Array(100))", () => {
|
||||
return Buffer.from(hundredArray);
|
||||
});
|
||||
|
||||
bench("new Buffer(Uint8Array(100))", () => {
|
||||
return new Buffer(hundredArray);
|
||||
});
|
||||
|
||||
var empty = new Uint8Array(0);
|
||||
bench("Buffer.from(Uint8Array(0))", () => {
|
||||
return Buffer.from(empty);
|
||||
});
|
||||
|
||||
bench("new Buffer(ArrayBuffer(100))", () => {
|
||||
return new Buffer(hundred);
|
||||
});
|
||||
|
||||
bench("new Buffer(Uint8Array(100))", () => {
|
||||
return new Buffer(hundredArray);
|
||||
});
|
||||
|
||||
bench("new Buffer(Uint8Array(0))", () => {
|
||||
return new Buffer(empty);
|
||||
});
|
||||
|
||||
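These cases compare `Buffer.from(...)` with the legacy `new Buffer(...)` constructor, which Node.js deprecates in favor of `Buffer.from`/`Buffer.alloc`; the constructor form is kept here only to measure that older code path. A minimal illustration of the pair being compared:

```js
const hundred = new ArrayBuffer(100);

const viaFrom = Buffer.from(hundred); // modern API
const viaCtor = new Buffer(hundred); // deprecated constructor, same result here

console.log(viaFrom.length, viaCtor.length); // 100 100
```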
bench/snippets/buffer-includes.js (new file, 17 lines)
@@ -0,0 +1,17 @@
|
||||
const buf = Buffer.from(
|
||||
"Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.",
|
||||
);
|
||||
const INTERVAL = 9_999_999;
|
||||
|
||||
const time = (name, fn) => {
|
||||
for (let i = 0; i < INTERVAL; i++) fn();
|
||||
|
||||
console.time(name.padEnd(30));
|
||||
for (let i = 0; i < INTERVAL; i++) fn();
|
||||
console.timeEnd(name.padEnd(30));
|
||||
};
|
||||
|
||||
console.log(`Run ${new Intl.NumberFormat().format(INTERVAL)} times with a warmup:`, "\n");
|
||||
|
||||
time("includes true", () => buf.includes("nisi"));
|
||||
time("includes false", () => buf.includes("oopwo"));
|
||||
@@ -1,7 +1,7 @@
|
||||
// import { Buffer } from "buffer";
|
||||
var buf = new Buffer(1024);
|
||||
var view = new DataView(buf.buffer);
|
||||
var INTERVAL = 9999999;
|
||||
var INTERVAL = 9_999_999;
|
||||
var time = (name, fn) => {
|
||||
for (let i = 0; i < INTERVAL; i++) fn();
|
||||
|
||||
|
||||
bench/snippets/decode.js (new file, 71 lines)
@@ -0,0 +1,71 @@
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
let decodeURIComponentSIMD;
|
||||
if (typeof Bun !== "undefined") {
|
||||
({ decodeURIComponentSIMD } = await import("bun:internal-for-testing"));
|
||||
}
|
||||
|
||||
const hugeText = Buffer.alloc(1000000, "Hello, world!").toString();
|
||||
const hugeTextWithPercentAtEnd = Buffer.alloc(1000000, "Hello, world!%40").toString();
|
||||
|
||||
const tinyText = Buffer.alloc(100, "Hello, world!").toString();
|
||||
const tinyTextWithPercentAtEnd = Buffer.alloc(100, "Hello, world!%40").toString();
|
||||
|
||||
const veryTinyText = Buffer.alloc(8, "a").toString();
|
||||
const veryTinyTextWithPercentAtEnd = Buffer.alloc(8, "a%40").toString();
|
||||
|
||||
decodeURIComponentSIMD &&
|
||||
bench("decodeURIComponentSIMD - no % x 8 bytes", () => {
|
||||
decodeURIComponentSIMD(veryTinyText);
|
||||
});
|
||||
|
||||
bench(" decodeURIComponent - no % x 8 bytes", () => {
|
||||
decodeURIComponent(veryTinyText);
|
||||
});
|
||||
|
||||
decodeURIComponentSIMD &&
|
||||
bench("decodeURIComponentSIMD - yes % x 8 bytes", () => {
|
||||
decodeURIComponentSIMD(veryTinyTextWithPercentAtEnd);
|
||||
});
|
||||
|
||||
bench(" decodeURIComponent - yes % x 8 bytes", () => {
|
||||
decodeURIComponent(veryTinyTextWithPercentAtEnd);
|
||||
});
|
||||
|
||||
decodeURIComponentSIMD &&
|
||||
bench("decodeURIComponentSIMD - no % x 100 bytes", () => {
|
||||
decodeURIComponentSIMD(tinyText);
|
||||
});
|
||||
|
||||
bench(" decodeURIComponent - no % x 100 bytes", () => {
|
||||
decodeURIComponent(tinyText);
|
||||
});
|
||||
|
||||
decodeURIComponentSIMD &&
|
||||
bench("decodeURIComponentSIMD - yes % x 100 bytes", () => {
|
||||
decodeURIComponentSIMD(tinyTextWithPercentAtEnd);
|
||||
});
|
||||
|
||||
bench(" decodeURIComponent - yes % x 100 bytes", () => {
|
||||
decodeURIComponent(tinyTextWithPercentAtEnd);
|
||||
});
|
||||
|
||||
decodeURIComponentSIMD &&
|
||||
bench("decodeURIComponentSIMD - no % x 1 MB", () => {
|
||||
decodeURIComponentSIMD(hugeText);
|
||||
});
|
||||
|
||||
bench(" decodeURIComponent - no % x 1 MB", () => {
|
||||
decodeURIComponent(hugeText);
|
||||
});
|
||||
|
||||
decodeURIComponentSIMD &&
|
||||
bench("decodeURIComponentSIMD - yes % x 1 MB", () => {
|
||||
decodeURIComponentSIMD(hugeTextWithPercentAtEnd);
|
||||
});
|
||||
|
||||
bench(" decodeURIComponent - yes % x 1 MB", () => {
|
||||
decodeURIComponent(hugeTextWithPercentAtEnd);
|
||||
});
|
||||
|
||||
await run();
|
||||
bench/snippets/express-hello.mjs (new file, 13 lines)
@@ -0,0 +1,13 @@
|
||||
import express from "express";
|
||||
|
||||
const app = express();
|
||||
const port = 3000;
|
||||
|
||||
var i = 0;
|
||||
app.get("/", (req, res) => {
|
||||
res.send("Hello World!" + i++);
|
||||
});
|
||||
|
||||
app.listen(port, () => {
|
||||
console.log(`Express app listening at http://localhost:${port}`);
|
||||
});
|
||||
bench/snippets/fastify.mjs (new file, 20 lines)
@@ -0,0 +1,20 @@
|
||||
import Fastify from "fastify";
|
||||
|
||||
const fastify = Fastify({
|
||||
logger: false,
|
||||
});
|
||||
|
||||
fastify.get("/", async (request, reply) => {
|
||||
return { hello: "world" };
|
||||
});
|
||||
|
||||
const start = async () => {
|
||||
try {
|
||||
await fastify.listen({ port: 3000 });
|
||||
} catch (err) {
|
||||
fastify.log.error(err);
|
||||
process.exit(1);
|
||||
}
|
||||
};
|
||||
|
||||
start();
|
||||
bench/snippets/path-relative.mjs (new file, 25 lines)
@@ -0,0 +1,25 @@
|
||||
import { posix } from "path";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
const pathConfigurations = [
|
||||
["", ""],
|
||||
[".", "."],
|
||||
["/foo/bar", "/foo/bar"],
|
||||
["/foo/bar/baz", "/foo/bar"],
|
||||
["/foo/bar", "/foo/bar/baz"],
|
||||
["/foo/bar/baz", "/foo/bar/qux"],
|
||||
["/foo/bar/baz", "/foo/bar/baz/qux"],
|
||||
["/foo/bar/baz", "/foo/bar/baz/qux/quux"],
|
||||
["/", "/foo"],
|
||||
["/foo", "/"],
|
||||
["foo/bar/baz", "foo/bar/qux"],
|
||||
["../foo/bar", "../foo/baz"],
|
||||
];
|
||||
|
||||
pathConfigurations.forEach(([from, to]) => {
|
||||
bench(`relative(${JSON.stringify(from)}, ${JSON.stringify(to)})`, () => {
|
||||
globalThis.abc = posix.relative(from, to);
|
||||
});
|
||||
});
|
||||
|
||||
await run();
|
||||
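To make the benchmarked pairs concrete, `posix.relative(from, to)` returns the path you would have to walk to get from `from` to `to`. A few of the configurations above resolve like this (illustrative sketch):

```js
import { posix } from "path";

console.log(posix.relative("/foo/bar/baz", "/foo/bar")); // ".."
console.log(posix.relative("/foo/bar", "/foo/bar/baz")); // "baz"
console.log(posix.relative("/foo/bar/baz", "/foo/bar/qux")); // "../qux"
console.log(posix.relative("", "")); // ""
```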
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"lockfileVersion": 0,
|
||||
"lockfileVersion": 1,
|
||||
"workspaces": {
|
||||
"": {
|
||||
"name": "bench",
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"lockfileVersion": 0,
|
||||
"lockfileVersion": 1,
|
||||
"workspaces": {
|
||||
"": {
|
||||
"name": "websocket-server",
|
||||
|
||||
build.zig (95 changes)
@@ -19,16 +19,17 @@ const OperatingSystem = @import("src/env.zig").OperatingSystem;
|
||||
const pathRel = fs.path.relative;
|
||||
|
||||
/// Do not rename this constant. It is scanned by some scripts to determine which zig version to install.
|
||||
const recommended_zig_version = "0.13.0";
|
||||
const recommended_zig_version = "0.14.0";
|
||||
|
||||
comptime {
|
||||
if (!std.mem.eql(u8, builtin.zig_version_string, recommended_zig_version)) {
|
||||
@compileError(
|
||||
"" ++
|
||||
"Bun requires Zig version " ++ recommended_zig_version ++ ". This is" ++
|
||||
"automatically configured via Bun's CMake setup. You likely meant to run" ++
|
||||
"`bun setup`. If you are trying to upgrade the Zig compiler," ++
|
||||
"run `./scripts/download-zig.sh master` or comment this message out.",
|
||||
"Bun requires Zig version " ++ recommended_zig_version ++ ", but you have " ++
|
||||
builtin.zig_version_string ++ ". This is automatically configured via Bun's " ++
|
||||
"CMake setup. You likely meant to run `bun run build`. If you are trying to " ++
|
||||
"upgrade the Zig compiler, edit ZIG_COMMIT in cmake/tools/SetupZig.cmake or " ++
|
||||
"comment this error out.",
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -46,6 +47,7 @@ const BunBuildOptions = struct {
|
||||
sha: []const u8,
|
||||
/// enable debug logs in release builds
|
||||
enable_logs: bool = false,
|
||||
enable_asan: bool,
|
||||
tracy_callstack_depth: u16,
|
||||
reported_nodejs_version: Version,
|
||||
/// To make iterating on some '@embedFile's faster, we load them at runtime
|
||||
@@ -81,10 +83,7 @@ const BunBuildOptions = struct {
|
||||
|
||||
var opts = b.addOptions();
|
||||
opts.addOption([]const u8, "base_path", b.pathFromRoot("."));
|
||||
opts.addOption([]const u8, "codegen_path", std.fs.path.resolve(b.graph.arena, &.{
|
||||
b.build_root.path.?,
|
||||
this.codegen_path,
|
||||
}) catch @panic("OOM"));
|
||||
opts.addOption([]const u8, "codegen_path", std.fs.path.resolve(b.graph.arena, &.{ b.build_root.path.?, this.codegen_path }) catch @panic("OOM"));
|
||||
|
||||
opts.addOption(bool, "codegen_embed", this.shouldEmbedCode());
|
||||
opts.addOption(u32, "canary_revision", this.canary_revision orelse 0);
|
||||
@@ -154,8 +153,6 @@ pub fn build(b: *Build) !void {
|
||||
std.log.info("zig compiler v{s}", .{builtin.zig_version_string});
|
||||
checked_file_exists = std.AutoHashMap(u64, void).init(b.allocator);
|
||||
|
||||
b.zig_lib_dir = b.zig_lib_dir orelse b.path("vendor/zig/lib");
|
||||
|
||||
// TODO: Upgrade path for 0.14.0
|
||||
// b.graph.zig_lib_directory = brk: {
|
||||
// const sub_path = "vendor/zig/lib";
|
||||
@@ -209,7 +206,7 @@ pub fn build(b: *Build) !void {
|
||||
const bun_version = b.option([]const u8, "version", "Value of `Bun.version`") orelse "0.0.0";
|
||||
|
||||
b.reference_trace = ref_trace: {
|
||||
const trace = b.option(u32, "reference-trace", "Set the reference trace") orelse 16;
|
||||
const trace = b.option(u32, "reference-trace", "Set the reference trace") orelse 24;
|
||||
break :ref_trace if (trace == 0) null else trace;
|
||||
};
|
||||
|
||||
@@ -277,6 +274,7 @@ pub fn build(b: *Build) !void {
|
||||
|
||||
.tracy_callstack_depth = b.option(u16, "tracy_callstack_depth", "") orelse 10,
|
||||
.enable_logs = b.option(bool, "enable_logs", "Enable logs in release") orelse false,
|
||||
.enable_asan = b.option(bool, "enable_asan", "Enable asan") orelse false,
|
||||
};
|
||||
|
||||
// zig build obj
|
||||
@@ -321,7 +319,21 @@ pub fn build(b: *Build) !void {
|
||||
.{ .os = .linux, .arch = .aarch64 },
|
||||
.{ .os = .linux, .arch = .x86_64, .musl = true },
|
||||
.{ .os = .linux, .arch = .aarch64, .musl = true },
|
||||
});
|
||||
}, &.{ .Debug, .ReleaseFast });
|
||||
}
|
||||
|
||||
// zig build check-all-debug
|
||||
{
|
||||
const step = b.step("check-all-debug", "Check for semantic analysis errors on all supported platforms in debug mode");
|
||||
addMultiCheck(b, step, build_options, &.{
|
||||
.{ .os = .windows, .arch = .x86_64 },
|
||||
.{ .os = .mac, .arch = .x86_64 },
|
||||
.{ .os = .mac, .arch = .aarch64 },
|
||||
.{ .os = .linux, .arch = .x86_64 },
|
||||
.{ .os = .linux, .arch = .aarch64 },
|
||||
.{ .os = .linux, .arch = .x86_64, .musl = true },
|
||||
.{ .os = .linux, .arch = .aarch64, .musl = true },
|
||||
}, &.{.Debug});
|
||||
}
|
||||
|
||||
// zig build check-windows
|
||||
@@ -329,13 +341,27 @@ pub fn build(b: *Build) !void {
|
||||
const step = b.step("check-windows", "Check for semantic analysis errors on Windows");
|
||||
addMultiCheck(b, step, build_options, &.{
|
||||
.{ .os = .windows, .arch = .x86_64 },
|
||||
});
|
||||
}, &.{ .Debug, .ReleaseFast });
|
||||
}
|
||||
{
const step = b.step("check-macos", "Check for semantic analysis errors on macOS");
addMultiCheck(b, step, build_options, &.{
.{ .os = .mac, .arch = .x86_64 },
.{ .os = .mac, .arch = .aarch64 },
}, &.{ .Debug, .ReleaseFast });
}
{
const step = b.step("check-linux", "Check for semantic analysis errors on Linux");
addMultiCheck(b, step, build_options, &.{
.{ .os = .linux, .arch = .x86_64 },
.{ .os = .linux, .arch = .aarch64 },
}, &.{ .Debug, .ReleaseFast });
}
|
||||
|
||||
// zig build translate-c-headers
|
||||
{
|
||||
const step = b.step("translate-c", "Copy generated translated-c-headers.zig to zig-out");
|
||||
step.dependOn(&b.addInstallFile(getTranslateC(b, b.host, .Debug).getOutput(), "translated-c-headers.zig").step);
|
||||
step.dependOn(&b.addInstallFile(getTranslateC(b, b.graph.host, .Debug).getOutput(), "translated-c-headers.zig").step);
|
||||
}
|
||||
|
||||
// zig build enum-extractor
|
||||
@@ -357,13 +383,14 @@ pub fn addMultiCheck(
|
||||
parent_step: *Step,
|
||||
root_build_options: BunBuildOptions,
|
||||
to_check: []const struct { os: OperatingSystem, arch: Arch, musl: bool = false },
|
||||
optimize: []const std.builtin.OptimizeMode,
|
||||
) void {
|
||||
for (to_check) |check| {
|
||||
for ([_]std.builtin.Mode{ .Debug, .ReleaseFast }) |mode| {
|
||||
for (optimize) |mode| {
|
||||
const check_target = b.resolveTargetQuery(.{
|
||||
.os_tag = OperatingSystem.stdOSTag(check.os),
|
||||
.cpu_arch = check.arch,
|
||||
.cpu_model = getCpuModel(check.os, check.arch) orelse .determined_by_cpu_arch,
|
||||
.cpu_model = getCpuModel(check.os, check.arch) orelse .determined_by_arch_os,
|
||||
.os_version_min = getOSVersionMin(check.os),
|
||||
.glibc_version = if (check.musl) null else getOSGlibCVersion(check.os),
|
||||
});
|
||||
@@ -381,6 +408,7 @@ pub fn addMultiCheck(
|
||||
.reported_nodejs_version = root_build_options.reported_nodejs_version,
|
||||
.codegen_path = root_build_options.codegen_path,
|
||||
.no_llvm = root_build_options.no_llvm,
|
||||
.enable_asan = root_build_options.enable_asan,
|
||||
};
|
||||
|
||||
var obj = addBunObject(b, &options);
|
||||
@@ -414,7 +442,7 @@ pub fn addBunObject(b: *Build, opts: *BunBuildOptions) *Compile {
|
||||
.name = if (opts.optimize == .Debug) "bun-debug" else "bun",
|
||||
.root_source_file = switch (opts.os) {
|
||||
.wasm => b.path("root_wasm.zig"),
|
||||
else => b.path("root.zig"),
|
||||
else => b.path("src/main.zig"),
|
||||
// else => b.path("root_css.zig"),
|
||||
},
|
||||
.target = opts.target,
|
||||
@@ -428,8 +456,15 @@ pub fn addBunObject(b: *Build, opts: *BunBuildOptions) *Compile {
|
||||
.omit_frame_pointer = false,
|
||||
.strip = false, // stripped at the end
|
||||
});
|
||||
if (opts.enable_asan) {
|
||||
if (@hasField(Build.Module, "sanitize_address")) {
|
||||
obj.root_module.sanitize_address = true;
|
||||
} else {
|
||||
const fail_step = b.addFail("asan is not supported on this platform");
|
||||
obj.step.dependOn(&fail_step.step);
|
||||
}
|
||||
}
|
||||
obj.bundle_compiler_rt = false;
|
||||
obj.formatted_panics = true;
|
||||
obj.root_module.omit_frame_pointer = false;
|
||||
|
||||
// Link libc
|
||||
@@ -497,16 +532,6 @@ fn exists(path: []const u8) bool {
|
||||
fn addInternalPackages(b: *Build, obj: *Compile, opts: *BunBuildOptions) void {
|
||||
const os = opts.os;
|
||||
|
||||
const io_path = switch (os) {
|
||||
.mac => "src/io/io_darwin.zig",
|
||||
.linux => "src/io/io_linux.zig",
|
||||
.windows => "src/io/io_windows.zig",
|
||||
else => "src/io/io_stub.zig",
|
||||
};
|
||||
obj.root_module.addAnonymousImport("async_io", .{
|
||||
.root_source_file = b.path(io_path),
|
||||
});
|
||||
|
||||
const zlib_internal_path = switch (os) {
|
||||
.windows => "src/deps/zlib.win32.zig",
|
||||
.linux, .mac => "src/deps/zlib.posix.zig",
|
||||
@@ -539,6 +564,7 @@ fn addInternalPackages(b: *Build, obj: *Compile, opts: *BunBuildOptions) void {
|
||||
.{ .file = "bun-error/index.js", .enable = opts.shouldEmbedCode() },
|
||||
.{ .file = "bun-error/bun-error.css", .enable = opts.shouldEmbedCode() },
|
||||
.{ .file = "fallback-decoder.js", .enable = opts.shouldEmbedCode() },
|
||||
.{ .file = "node-fallbacks/react-refresh.js", .enable = opts.shouldEmbedCode() },
|
||||
.{ .file = "node-fallbacks/assert.js", .enable = opts.shouldEmbedCode() },
|
||||
.{ .file = "node-fallbacks/buffer.js", .enable = opts.shouldEmbedCode() },
|
||||
.{ .file = "node-fallbacks/console.js", .enable = opts.shouldEmbedCode() },
|
||||
@@ -575,6 +601,15 @@ fn addInternalPackages(b: *Build, obj: *Compile, opts: *BunBuildOptions) void {
|
||||
});
|
||||
}
|
||||
}
|
||||
inline for (.{
|
||||
.{ .import = "completions-bash", .file = b.path("completions/bun.bash") },
|
||||
.{ .import = "completions-zsh", .file = b.path("completions/bun.zsh") },
|
||||
.{ .import = "completions-fish", .file = b.path("completions/bun.fish") },
|
||||
}) |entry| {
|
||||
obj.root_module.addAnonymousImport(entry.import, .{
|
||||
.root_source_file = entry.file,
|
||||
});
|
||||
}
|
||||
|
||||
if (os == .windows) {
|
||||
obj.root_module.addAnonymousImport("bun_shim_impl.exe", .{
|
||||
@@ -614,7 +649,7 @@ const WindowsShim = struct {
|
||||
.optimize = .ReleaseFast,
|
||||
.use_llvm = true,
|
||||
.use_lld = true,
|
||||
.unwind_tables = false,
|
||||
.unwind_tables = .none,
|
||||
.omit_frame_pointer = true,
|
||||
.strip = true,
|
||||
.linkage = .static,
|
||||
|
||||
bun.lock
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"lockfileVersion": 0,
|
||||
"lockfileVersion": 1,
|
||||
"workspaces": {
|
||||
"": {
|
||||
"name": "bun",
|
||||
@@ -151,13 +151,13 @@
|
||||
|
||||
"@qiwi/npm-registry-client": ["@qiwi/npm-registry-client@8.9.1", "", { "dependencies": { "concat-stream": "^2.0.0", "graceful-fs": "^4.2.4", "normalize-package-data": "~1.0.1 || ^2.0.0 || ^3.0.0", "npm-package-arg": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^8.0.0", "once": "^1.4.0", "request": "^2.88.2", "retry": "^0.12.0", "safe-buffer": "^5.2.1", "semver": "2 >=2.2.1 || 3.x || 4 || 5 || 7", "slide": "^1.1.6", "ssri": "^8.0.0" }, "optionalDependencies": { "npmlog": "2 || ^3.1.0 || ^4.0.0" } }, "sha512-rZF+mG+NfijR0SHphhTLHRr4aM4gtfdwoAMY6we2VGQam8vkN1cxGG1Lg/Llrj8Dd0Mu6VjdFQRyMMRZxtZR2A=="],
|
||||
|
||||
"@types/bun": ["@types/bun@1.1.17", "", { "dependencies": { "bun-types": "1.1.44" } }, "sha512-zZt0Kao/8hAwNOXh4bmt8nKbMEd4QD8n7PeTGF+NZTVY5ouXhU/TX7jUj4He1p7mgY+WdplnU1B6MB1j17vdzg=="],
|
||||
"@types/bun": ["@types/bun@1.2.2", "", { "dependencies": { "bun-types": "1.2.2" } }, "sha512-tr74gdku+AEDN5ergNiBnplr7hpDp3V1h7fqI2GcR/rsUaM39jpSeKH0TFibRvU0KwniRx5POgaYnaXbk0hU+w=="],
|
||||
|
||||
"@types/json-schema": ["@types/json-schema@7.0.15", "", {}, "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA=="],
|
||||
|
||||
"@types/json5": ["@types/json5@0.0.29", "", {}, "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ=="],
|
||||
|
||||
"@types/node": ["@types/node@22.10.7", "", { "dependencies": { "undici-types": "~6.20.0" } }, "sha512-V09KvXxFiutGp6B7XkpaDXlNadZxrzajcY50EuoLIpQ6WWYCSvf19lVIazzfIzQvhUN2HjX12spLojTnhuKlGg=="],
|
||||
"@types/node": ["@types/node@22.13.5", "", { "dependencies": { "undici-types": "~6.20.0" } }, "sha512-+lTU0PxZXn0Dr1NBtC7Y8cR21AJr87dLLU953CWA6pMxxv/UDc7jYAY90upcrie1nRcD6XNG5HOYEDtgW5TxAg=="],
|
||||
|
||||
"@types/prop-types": ["@types/prop-types@15.7.12", "", {}, "sha512-5zvhXYtRNRluoE/jAp4GVsSduVUzNWKkOZrCDBWYtE7biZywwdC2AcEzg+cSMLFRfVgeAFqpfNabiPjxFddV1Q=="],
|
||||
|
||||
@@ -257,7 +257,7 @@
|
||||
|
||||
"builtins": ["builtins@1.0.3", "", {}, "sha512-uYBjakWipfaO/bXI7E8rq6kpwHRZK5cNYrUv2OzZSI/FvmdMyXJ2tG9dKcjEC5YHmHpUAwsargWIZNWdxb/bnQ=="],
|
||||
|
||||
"bun-types": ["bun-types@workspace:packages/bun-types", { "dependencies": { "@types/node": "*", "@types/ws": "~8.5.10" }, "devDependencies": { "@biomejs/biome": "^1.5.3", "@definitelytyped/dtslint": "^0.0.199", "@definitelytyped/eslint-plugin": "^0.0.197", "typescript": "^5.0.2" } }],
|
||||
"bun-types": ["bun-types@workspace:packages/bun-types"],
|
||||
|
||||
"call-bind": ["call-bind@1.0.7", "", { "dependencies": { "es-define-property": "^1.0.0", "es-errors": "^1.3.0", "function-bind": "^1.1.2", "get-intrinsic": "^1.2.4", "set-function-length": "^1.2.1" } }, "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w=="],
|
||||
|
||||
@@ -265,7 +265,7 @@
|
||||
|
||||
"camel-case": ["camel-case@4.1.2", "", { "dependencies": { "pascal-case": "^3.1.2", "tslib": "^2.0.3" } }, "sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw=="],
|
||||
|
||||
"caniuse-lite": ["caniuse-lite@1.0.30001653", "", {}, "sha512-XGWQVB8wFQ2+9NZwZ10GxTYC5hk0Fa+q8cSkr0tgvMhYhMHP/QC+WTgrePMDBWiWc/pV+1ik82Al20XOK25Gcw=="],
|
||||
"caniuse-lite": ["caniuse-lite@1.0.30001695", "", {}, "sha512-vHyLade6wTgI2u1ec3WQBxv+2BrTERV28UXQu9LO6lZ9pYeMk34vjXFLOxo1A4UBA8XTL4njRQZdno/yYaSmWw=="],
|
||||
|
||||
"capital-case": ["capital-case@1.0.4", "", { "dependencies": { "no-case": "^3.0.4", "tslib": "^2.0.3", "upper-case-first": "^2.0.2" } }, "sha512-ds37W8CytHgwnhGGTi88pcPyR15qoNkOpYwmMMfnWqqWgESapLqvDx6huFjQ5vqWSn2Z06173XNA7LtMOeUh1A=="],
|
||||
|
||||
@@ -923,10 +923,14 @@
|
||||
|
||||
"are-we-there-yet/readable-stream": ["readable-stream@2.3.8", "", { "dependencies": { "core-util-is": "~1.0.0", "inherits": "~2.0.3", "isarray": "~1.0.0", "process-nextick-args": "~2.0.0", "safe-buffer": "~5.1.1", "string_decoder": "~1.1.1", "util-deprecate": "~1.0.1" } }, "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA=="],
|
||||
|
||||
"autoprefixer/caniuse-lite": ["caniuse-lite@1.0.30001653", "", {}, "sha512-XGWQVB8wFQ2+9NZwZ10GxTYC5hk0Fa+q8cSkr0tgvMhYhMHP/QC+WTgrePMDBWiWc/pV+1ik82Al20XOK25Gcw=="],
|
||||
|
||||
"babel-code-frame/chalk": ["chalk@1.1.3", "", { "dependencies": { "ansi-styles": "^2.2.1", "escape-string-regexp": "^1.0.2", "has-ansi": "^2.0.0", "strip-ansi": "^3.0.0", "supports-color": "^2.0.0" } }, "sha512-U3lRVLMSlsCfjqYPbLyVv11M9CPW4I728d6TCKMAOJueEeB9/8o+eSsMnxPJD+Q+K909sdESg7C+tIkoH6on1A=="],
|
||||
|
||||
"babel-code-frame/js-tokens": ["js-tokens@3.0.2", "", {}, "sha512-RjTcuD4xjtthQkaWH7dFlH85L+QaVtSoOyGdZ3g6HFhS9dFNDfLyqgm2NFe2X6cQpeFmt0452FJjFG5UameExg=="],
|
||||
|
||||
"browserslist/caniuse-lite": ["caniuse-lite@1.0.30001653", "", {}, "sha512-XGWQVB8wFQ2+9NZwZ10GxTYC5hk0Fa+q8cSkr0tgvMhYhMHP/QC+WTgrePMDBWiWc/pV+1ik82Al20XOK25Gcw=="],
|
||||
|
||||
"eslint-import-resolver-node/debug": ["debug@3.2.7", "", { "dependencies": { "ms": "^2.1.1" } }, "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ=="],
|
||||
|
||||
"eslint-module-utils/debug": ["debug@3.2.7", "", { "dependencies": { "ms": "^2.1.1" } }, "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ=="],
|
||||
|
||||
@@ -2,3 +2,7 @@
|
||||
# https://github.com/oven-sh/bun/issues/16289
|
||||
[test]
|
||||
preload = ["./test/js/node/harness.ts", "./test/preload.ts"]
|
||||
|
||||
[install]
|
||||
# Node.js never auto-installs modules.
|
||||
auto = "disable"
|
||||
|
||||
ci/README.md
@@ -1,84 +0,0 @@
|
||||
# CI
|
||||
|
||||
This directory contains scripts for building CI images for Bun.
|
||||
|
||||
## Building
|
||||
|
||||
### `macOS`
|
||||
|
||||
On macOS, images are built using [`tart`](https://tart.run/), a tool that abstracts over the [`Virtualization.Framework`](https://developer.apple.com/documentation/virtualization) APIs, to run macOS VMs.
|
||||
|
||||
To install the dependencies required, run:
|
||||
|
||||
```sh
|
||||
$ cd ci
|
||||
$ bun run bootstrap
|
||||
```
|
||||
|
||||
To build a vanilla macOS VM, run:
|
||||
|
||||
```sh
|
||||
$ bun run build:darwin-aarch64-vanilla
|
||||
```
|
||||
|
||||
This builds a vanilla macOS VM with the current macOS release on your machine. It runs scripts to disable things like spotlight and siri, but it does not install any software.
|
||||
|
||||
> Note: The image size is 50GB, so make sure you have enough disk space.
|
||||
|
||||
If you want to build a specific macOS release, you can run:
|
||||
|
||||
```sh
|
||||
$ bun run build:darwin-aarch64-vanilla-15
|
||||
```
|
||||
|
||||
> Note: You cannot build a newer release of macOS on an older macOS machine.
|
||||
|
||||
To build a macOS VM with software installed to build and test Bun, run:
|
||||
|
||||
```sh
|
||||
$ bun run build:darwin-aarch64
|
||||
```
|
||||
|
||||
## Running
|
||||
|
||||
### `macOS`
|
||||
|
||||
## How To
|
||||
|
||||
### Support a new macOS release
|
||||
|
||||
1. Visit [`ipsw.me`](https://ipsw.me/VirtualMac2,1) and find the IPSW of the macOS release you want to build.
|
||||
|
||||
2. Add an entry to [`ci/darwin/variables.pkr.hcl`](/ci/darwin/variables.pkr.hcl) with the following format:
|
||||
|
||||
```hcl
|
||||
sonoma = {
|
||||
distro = "sonoma"
|
||||
release = "15"
|
||||
ipsw = "https://updates.cdn-apple.com/..."
|
||||
}
|
||||
```
|
||||
|
||||
3. Add matching scripts to [`ci/package.json`](/ci/package.json) to build the image, then test it:
|
||||
|
||||
```sh
|
||||
$ bun run build:darwin-aarch64-vanilla-15
|
||||
```
|
||||
|
||||
> Note: If you need to troubleshoot the build, you can remove the `headless = true` property from [`ci/darwin/image-vanilla.pkr.hcl`](/ci/darwin/image-vanilla.pkr.hcl) and the VM's screen will be displayed.
|
||||
|
||||
4. Test and build the non-vanilla image:
|
||||
|
||||
```sh
|
||||
$ bun run build:darwin-aarch64-15
|
||||
```
|
||||
|
||||
This will use the vanilla image and run the [`scripts/bootstrap.sh`](/scripts/bootstrap.sh) script to install the required software to build and test Bun.
|
||||
|
||||
5. Publish the images:
|
||||
|
||||
```sh
|
||||
$ bun run login
|
||||
$ bun run publish:darwin-aarch64-vanilla-15
|
||||
$ bun run publish:darwin-aarch64-15
|
||||
```
|
||||
@@ -1,22 +0,0 @@
|
||||
FROM alpine:edge AS build
|
||||
ARG GIT_SHA
|
||||
ENV GIT_SHA=${GIT_SHA}
|
||||
WORKDIR /app/bun
|
||||
ENV HOME=/root
|
||||
|
||||
COPY . .
|
||||
RUN touch $HOME/.bashrc
|
||||
RUN ./scripts/bootstrap.sh
|
||||
RUN . $HOME/.bashrc && bun run build:release
|
||||
|
||||
RUN apk add file
|
||||
RUN file ./build/release/bun
|
||||
RUN ldd ./build/release/bun
|
||||
RUN ./build/release/bun
|
||||
|
||||
RUN cp -R /app/bun/build/* /output
|
||||
|
||||
FROM scratch AS artifact
|
||||
COPY --from=build /output /
|
||||
|
||||
# docker build -f ./ci/alpine/build.Dockerfile --progress=plain --build-arg GIT_SHA="$(git rev-parse HEAD)" --target=artifact --output type=local,dest=./build-alpine .
|
||||
@@ -1,20 +0,0 @@
|
||||
FROM alpine:edge
|
||||
ENV HOME=/root
|
||||
WORKDIR /root
|
||||
COPY ./build-alpine/release/bun .
|
||||
COPY ./test ./test
|
||||
COPY ./scripts ./scripts
|
||||
COPY ./package.json ./package.json
|
||||
COPY ./packages ./packages
|
||||
|
||||
RUN apk update
|
||||
RUN apk add nodejs lsb-release-minimal git python3 npm make g++
|
||||
RUN apk add file
|
||||
|
||||
RUN file /root/bun
|
||||
RUN ldd /root/bun
|
||||
RUN /root/bun
|
||||
|
||||
RUN ./scripts/runner.node.mjs --exec-path /root/bun
|
||||
|
||||
# docker build -f ./ci/alpine/test.Dockerfile --progress=plain .
|
||||
@@ -1,46 +0,0 @@
|
||||
# Generates a vanilla macOS VM with optimized settings for virtualized environments.
|
||||
# See login.sh and optimize.sh for details.
|
||||
|
||||
data "external-raw" "boot-script" {
|
||||
program = ["sh", "-c", templatefile("scripts/boot-image.sh", var)]
|
||||
}
|
||||
|
||||
source "tart-cli" "bun-darwin-aarch64-vanilla" {
|
||||
vm_name = "bun-darwin-aarch64-vanilla-${local.release.distro}-${local.release.release}"
|
||||
from_ipsw = local.release.ipsw
|
||||
cpu_count = local.cpu_count
|
||||
memory_gb = local.memory_gb
|
||||
disk_size_gb = local.disk_size_gb
|
||||
ssh_username = local.username
|
||||
ssh_password = local.password
|
||||
ssh_timeout = "120s"
|
||||
create_grace_time = "30s"
|
||||
boot_command = split("\n", data.external-raw.boot-script.result)
|
||||
headless = true # Disable if you need to debug why the boot_command is not working
|
||||
}
|
||||
|
||||
build {
|
||||
sources = ["source.tart-cli.bun-darwin-aarch64-vanilla"]
|
||||
|
||||
provisioner "file" {
|
||||
content = file("scripts/setup-login.sh")
|
||||
destination = "/tmp/setup-login.sh"
|
||||
}
|
||||
|
||||
provisioner "shell" {
|
||||
inline = ["echo \"${local.password}\" | sudo -S sh -c 'sh /tmp/setup-login.sh \"${local.username}\" \"${local.password}\"'"]
|
||||
}
|
||||
|
||||
provisioner "file" {
|
||||
content = file("scripts/optimize-machine.sh")
|
||||
destination = "/tmp/optimize-machine.sh"
|
||||
}
|
||||
|
||||
provisioner "shell" {
|
||||
inline = ["sudo sh /tmp/optimize-machine.sh"]
|
||||
}
|
||||
|
||||
provisioner "shell" {
|
||||
inline = ["sudo rm -rf /tmp/*"]
|
||||
}
|
||||
}
|
||||
@@ -1,44 +0,0 @@
|
||||
# Generates a macOS VM with software installed to build and test Bun.
|
||||
|
||||
source "tart-cli" "bun-darwin-aarch64" {
|
||||
vm_name = "bun-darwin-aarch64-${local.release.distro}-${local.release.release}"
|
||||
vm_base_name = "bun-darwin-aarch64-vanilla-${local.release.distro}-${local.release.release}"
|
||||
cpu_count = local.cpu_count
|
||||
memory_gb = local.memory_gb
|
||||
disk_size_gb = local.disk_size_gb
|
||||
ssh_username = local.username
|
||||
ssh_password = local.password
|
||||
ssh_timeout = "120s"
|
||||
headless = true
|
||||
}
|
||||
|
||||
build {
|
||||
sources = ["source.tart-cli.bun-darwin-aarch64"]
|
||||
|
||||
provisioner "file" {
|
||||
content = file("../../scripts/bootstrap.sh")
|
||||
destination = "/tmp/bootstrap.sh"
|
||||
}
|
||||
|
||||
provisioner "shell" {
|
||||
inline = ["CI=true sh /tmp/bootstrap.sh"]
|
||||
}
|
||||
|
||||
provisioner "file" {
|
||||
source = "darwin/plists/"
|
||||
destination = "/tmp/"
|
||||
}
|
||||
|
||||
provisioner "shell" {
|
||||
inline = [
|
||||
"sudo ls /tmp/",
|
||||
"sudo mv /tmp/*.plist /Library/LaunchDaemons/",
|
||||
"sudo chown root:wheel /Library/LaunchDaemons/*.plist",
|
||||
"sudo chmod 644 /Library/LaunchDaemons/*.plist",
|
||||
]
|
||||
}
|
||||
|
||||
provisioner "shell" {
|
||||
inline = ["sudo rm -rf /tmp/*"]
|
||||
}
|
||||
}
|
||||
@@ -1,44 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>Label</key>
|
||||
<string>com.buildkite.buildkite-agent</string>
|
||||
|
||||
<key>ProgramArguments</key>
|
||||
<array>
|
||||
<string>/usr/local/bin/buildkite-agent</string>
|
||||
<string>start</string>
|
||||
</array>
|
||||
|
||||
<key>KeepAlive</key>
|
||||
<dict>
|
||||
<key>SuccessfulExit</key>
|
||||
<false />
|
||||
</dict>
|
||||
|
||||
<key>RunAtLoad</key>
|
||||
<true />
|
||||
|
||||
<key>StandardOutPath</key>
|
||||
<string>/var/buildkite-agent/logs/buildkite-agent.log</string>
|
||||
|
||||
<key>StandardErrorPath</key>
|
||||
<string>/var/buildkite-agent/logs/buildkite-agent.log</string>
|
||||
|
||||
<key>EnvironmentVariables</key>
|
||||
<dict>
|
||||
<key>BUILDKITE_AGENT_CONFIG</key>
|
||||
<string>/etc/buildkite-agent/buildkite-agent.cfg</string>
|
||||
</dict>
|
||||
|
||||
<key>LimitLoadToSessionType</key>
|
||||
<array>
|
||||
<string>Aqua</string>
|
||||
<string>LoginWindow</string>
|
||||
<string>Background</string>
|
||||
<string>StandardIO</string>
|
||||
<string>System</string>
|
||||
</array>
|
||||
</dict>
|
||||
</plist>
|
||||
@@ -1,20 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>Label</key>
|
||||
<string>com.tailscale.tailscaled</string>
|
||||
|
||||
<key>ProgramArguments</key>
|
||||
<array>
|
||||
<string>/usr/local/bin/tailscale</string>
|
||||
<string>up</string>
|
||||
<string>--ssh</string>
|
||||
<string>--authkey</string>
|
||||
<string>${TAILSCALE_AUTHKEY}</string>
|
||||
</array>
|
||||
|
||||
<key>RunAtLoad</key>
|
||||
<true />
|
||||
</dict>
|
||||
</plist>
|
||||
@@ -1,16 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>Label</key>
|
||||
<string>com.tailscale.tailscaled</string>
|
||||
|
||||
<key>ProgramArguments</key>
|
||||
<array>
|
||||
<string>/usr/local/bin/tailscaled</string>
|
||||
</array>
|
||||
|
||||
<key>RunAtLoad</key>
|
||||
<true />
|
||||
</dict>
|
||||
</plist>
|
||||
@@ -1,124 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
# This script generates the boot commands for the macOS installer GUI.
|
||||
# It is run on your local machine, not inside the VM.
|
||||
|
||||
# Sources:
|
||||
# - https://github.com/cirruslabs/macos-image-templates/blob/master/templates/vanilla-sequoia.pkr.hcl
|
||||
|
||||
if ! [ "${release}" ] || ! [ "${username}" ] || ! [ "${password}" ]; then
|
||||
echo "Script must be run with variables: release, username, and password" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Hello, hola, bonjour, etc.
|
||||
echo "<wait120s><spacebar>"
|
||||
|
||||
# Select Your Country and Region
|
||||
echo "<wait30s>italiano<esc>english<enter>"
|
||||
echo "<wait30s>united states<leftShiftOn><tab><leftShiftOff><spacebar>"
|
||||
|
||||
# Written and Spoken Languages
|
||||
echo "<wait30s><leftShiftOn><tab><leftShiftOff><spacebar>"
|
||||
|
||||
# Accessibility
|
||||
echo "<wait30s><leftShiftOn><tab><leftShiftOff><spacebar>"
|
||||
|
||||
# Data & Privacy
|
||||
echo "<wait30s><leftShiftOn><tab><leftShiftOff><spacebar>"
|
||||
|
||||
# Migration Assistant
|
||||
echo "<wait30s><tab><tab><tab><spacebar>"
|
||||
|
||||
# Sign In with Your Apple ID
|
||||
echo "<wait30s><leftShiftOn><tab><leftShiftOff><leftShiftOn><tab><leftShiftOff><spacebar>"
|
||||
|
||||
# Are you sure you want to skip signing in with an Apple ID?
|
||||
echo "<wait30s><tab><spacebar>"
|
||||
|
||||
# Terms and Conditions
|
||||
echo "<wait30s><leftShiftOn><tab><leftShiftOff><spacebar>"
|
||||
|
||||
# I have read and agree to the macOS Software License Agreement
|
||||
echo "<wait30s><tab><spacebar>"
|
||||
|
||||
# Create a Computer Account
|
||||
echo "<wait30s>${username}<tab><tab>${password}<tab>${password}<tab><tab><tab><spacebar>"
|
||||
|
||||
# Enable Location Services
|
||||
echo "<wait60s><leftShiftOn><tab><leftShiftOff><spacebar>"
|
||||
|
||||
# Are you sure you don't want to use Location Services?
|
||||
echo "<wait30s><tab><spacebar>"
|
||||
|
||||
# Select Your Time Zone
|
||||
echo "<wait30s><tab>UTC<enter><leftShiftOn><tab><leftShiftOff><spacebar>"
|
||||
|
||||
# Analytics
|
||||
echo "<wait30s><leftShiftOn><tab><leftShiftOff><spacebar>"
|
||||
|
||||
# Screen Time
|
||||
echo "<wait30s><tab><spacebar>"
|
||||
|
||||
# Siri
|
||||
echo "<wait30s><tab><spacebar><leftShiftOn><tab><leftShiftOff><spacebar>"
|
||||
|
||||
# Choose Your Look
|
||||
echo "<wait30s><leftShiftOn><tab><leftShiftOff><spacebar>"
|
||||
|
||||
if [ "${release}" = "13" ] || [ "${release}" = "14" ]; then
|
||||
# Enable Voice Over
|
||||
echo "<wait30s><leftAltOn><f5><leftAltOff><wait5s>v"
|
||||
else
|
||||
# Welcome to Mac
|
||||
echo "<wait30s><spacebar>"
|
||||
|
||||
# Enable Keyboard navigation
|
||||
echo "<wait30s><leftAltOn><spacebar><leftAltOff>Terminal<enter>"
|
||||
echo "<wait30s>defaults write NSGlobalDomain AppleKeyboardUIMode -int 3<enter>"
|
||||
echo "<wait30s><leftAltOn>q<leftAltOff>"
|
||||
fi
|
||||
|
||||
# Now that the installation is done, open "System Settings"
|
||||
echo "<wait30s><leftAltOn><spacebar><leftAltOff>System Settings<enter>"
|
||||
|
||||
# Navigate to "Sharing"
|
||||
echo "<wait30s><leftAltOn>f<leftAltOff>sharing<enter>"
|
||||
|
||||
if [ "${release}" = "13" ]; then
|
||||
# Navigate to "Screen Sharing" and enable it
|
||||
echo "<wait30s><tab><down><spacebar>"
|
||||
|
||||
# Navigate to "Remote Login" and enable it
|
||||
echo "<wait30s><tab><tab><tab><tab><tab><tab><spacebar>"
|
||||
|
||||
# Open "Remote Login" details
|
||||
echo "<wait30s><tab><spacebar>"
|
||||
|
||||
# Enable "Full Disk Access"
|
||||
echo "<wait30s><tab><spacebar>"
|
||||
|
||||
# Click "Done"
|
||||
echo "<wait30s><leftShiftOn><tab><leftShiftOff><leftShiftOn><tab><leftShiftOff><spacebar>"
|
||||
|
||||
# Disable Voice Over
|
||||
echo "<leftAltOn><f5><leftAltOff>"
|
||||
elif [ "${release}" = "14" ]; then
|
||||
# Navigate to "Screen Sharing" and enable it
|
||||
echo "<wait30s><tab><tab><tab><tab><tab><spacebar>"
|
||||
|
||||
# Navigate to "Remote Login" and enable it
|
||||
echo "<wait30s><tab><tab><tab><tab><tab><tab><tab><tab><tab><tab><tab><tab><spacebar>"
|
||||
|
||||
# Disable Voice Over
|
||||
echo "<wait30s><leftAltOn><f5><leftAltOff>"
|
||||
elif [ "${release}" = "15" ]; then
|
||||
# Navigate to "Screen Sharing" and enable it
|
||||
echo "<wait30s><tab><tab><tab><tab><tab><spacebar>"
|
||||
|
||||
# Navigate to "Remote Login" and enable it
|
||||
echo "<wait30s><tab><tab><tab><tab><tab><tab><tab><tab><tab><tab><tab><tab><spacebar>"
|
||||
fi
|
||||
|
||||
# Quit System Settings
|
||||
echo "<wait30s><leftAltOn>q<leftAltOff>"
|
||||
@@ -1,122 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
# This script optimizes macOS for virtualized environments.
|
||||
# It disables things like spotlight, screen saver, and sleep.
|
||||
|
||||
# Sources:
|
||||
# - https://github.com/sickcodes/osx-optimizer
|
||||
# - https://github.com/koding88/MacBook-Optimization-Script
|
||||
# - https://www.macstadium.com/blog/simple-optimizations-for-macos-and-ios-build-agents
|
||||
|
||||
if [ "$(id -u)" != "0" ]; then
|
||||
echo "This script must be run using sudo." >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
execute() {
|
||||
echo "$ $@" >&2
|
||||
if ! "$@"; then
|
||||
echo "Command failed: $@" >&2
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
disable_software_update() {
|
||||
execute softwareupdate --schedule off
|
||||
execute defaults write com.apple.SoftwareUpdate AutomaticDownload -bool false
|
||||
execute defaults write com.apple.SoftwareUpdate AutomaticCheckEnabled -bool false
|
||||
execute defaults write com.apple.SoftwareUpdate ConfigDataInstall -int 0
|
||||
execute defaults write com.apple.SoftwareUpdate CriticalUpdateInstall -int 0
|
||||
execute defaults write com.apple.SoftwareUpdate ScheduleFrequency -int 0
|
||||
execute defaults write com.apple.SoftwareUpdate AutomaticDownload -int 0
|
||||
execute defaults write com.apple.commerce AutoUpdate -bool false
|
||||
execute defaults write com.apple.commerce AutoUpdateRestartRequired -bool false
|
||||
}
|
||||
|
||||
disable_spotlight() {
|
||||
execute mdutil -i off -a
|
||||
execute mdutil -E /
|
||||
}
|
||||
|
||||
disable_siri() {
|
||||
execute launchctl unload -w /System/Library/LaunchAgents/com.apple.Siri.agent.plist
|
||||
execute defaults write com.apple.Siri StatusMenuVisible -bool false
|
||||
execute defaults write com.apple.Siri UserHasDeclinedEnable -bool true
|
||||
execute defaults write com.apple.assistant.support "Assistant Enabled" 0
|
||||
}
|
||||
|
||||
disable_sleep() {
|
||||
execute systemsetup -setsleep Never
|
||||
execute systemsetup -setcomputersleep Never
|
||||
execute systemsetup -setdisplaysleep Never
|
||||
execute systemsetup -setharddisksleep Never
|
||||
}
|
||||
|
||||
disable_screen_saver() {
|
||||
execute defaults write com.apple.screensaver loginWindowIdleTime 0
|
||||
execute defaults write com.apple.screensaver idleTime 0
|
||||
}
|
||||
|
||||
disable_screen_lock() {
|
||||
execute defaults write com.apple.loginwindow DisableScreenLock -bool true
|
||||
}
|
||||
|
||||
disable_wallpaper() {
|
||||
execute defaults write com.apple.loginwindow DesktopPicture ""
|
||||
}
|
||||
|
||||
disable_application_state() {
|
||||
execute defaults write com.apple.loginwindow TALLogoutSavesState -bool false
|
||||
}
|
||||
|
||||
disable_accessibility() {
|
||||
execute defaults write com.apple.Accessibility DifferentiateWithoutColor -int 1
|
||||
execute defaults write com.apple.Accessibility ReduceMotionEnabled -int 1
|
||||
execute defaults write com.apple.universalaccess reduceMotion -int 1
|
||||
execute defaults write com.apple.universalaccess reduceTransparency -int 1
|
||||
}
|
||||
|
||||
disable_dashboard() {
|
||||
execute defaults write com.apple.dashboard mcx-disabled -boolean YES
|
||||
execute killall Dock
|
||||
}
|
||||
|
||||
disable_animations() {
|
||||
execute defaults write NSGlobalDomain NSAutomaticWindowAnimationsEnabled -bool false
|
||||
execute defaults write -g QLPanelAnimationDuration -float 0
|
||||
execute defaults write com.apple.finder DisableAllAnimations -bool true
|
||||
}
|
||||
|
||||
disable_time_machine() {
|
||||
execute tmutil disable
|
||||
}
|
||||
|
||||
enable_performance_mode() {
|
||||
# https://support.apple.com/en-us/101992
|
||||
if ! [ $(nvram boot-args 2>/dev/null | grep -q serverperfmode) ]; then
|
||||
execute nvram boot-args="serverperfmode=1 $(nvram boot-args 2>/dev/null | cut -f 2-)"
|
||||
fi
|
||||
}
|
||||
|
||||
add_terminal_to_desktop() {
|
||||
execute ln -sf /System/Applications/Utilities/Terminal.app ~/Desktop/Terminal
|
||||
}
|
||||
|
||||
main() {
|
||||
disable_software_update
|
||||
disable_spotlight
|
||||
disable_siri
|
||||
disable_sleep
|
||||
disable_screen_saver
|
||||
disable_screen_lock
|
||||
disable_wallpaper
|
||||
disable_application_state
|
||||
disable_accessibility
|
||||
disable_dashboard
|
||||
disable_animations
|
||||
disable_time_machine
|
||||
enable_performance_mode
|
||||
add_terminal_to_desktop
|
||||
}
|
||||
|
||||
main
|
||||
@@ -1,78 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
# This script generates a /etc/kcpassword file to enable auto-login on macOS.
|
||||
# Yes, this stores your password in plain text. Do NOT do this on your local machine.
|
||||
|
||||
# Sources:
|
||||
# - https://github.com/xfreebird/kcpassword/blob/master/kcpassword
|
||||
|
||||
if [ "$(id -u)" != "0" ]; then
|
||||
echo "This script must be run using sudo." >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
execute() {
|
||||
echo "$ $@" >&2
|
||||
if ! "$@"; then
|
||||
echo "Command failed: $@" >&2
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
kcpassword() {
|
||||
passwd="$1"
|
||||
key="7d 89 52 23 d2 bc dd ea a3 b9 1f"
|
||||
passwd_hex=$(printf "%s" "$passwd" | xxd -p | tr -d '\n')
|
||||
|
||||
key_len=33
|
||||
passwd_len=${#passwd_hex}
|
||||
remainder=$((passwd_len % key_len))
|
||||
if [ $remainder -ne 0 ]; then
|
||||
padding=$((key_len - remainder))
|
||||
passwd_hex="${passwd_hex}$(printf '%0*x' $((padding / 2)) 0)"
|
||||
fi
|
||||
|
||||
result=""
|
||||
i=0
|
||||
while [ $i -lt ${#passwd_hex} ]; do
|
||||
for byte in $key; do
|
||||
[ $i -ge ${#passwd_hex} ] && break
|
||||
p="${passwd_hex:$i:2}"
|
||||
r=$(printf '%02x' $((0x$p ^ 0x$byte)))
|
||||
result="${result}${r}"
|
||||
i=$((i + 2))
|
||||
done
|
||||
done
|
||||
|
||||
echo "$result"
|
||||
}
|
||||
|
||||
login() {
|
||||
username="$1"
|
||||
password="$2"
|
||||
|
||||
enable_passwordless_sudo() {
|
||||
execute mkdir -p /etc/sudoers.d/
|
||||
echo "${username} ALL=(ALL) NOPASSWD: ALL" | EDITOR=tee execute visudo "/etc/sudoers.d/${username}-nopasswd"
|
||||
}
|
||||
|
||||
enable_auto_login() {
|
||||
echo "00000000: 1ced 3f4a bcbc ba2c caca 4e82" | execute xxd -r - /etc/kcpassword
|
||||
execute defaults write /Library/Preferences/com.apple.loginwindow autoLoginUser "${username}"
|
||||
}
|
||||
|
||||
disable_screen_lock() {
|
||||
execute sysadminctl -screenLock off -password "${password}"
|
||||
}
|
||||
|
||||
enable_passwordless_sudo
|
||||
enable_auto_login
|
||||
disable_screen_lock
|
||||
}
|
||||
|
||||
if [ $# -ne 2 ]; then
|
||||
echo "Usage: $0 <username> <password>" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
login "$@"
|
||||
@@ -1,78 +0,0 @@
|
||||
packer {
|
||||
required_plugins {
|
||||
tart = {
|
||||
version = ">= 1.12.0"
|
||||
source = "github.com/cirruslabs/tart"
|
||||
}
|
||||
external = {
|
||||
version = ">= 0.0.2"
|
||||
source = "github.com/joomcode/external"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
variable "release" {
|
||||
type = number
|
||||
default = 13
|
||||
}
|
||||
|
||||
variable "username" {
|
||||
type = string
|
||||
default = "admin"
|
||||
}
|
||||
|
||||
variable "password" {
|
||||
type = string
|
||||
default = "admin"
|
||||
}
|
||||
|
||||
variable "cpu_count" {
|
||||
type = number
|
||||
default = 2
|
||||
}
|
||||
|
||||
variable "memory_gb" {
|
||||
type = number
|
||||
default = 4
|
||||
}
|
||||
|
||||
variable "disk_size_gb" {
|
||||
type = number
|
||||
default = 50
|
||||
}
|
||||
|
||||
locals {
|
||||
sequoia = {
|
||||
tier = 1
|
||||
distro = "sequoia"
|
||||
release = "15"
|
||||
ipsw = "https://updates.cdn-apple.com/2024FallFCS/fullrestores/062-78489/BDA44327-C79E-4608-A7E0-455A7E91911F/UniversalMac_15.0_24A335_Restore.ipsw"
|
||||
}
|
||||
|
||||
sonoma = {
|
||||
tier = 2
|
||||
distro = "sonoma"
|
||||
release = "14"
|
||||
ipsw = "https://updates.cdn-apple.com/2023FallFCS/fullrestores/042-54934/0E101AD6-3117-4B63-9BF1-143B6DB9270A/UniversalMac_14.0_23A344_Restore.ipsw"
|
||||
}
|
||||
|
||||
ventura = {
|
||||
tier = 2
|
||||
distro = "ventura"
|
||||
release = "13"
|
||||
ipsw = "https://updates.cdn-apple.com/2022FallFCS/fullrestores/012-92188/2C38BCD1-2BFF-4A10-B358-94E8E28BE805/UniversalMac_13.0_22A380_Restore.ipsw"
|
||||
}
|
||||
|
||||
releases = {
|
||||
15 = local.sequoia
|
||||
14 = local.sonoma
|
||||
13 = local.ventura
|
||||
}
|
||||
|
||||
release = local.releases[var.release]
|
||||
username = var.username
|
||||
password = var.password
|
||||
cpu_count = var.cpu_count
|
||||
memory_gb = var.memory_gb
|
||||
disk_size_gb = var.disk_size_gb
|
||||
}
|
||||
@@ -1,18 +0,0 @@
|
||||
ARG IMAGE=debian:11
|
||||
FROM $IMAGE
|
||||
COPY ./scripts/bootstrap.sh /tmp/bootstrap.sh
|
||||
ENV CI=true
|
||||
RUN sh /tmp/bootstrap.sh && rm -rf /tmp/*
|
||||
WORKDIR /workspace/bun
|
||||
COPY bunfig.toml bunfig.toml
|
||||
COPY package.json package.json
|
||||
COPY CMakeLists.txt CMakeLists.txt
|
||||
COPY cmake/ cmake/
|
||||
COPY scripts/ scripts/
|
||||
COPY patches/ patches/
|
||||
COPY *.zig ./
|
||||
COPY src/ src/
|
||||
COPY packages/ packages/
|
||||
COPY test/ test/
|
||||
RUN bun i
|
||||
RUN bun run build:ci
|
||||
@@ -1,27 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
# This script sets the hostname of the current machine.
|
||||
|
||||
execute() {
|
||||
echo "$ $@" >&2
|
||||
if ! "$@"; then
|
||||
echo "Command failed: $@" >&2
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
main() {
|
||||
if [ "$#" -ne 1 ]; then
|
||||
echo "Usage: $0 <hostname>" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -f "$(which hostnamectl)" ]; then
|
||||
execute hostnamectl set-hostname "$1"
|
||||
else
|
||||
echo "Error: hostnamectl is not installed." >&2
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
main "$@"
|
||||
@@ -1,22 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
# This script starts tailscale on the current machine.
|
||||
|
||||
execute() {
|
||||
echo "$ $@" >&2
|
||||
if ! "$@"; then
|
||||
echo "Command failed: $@" >&2
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
main() {
|
||||
if [ "$#" -ne 1 ]; then
|
||||
echo "Usage: $0 <auth-key>" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
execute tailscale up --reset --ssh --accept-risk=lose-ssh --auth-key="$1"
|
||||
}
|
||||
|
||||
main "$@"
|
||||
@@ -1,27 +0,0 @@
|
||||
{
|
||||
"private": true,
|
||||
"scripts": {
|
||||
"bootstrap": "brew install gh jq cirruslabs/cli/tart cirruslabs/cli/sshpass hashicorp/tap/packer && packer init darwin",
|
||||
"login": "token=$(gh auth token); username=$(gh api user --jq .login); echo \"Login as $username...\"; echo \"$token\" | tart login ghcr.io --username \"$username\" --password-stdin; echo \"$token\" | docker login ghcr.io --username \"$username\" --password-stdin",
|
||||
"fetch:image-name": "echo ghcr.io/oven-sh/bun-vm",
|
||||
"fetch:darwin-version": "echo 1",
|
||||
"fetch:macos-version": "sw_vers -productVersion | cut -d. -f1",
|
||||
"fetch:script-version": "cat ../scripts/bootstrap.sh | grep 'v=' | sed 's/v=\"//;s/\"//' | head -n 1",
|
||||
"build:darwin-aarch64-vanilla": "packer build '-only=*.bun-darwin-aarch64-vanilla' -var release=$(bun fetch:macos-version) darwin/",
|
||||
"build:darwin-aarch64-vanilla-15": "packer build '-only=*.bun-darwin-aarch64-vanilla' -var release=15 darwin/",
|
||||
"build:darwin-aarch64-vanilla-14": "packer build '-only=*.bun-darwin-aarch64-vanilla' -var release=14 darwin/",
|
||||
"build:darwin-aarch64-vanilla-13": "packer build '-only=*.bun-darwin-aarch64-vanilla' -var release=13 darwin/",
|
||||
"build:darwin-aarch64": "packer build '-only=*.bun-darwin-aarch64' -var release=$(bun fetch:macos-version) darwin/",
|
||||
"build:darwin-aarch64-15": "packer build '-only=*.bun-darwin-aarch64' -var release=15 darwin/",
|
||||
"build:darwin-aarch64-14": "packer build '-only=*.bun-darwin-aarch64' -var release=14 darwin/",
|
||||
"build:darwin-aarch64-13": "packer build '-only=*.bun-darwin-aarch64' -var release=13 darwin/",
|
||||
"publish:darwin-aarch64-vanilla": "image=$(tart list --format json | jq -r \".[] | select(.Name | test(\\\"^bun-darwin-aarch64-vanilla-.*-$(bun fetch:macos-version)$\\\")) | .Name\" | head -n 1 | sed 's/bun-//'); tart push \"bun-$image\" \"ghcr.io/oven-sh/bun-vm:$image-v$(bun fetch:darwin-version)\"",
|
||||
"publish:darwin-aarch64-vanilla-15": "tart push bun-darwin-aarch64-vanilla-sequoia-15 \"$(bun fetch:image-name):darwin-aarch64-vanilla-sequoia-15-v$(bun fetch:darwin-version)\"",
|
||||
"publish:darwin-aarch64-vanilla-14": "tart push bun-darwin-aarch64-vanilla-sonoma-14 \"$(bun fetch:image-name):darwin-aarch64-vanilla-sonoma-14-v$(bun fetch:darwin-version)\"",
|
||||
"publish:darwin-aarch64-vanilla-13": "tart push bun-darwin-aarch64-vanilla-ventura-13 \"$(bun fetch:image-name):darwin-aarch64-vanilla-ventura-13-v$(bun fetch:darwin-version)\"",
|
||||
"publish:darwin-aarch64": "image=$(tart list --format json | jq -r \".[] | select(.Name | test(\\\"^bun-darwin-aarch64-.*-$(bun fetch:macos-version)$\\\")) | .Name\" | head -n 1 | sed 's/bun-//'); tart push \"bun-$image\" \"ghcr.io/oven-sh/bun-vm:$image-v$(bun fetch:script-version)\"",
|
||||
"publish:darwin-aarch64-15": "tart push bun-darwin-aarch64-sequoia-15 \"$(bun fetch:image-name):darwin-aarch64-sequoia-15-v$(bun fetch:script-version)\"",
|
||||
"publish:darwin-aarch64-14": "tart push bun-darwin-aarch64-sonoma-14 \"$(bun fetch:image-name):darwin-aarch64-sonoma-14-v$(bun fetch:script-version)\"",
|
||||
"publish:darwin-aarch64-13": "tart push bun-darwin-aarch64-ventura-13 \"$(bun fetch:image-name):darwin-aarch64-ventura-13-v$(bun fetch:script-version)\""
|
||||
}
|
||||
}
|
||||
@@ -44,6 +44,13 @@ if(WIN32)
|
||||
)
|
||||
endif()
|
||||
|
||||
if(ENABLE_ASAN)
|
||||
register_compiler_flags(
|
||||
DESCRIPTION "Enable AddressSanitizer"
|
||||
-fsanitize=address
|
||||
)
|
||||
endif()
|
||||
|
||||
# --- Optimization level ---
|
||||
if(DEBUG)
|
||||
register_compiler_flags(
|
||||
|
||||
@@ -419,7 +419,15 @@ function(register_command)
|
||||
list(APPEND CMD_EFFECTIVE_OUTPUTS ${artifact})
|
||||
if(BUILDKITE)
|
||||
file(RELATIVE_PATH filename ${BUILD_PATH} ${artifact})
|
||||
list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} buildkite-agent artifact upload ${filename})
|
||||
if(filename STREQUAL "libbun-profile.a")
|
||||
# libbun-profile.a is now over 5gb in size, compress it first
|
||||
list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} rm -r ${BUILD_PATH}/codegen)
|
||||
list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} rm -r ${CACHE_PATH})
|
||||
list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} gzip -6 libbun-profile.a)
|
||||
list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} buildkite-agent artifact upload libbun-profile.a.gz)
|
||||
else()
|
||||
list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} buildkite-agent artifact upload ${filename})
|
||||
endif()
|
||||
endif()
|
||||
endforeach()
|
||||
|
||||
|
||||
@@ -86,6 +86,11 @@ optionx(ENABLE_LTO BOOL "If LTO (link-time optimization) should be used" DEFAULT
|
||||
if(LINUX)
|
||||
optionx(ENABLE_VALGRIND BOOL "If Valgrind support should be enabled" DEFAULT OFF)
|
||||
endif()
|
||||
if(DEBUG AND APPLE AND CMAKE_SYSTEM_PROCESSOR MATCHES "arm64|aarch64")
|
||||
optionx(ENABLE_ASAN BOOL "If ASAN support should be enabled" DEFAULT ON)
|
||||
else()
|
||||
optionx(ENABLE_ASAN BOOL "If ASAN support should be enabled" DEFAULT OFF)
|
||||
endif()
|
||||
|
||||
optionx(ENABLE_PRETTIER BOOL "If prettier should be ran" DEFAULT OFF)
|
||||
|
||||
|
||||
@@ -1,16 +1,16 @@
|
||||
get_filename_component(SCRIPT_NAME ${CMAKE_CURRENT_LIST_FILE} NAME)
|
||||
message(STATUS "Running script: ${SCRIPT_NAME}")
|
||||
|
||||
if(NOT ZIG_PATH OR NOT ZIG_COMMIT OR NOT ZIG_VERSION)
|
||||
message(FATAL_ERROR "ZIG_PATH, ZIG_COMMIT, and ZIG_VERSION are required")
|
||||
if(NOT ZIG_PATH OR NOT ZIG_COMMIT)
|
||||
message(FATAL_ERROR "ZIG_PATH and ZIG_COMMIT required")
|
||||
endif()
|
||||
|
||||
if(CMAKE_HOST_APPLE)
|
||||
set(ZIG_OS "macos")
|
||||
set(ZIG_OS_ABI "macos-none")
|
||||
elseif(CMAKE_HOST_WIN32)
|
||||
set(ZIG_OS "windows")
|
||||
set(ZIG_OS_ABI "windows-gnu")
|
||||
elseif(CMAKE_HOST_UNIX)
|
||||
set(ZIG_OS "linux")
|
||||
set(ZIG_OS_ABI "linux-musl")
|
||||
else()
|
||||
message(FATAL_ERROR "Unsupported operating system: ${CMAKE_HOST_SYSTEM_NAME}")
|
||||
endif()
|
||||
@@ -28,17 +28,16 @@ else()
|
||||
message(FATAL_ERROR "Unsupported architecture: ${CMAKE_HOST_SYSTEM_PROCESSOR}")
|
||||
endif()
|
||||
|
||||
set(ZIG_NAME zig-${ZIG_OS}-${ZIG_ARCH}-${ZIG_VERSION})
|
||||
set(ZIG_NAME bootstrap-${ZIG_ARCH}-${ZIG_OS_ABI})
|
||||
set(ZIG_FILENAME ${ZIG_NAME}.zip)
|
||||
|
||||
if(CMAKE_HOST_WIN32)
|
||||
set(ZIG_EXE "zig.exe")
|
||||
set(ZIG_FILENAME ${ZIG_NAME}.zip)
|
||||
else()
|
||||
set(ZIG_EXE "zig")
|
||||
set(ZIG_FILENAME ${ZIG_NAME}.tar.xz)
|
||||
endif()
|
||||
|
||||
set(ZIG_DOWNLOAD_URL https://ziglang.org/download/${ZIG_VERSION}/${ZIG_FILENAME})
|
||||
set(ZIG_DOWNLOAD_URL https://github.com/oven-sh/zig/releases/download/autobuild-${ZIG_COMMIT}/${ZIG_FILENAME})
|
||||
|
||||
execute_process(
|
||||
COMMAND
|
||||
@@ -62,35 +61,8 @@ if(NOT EXISTS ${ZIG_PATH}/${ZIG_EXE})
|
||||
endif()
|
||||
|
||||
# Tools like VSCode need a stable path to the zig executable, on both Unix and Windows
|
||||
# To workaround this, we create a `bun.exe` symlink on Unix.
|
||||
# To workaround this, we create a `zig.exe` & `zls.exe` symlink on Unix.
|
||||
if(NOT WIN32)
|
||||
file(CREATE_LINK ${ZIG_PATH}/${ZIG_EXE} ${ZIG_PATH}/zig.exe SYMBOLIC)
|
||||
file(CREATE_LINK ${ZIG_PATH}/zls ${ZIG_PATH}/zls.exe SYMBOLIC)
|
||||
endif()
|
||||
|
||||
set(ZIG_REPOSITORY_PATH ${ZIG_PATH}/repository)
|
||||
|
||||
execute_process(
|
||||
COMMAND
|
||||
${CMAKE_COMMAND}
|
||||
-DGIT_PATH=${ZIG_REPOSITORY_PATH}
|
||||
-DGIT_REPOSITORY=oven-sh/zig
|
||||
-DGIT_COMMIT=${ZIG_COMMIT}
|
||||
-P ${CMAKE_CURRENT_LIST_DIR}/GitClone.cmake
|
||||
ERROR_STRIP_TRAILING_WHITESPACE
|
||||
ERROR_VARIABLE
|
||||
ZIG_REPOSITORY_ERROR
|
||||
RESULT_VARIABLE
|
||||
ZIG_REPOSITORY_RESULT
|
||||
)
|
||||
|
||||
if(NOT ZIG_REPOSITORY_RESULT EQUAL 0)
|
||||
message(FATAL_ERROR "Download failed: ${ZIG_REPOSITORY_ERROR}")
|
||||
endif()
|
||||
|
||||
file(REMOVE_RECURSE ${ZIG_PATH}/lib)
|
||||
|
||||
# Use copy_directory instead of file(RENAME) because there were
|
||||
# race conditions in CI where some files were not copied.
|
||||
execute_process(COMMAND ${CMAKE_COMMAND} -E copy_directory ${ZIG_REPOSITORY_PATH}/lib ${ZIG_PATH}/lib)
|
||||
|
||||
file(REMOVE_RECURSE ${ZIG_REPOSITORY_PATH})
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
if(DEBUG)
|
||||
set(bun bun-debug)
|
||||
elseif(ENABLE_SMOL)
|
||||
set(bun bun-smol-profile)
|
||||
set(bunStrip bun-smol)
|
||||
# elseif(ENABLE_SMOL)
|
||||
# set(bun bun-smol-profile)
|
||||
# set(bunStrip bun-smol)
|
||||
elseif(ENABLE_VALGRIND)
|
||||
set(bun bun-valgrind)
|
||||
elseif(ENABLE_ASSERTIONS)
|
||||
@@ -179,6 +179,32 @@ register_command(
|
||||
${BUN_NODE_FALLBACKS_OUTPUTS}
|
||||
)
|
||||
|
||||
# An embedded copy of react-refresh is used when the user forgets to install it.
|
||||
# The library is not versioned alongside React.
|
||||
set(BUN_REACT_REFRESH_OUTPUT ${BUN_NODE_FALLBACKS_OUTPUT}/react-refresh.js)
|
||||
register_command(
|
||||
TARGET
|
||||
bun-node-fallbacks-react-refresh
|
||||
COMMENT
|
||||
"Building node-fallbacks/react-refresh.js"
|
||||
CWD
|
||||
${BUN_NODE_FALLBACKS_SOURCE}
|
||||
COMMAND
|
||||
${BUN_EXECUTABLE} build
|
||||
${BUN_NODE_FALLBACKS_SOURCE}/node_modules/react-refresh/cjs/react-refresh-runtime.development.js
|
||||
--outfile=${BUN_REACT_REFRESH_OUTPUT}
|
||||
--target=browser
|
||||
--format=cjs
|
||||
--minify
|
||||
--define:process.env.NODE_ENV=\"'development'\"
|
||||
SOURCES
|
||||
${BUN_NODE_FALLBACKS_SOURCE}/package.json
|
||||
${BUN_NODE_FALLBACKS_SOURCE}/bun.lock
|
||||
${BUN_NODE_FALLBACKS_NODE_MODULES}
|
||||
OUTPUTS
|
||||
${BUN_REACT_REFRESH_OUTPUT}
|
||||
)
|
||||
|
||||
set(BUN_ERROR_CODE_SCRIPT ${CWD}/src/codegen/generate-node-errors.ts)
|
||||
|
||||
set(BUN_ERROR_CODE_SOURCES
|
||||
@@ -404,7 +430,9 @@ set(BUN_OBJECT_LUT_SOURCES
|
||||
${CWD}/src/bun.js/bindings/ZigGlobalObject.lut.txt
|
||||
${CWD}/src/bun.js/bindings/JSBuffer.cpp
|
||||
${CWD}/src/bun.js/bindings/BunProcess.cpp
|
||||
${CWD}/src/bun.js/bindings/ProcessBindingBuffer.cpp
|
||||
${CWD}/src/bun.js/bindings/ProcessBindingConstants.cpp
|
||||
${CWD}/src/bun.js/bindings/ProcessBindingFs.cpp
|
||||
${CWD}/src/bun.js/bindings/ProcessBindingNatives.cpp
|
||||
${CWD}/src/bun.js/modules/NodeModuleModule.cpp
|
||||
${CODEGEN_PATH}/ZigGeneratedClasses.lut.txt
|
||||
@@ -415,7 +443,9 @@ set(BUN_OBJECT_LUT_OUTPUTS
|
||||
${CODEGEN_PATH}/ZigGlobalObject.lut.h
|
||||
${CODEGEN_PATH}/JSBuffer.lut.h
|
||||
${CODEGEN_PATH}/BunProcess.lut.h
|
||||
${CODEGEN_PATH}/ProcessBindingBuffer.lut.h
|
||||
${CODEGEN_PATH}/ProcessBindingConstants.lut.h
|
||||
${CODEGEN_PATH}/ProcessBindingFs.lut.h
|
||||
${CODEGEN_PATH}/ProcessBindingNatives.lut.h
|
||||
${CODEGEN_PATH}/NodeModuleModule.lut.h
|
||||
${CODEGEN_PATH}/ZigGeneratedClasses.lut.h
|
||||
@@ -498,8 +528,7 @@ file(GLOB_RECURSE BUN_ZIG_SOURCES ${CONFIGURE_DEPENDS}
|
||||
|
||||
list(APPEND BUN_ZIG_SOURCES
|
||||
${CWD}/build.zig
|
||||
${CWD}/root.zig
|
||||
${CWD}/root_wasm.zig
|
||||
${CWD}/src/main.zig
|
||||
${BUN_BINDGEN_ZIG_OUTPUTS}
|
||||
)
|
||||
|
||||
@@ -508,6 +537,7 @@ set(BUN_ZIG_GENERATED_SOURCES
|
||||
${BUN_FALLBACK_DECODER_OUTPUT}
|
||||
${BUN_RUNTIME_JS_OUTPUT}
|
||||
${BUN_NODE_FALLBACKS_OUTPUTS}
|
||||
${BUN_REACT_REFRESH_OUTPUT}
|
||||
${BUN_ERROR_CODE_OUTPUTS}
|
||||
${BUN_ZIG_GENERATED_CLASSES_OUTPUTS}
|
||||
${BUN_JAVASCRIPT_OUTPUTS}
|
||||
@@ -592,6 +622,7 @@ file(GLOB BUN_CXX_SOURCES ${CONFIGURE_DEPENDS}
|
||||
${CWD}/src/bun.js/bindings/sqlite/*.cpp
|
||||
${CWD}/src/bun.js/bindings/webcrypto/*.cpp
|
||||
${CWD}/src/bun.js/bindings/webcrypto/*/*.cpp
|
||||
${CWD}/src/bun.js/bindings/node/crypto/*.cpp
|
||||
${CWD}/src/bun.js/bindings/v8/*.cpp
|
||||
${CWD}/src/bun.js/bindings/v8/shim/*.cpp
|
||||
${CWD}/src/bake/*.cpp
|
||||
@@ -707,7 +738,7 @@ endif()
|
||||
# --- C/C++ Properties ---
|
||||
|
||||
set_target_properties(${bun} PROPERTIES
|
||||
CXX_STANDARD 20
|
||||
CXX_STANDARD 23
|
||||
CXX_STANDARD_REQUIRED YES
|
||||
CXX_EXTENSIONS YES
|
||||
CXX_VISIBILITY_PRESET hidden
|
||||
@@ -716,6 +747,18 @@ set_target_properties(${bun} PROPERTIES
|
||||
VISIBILITY_INLINES_HIDDEN YES
|
||||
)
|
||||
|
||||
if (NOT WIN32)
|
||||
# Enable precompiled headers
|
||||
# Only enable in these scenarios:
|
||||
# 1. NOT in CI, OR
|
||||
# 2. In CI AND BUN_CPP_ONLY is enabled
|
||||
if(NOT CI OR (CI AND BUN_CPP_ONLY))
|
||||
target_precompile_headers(${bun} PRIVATE
|
||||
"$<$<COMPILE_LANGUAGE:CXX>:${CWD}/src/bun.js/bindings/root.h>"
|
||||
)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
# --- C/C++ Includes ---
|
||||
|
||||
if(WIN32)
|
||||
@@ -729,6 +772,7 @@ target_include_directories(${bun} PRIVATE
|
||||
${CWD}/src/bun.js/bindings
|
||||
${CWD}/src/bun.js/bindings/webcore
|
||||
${CWD}/src/bun.js/bindings/webcrypto
|
||||
${CWD}/src/bun.js/bindings/node/crypto
|
||||
${CWD}/src/bun.js/bindings/sqlite
|
||||
${CWD}/src/bun.js/bindings/v8
|
||||
${CWD}/src/bun.js/modules
|
||||
@@ -827,6 +871,15 @@ if(NOT WIN32)
|
||||
)
|
||||
endif()
|
||||
|
||||
if (ENABLE_ASAN)
|
||||
target_compile_options(${bun} PUBLIC
|
||||
-fsanitize=address
|
||||
)
|
||||
target_link_libraries(${bun} PUBLIC
|
||||
-fsanitize=address
|
||||
)
|
||||
endif()
|
||||
|
||||
target_compile_options(${bun} PUBLIC
|
||||
-Werror=return-type
|
||||
-Werror=return-stack-address
|
||||
@@ -860,6 +913,10 @@ if(NOT WIN32)
|
||||
-Werror
|
||||
)
|
||||
endif()
|
||||
else()
|
||||
target_compile_options(${bun} PUBLIC
|
||||
-Wno-nullability-completeness
|
||||
)
|
||||
endif()
|
||||
|
||||
# --- Linker options ---
|
||||
@@ -902,28 +959,17 @@ endif()
|
||||
|
||||
if(LINUX)
|
||||
if(NOT ABI STREQUAL "musl")
|
||||
# on arm64
|
||||
if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm|ARM|arm64|ARM64|aarch64|AARCH64")
|
||||
target_link_options(${bun} PUBLIC
|
||||
-Wl,--wrap=exp
|
||||
-Wl,--wrap=expf
|
||||
-Wl,--wrap=fcntl64
|
||||
-Wl,--wrap=log
|
||||
-Wl,--wrap=log2
|
||||
-Wl,--wrap=log2f
|
||||
-Wl,--wrap=logf
|
||||
-Wl,--wrap=pow
|
||||
-Wl,--wrap=powf
|
||||
)
|
||||
else()
|
||||
target_link_options(${bun} PUBLIC
|
||||
-Wl,--wrap=exp
|
||||
-Wl,--wrap=expf
|
||||
-Wl,--wrap=log2f
|
||||
-Wl,--wrap=logf
|
||||
-Wl,--wrap=powf
|
||||
)
|
||||
endif()
|
||||
target_link_options(${bun} PUBLIC
|
||||
-Wl,--wrap=exp
|
||||
-Wl,--wrap=expf
|
||||
-Wl,--wrap=fcntl64
|
||||
-Wl,--wrap=log
|
||||
-Wl,--wrap=log2
|
||||
-Wl,--wrap=log2f
|
||||
-Wl,--wrap=logf
|
||||
-Wl,--wrap=pow
|
||||
-Wl,--wrap=powf
|
||||
)
|
||||
endif()
|
||||
|
||||
if(NOT ABI STREQUAL "musl")
|
||||
@@ -1052,6 +1098,7 @@ add_custom_target(dependencies DEPENDS ${BUN_TARGETS})
|
||||
|
||||
if(APPLE)
|
||||
target_link_libraries(${bun} PRIVATE icucore resolv)
|
||||
target_compile_definitions(${bun} PRIVATE U_DISABLE_RENAMING=1)
|
||||
endif()
|
||||
|
||||
if(USE_STATIC_SQLITE)
|
||||
|
||||
@@ -4,7 +4,7 @@ register_repository(
|
||||
REPOSITORY
|
||||
cloudflare/lol-html
|
||||
COMMIT
|
||||
4f8becea13a0021c8b71abd2dcc5899384973b66
|
||||
67f1d4ffd6b74db7e053fb129dcce620193c180d
|
||||
)
|
||||
|
||||
set(LOLHTML_CWD ${VENDOR_PATH}/lolhtml/c-api)
|
||||
|
||||
@@ -4,7 +4,7 @@ register_repository(
|
||||
REPOSITORY
|
||||
litespeedtech/ls-hpack
|
||||
COMMIT
|
||||
32e96f10593c7cb8553cd8c9c12721100ae9e924
|
||||
8905c024b6d052f083a3d11d0a169b3c2735c8a1
|
||||
)
|
||||
|
||||
if(WIN32)
|
||||
|
||||
@@ -4,7 +4,7 @@ register_repository(
|
||||
REPOSITORY
|
||||
oven-sh/mimalloc
|
||||
COMMIT
|
||||
82b2c2277a4d570187c07b376557dc5bde81d848
|
||||
1beadf9651a7bfdec6b5367c380ecc3fe1c40d1a
|
||||
)
|
||||
|
||||
set(MIMALLOC_CMAKE_ARGS
|
||||
@@ -19,6 +19,10 @@ set(MIMALLOC_CMAKE_ARGS
|
||||
-DMI_SKIP_COLLECT_ON_EXIT=ON
|
||||
)
|
||||
|
||||
if(ENABLE_ASAN)
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_TRACK_ASAN=ON)
|
||||
endif()
|
||||
|
||||
if(DEBUG)
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_DEBUG_FULL=ON)
|
||||
endif()
|
||||
|
||||
@@ -120,6 +120,9 @@ foreach(i RANGE ${BUILDKITE_JOBS_MAX_INDEX})
|
||||
endif()
|
||||
|
||||
if(BUILDKITE)
|
||||
if(BUILDKITE_ARTIFACT_PATH STREQUAL "libbun-profile.a")
|
||||
set(BUILDKITE_ARTIFACT_PATH libbun-profile.a.gz)
|
||||
endif()
|
||||
set(BUILDKITE_DOWNLOAD_COMMAND buildkite-agent artifact download ${BUILDKITE_ARTIFACT_PATH} . --build ${BUILDKITE_BUILD_UUID} --step ${BUILDKITE_JOB_ID})
|
||||
else()
|
||||
set(BUILDKITE_DOWNLOAD_COMMAND curl -L -o ${BUILDKITE_ARTIFACT_PATH} ${BUILDKITE_ARTIFACTS_URL}/${BUILDKITE_ARTIFACT_ID})
|
||||
@@ -135,6 +138,20 @@ foreach(i RANGE ${BUILDKITE_JOBS_MAX_INDEX})
|
||||
OUTPUT
|
||||
${BUILD_PATH}/${BUILDKITE_ARTIFACT_PATH}
|
||||
)
|
||||
if(BUILDKITE_ARTIFACT_PATH STREQUAL "libbun-profile.a.gz")
|
||||
add_custom_command(
|
||||
COMMENT
|
||||
"Unpacking libbun-profile.a.gz"
|
||||
VERBATIM COMMAND
|
||||
gunzip libbun-profile.a.gz
|
||||
WORKING_DIRECTORY
|
||||
${BUILD_PATH}
|
||||
OUTPUT
|
||||
${BUILD_PATH}/libbun-profile.a
|
||||
DEPENDS
|
||||
${BUILD_PATH}/libbun-profile.a.gz
|
||||
)
|
||||
endif()
|
||||
endforeach()
|
||||
|
||||
list(APPEND BUILDKITE_JOBS_MATCH ${BUILDKITE_JOB_NAME})
|
||||
|
||||
@@ -9,6 +9,14 @@ find_command(
|
||||
>=1.1.26
|
||||
)
|
||||
|
||||
if (NOT CI)
|
||||
# If node.js is not installed, it is extremely easy to make this path point to
|
||||
# a tempdir such as /private/tmp/bun-node-ce532901c/bun, which may cause this
# CMake configuration to break after the tempdir is cleaned up (ex. after reboot).
|
||||
get_filename_component(BUN_EXECUTABLE ${BUN_EXECUTABLE} REALPATH)
|
||||
set(BUN_EXECUTABLE ${BUN_EXECUTABLE} CACHE FILEPATH "Bun executable" FORCE)
|
||||
endif()
|
||||
|
||||
# If this is not set, some advanced features are not checked.
|
||||
# https://github.com/oven-sh/bun/blob/cd7f6a1589db7f1e39dc4e3f4a17234afbe7826c/src/bun.js/javascript.zig#L1069-L1072
|
||||
setenv(BUN_GARBAGE_COLLECTOR_LEVEL 1)
|
||||
|
||||
@@ -36,7 +36,8 @@ endif()
|
||||
string(REPLACE "\n" ";" GIT_CHANGED_SOURCES "${GIT_DIFF}")
|
||||
|
||||
if(CI)
|
||||
setx(GIT_CHANGED_SOURCES ${GIT_CHANGED_SOURCES})
|
||||
set(GIT_CHANGED_SOURCES "${GIT_CHANGED_SOURCES}")
|
||||
message(STATUS "Set GIT_CHANGED_SOURCES: ${GIT_CHANGED_SOURCES}")
|
||||
endif()
|
||||
|
||||
list(TRANSFORM GIT_CHANGED_SOURCES PREPEND ${CWD}/)
|
||||
|
||||
@@ -12,7 +12,7 @@ if(NOT ENABLE_LLVM)
|
||||
return()
|
||||
endif()
|
||||
|
||||
set(DEFAULT_LLVM_VERSION "18.1.8")
|
||||
set(DEFAULT_LLVM_VERSION "19.1.7")
|
||||
|
||||
optionx(LLVM_VERSION STRING "The version of LLVM to use" DEFAULT ${DEFAULT_LLVM_VERSION})
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@ option(WEBKIT_VERSION "The version of WebKit to use")
|
||||
option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading")
|
||||
|
||||
if(NOT WEBKIT_VERSION)
|
||||
set(WEBKIT_VERSION e6cb36cabed465c28c7bcbb28d86e8466ea36e8d)
|
||||
set(WEBKIT_VERSION 91bf2baced1b1309c7e05f19177c97fefec20976)
|
||||
endif()
|
||||
|
||||
string(SUBSTRING ${WEBKIT_VERSION} 0 16 WEBKIT_VERSION_PREFIX)
|
||||
@@ -79,6 +79,10 @@ else()
|
||||
set(WEBKIT_SUFFIX "${WEBKIT_SUFFIX}")
|
||||
endif()
|
||||
|
||||
if(ENABLE_ASAN)
|
||||
set(WEBKIT_SUFFIX "${WEBKIT_SUFFIX}-asan")
|
||||
endif()
|
||||
|
||||
set(WEBKIT_NAME bun-webkit-${WEBKIT_OS}-${WEBKIT_ARCH}${WEBKIT_SUFFIX})
|
||||
set(WEBKIT_FILENAME ${WEBKIT_NAME}.tar.gz)
|
||||
setx(WEBKIT_DOWNLOAD_URL https://github.com/oven-sh/WebKit/releases/download/autobuild-${WEBKIT_VERSION}/${WEBKIT_FILENAME})
|
||||
|
||||
@@ -20,8 +20,7 @@ else()
|
||||
unsupported(CMAKE_SYSTEM_NAME)
|
||||
endif()
|
||||
|
||||
optionx(ZIG_VERSION STRING "The zig version of the compiler to download" DEFAULT "0.13.0")
|
||||
optionx(ZIG_COMMIT STRING "The zig commit to use in oven-sh/zig" DEFAULT "131a009ba2eb127a3447d05b9e12f710429aa5ee")
|
||||
set(ZIG_COMMIT "cd1995944508e4c946deb75bd70947d302e0db37")
|
||||
optionx(ZIG_TARGET STRING "The zig target to use" DEFAULT ${DEFAULT_ZIG_TARGET})
|
||||
|
||||
if(CMAKE_BUILD_TYPE STREQUAL "Release")
|
||||
@@ -77,8 +76,8 @@ register_command(
|
||||
COMMAND
|
||||
${CMAKE_COMMAND}
|
||||
-DZIG_PATH=${ZIG_PATH}
|
||||
-DZIG_VERSION=${ZIG_VERSION}
|
||||
-DZIG_COMMIT=${ZIG_COMMIT}
|
||||
-DENABLE_ASAN=${ENABLE_ASAN}
|
||||
-P ${CWD}/cmake/scripts/DownloadZig.cmake
|
||||
SOURCES
|
||||
${CWD}/cmake/scripts/DownloadZig.cmake
|
||||
|
||||
@@ -1,4 +1,4 @@
FROM debian:bullseye-slim AS build
FROM debian:bookworm-slim AS build

# https://github.com/oven-sh/bun/releases
ARG BUN_VERSION=latest

@@ -1,4 +1,4 @@
FROM debian:bullseye-slim AS build
FROM debian:bookworm-slim AS build

# https://github.com/oven-sh/bun/releases
ARG BUN_VERSION=latest

@@ -1,4 +1,4 @@
FROM debian:bullseye-slim AS build
FROM debian:bookworm-slim AS build

# https://github.com/oven-sh/bun/releases
ARG BUN_VERSION=latest
@@ -67,14 +67,18 @@ ARG BUN_INSTALL_BIN=/usr/local/bin
ENV BUN_INSTALL_BIN=${BUN_INSTALL_BIN}

COPY --from=build /usr/local/bin/bun /usr/local/bin/
RUN mkdir -p /usr/local/bun-node-fallback-bin && ln -s /usr/local/bin/bun /usr/local/bun-node-fallback-bin/node
ENV PATH "${PATH}:/usr/local/bun-node-fallback-bin"

# Temporarily use the `build`-stage image binaries to create a symlink:
RUN --mount=type=bind,from=build,source=/usr/bin,target=/usr/bin \
    --mount=type=bind,from=build,source=/bin,target=/bin <<EOF
    --mount=type=bind,from=build,source=/bin,target=/bin \
    --mount=type=bind,from=build,source=/usr/lib,target=/usr/lib \
    --mount=type=bind,from=build,source=/lib,target=/lib \
    <<EOF
ln -s /usr/local/bin/bun /usr/local/bin/bunx
which bunx
mkdir -p /usr/local/bun-node-fallback-bin
ln -s /usr/local/bin/bun /usr/local/bun-node-fallback-bin/nodebun
EOF

ENTRYPOINT ["/usr/local/bin/bun"]

@@ -204,7 +204,7 @@ To customize the TLS validation, use the `checkServerIdentity` option in `tls`
await fetch("https://example.com", {
  tls: {
    checkServerIdentity: (hostname, peerCertificate) => {
      // Return an error if the certificate is invalid
      // Return an Error if the certificate is invalid
    },
  },
});
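
For reference, a minimal sketch of a complete callback, assuming the peer certificate object mirrors Node's `tls.PeerCertificate` and that the pinned fingerprint below is a placeholder:

```ts
await fetch("https://example.com", {
  tls: {
    checkServerIdentity: (hostname, peerCertificate) => {
      // Hypothetical pinned SHA-256 fingerprint of the expected certificate.
      const EXPECTED_FINGERPRINT = "AA:BB:CC:...";
      if (peerCertificate.fingerprint256 !== EXPECTED_FINGERPRINT) {
        // Returning an Error rejects the connection.
        return new Error(`Unexpected certificate for ${hostname}`);
      }
      // Returning undefined accepts the connection.
      return undefined;
    },
  },
});
```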

@@ -1,3 +1,7 @@
{% callout %}
**⚠️ Warning** — `bun:ffi` is **experimental**, with known bugs and limitations, and should not be relied on in production. The most stable way to interact with native code from Bun is to write a [Node-API module](/docs/api/node-api).
{% /callout %}

Use the built-in `bun:ffi` module to efficiently call native libraries from JavaScript. It works with languages that support the C ABI (Zig, Rust, C/C++, C#, Nim, Kotlin, etc).

## dlopen usage (`bun:ffi`)
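
For context, a minimal `dlopen` sketch; the `libsqlite3` library name is an illustrative assumption, and any library exposing a C ABI works the same way:

```ts
import { dlopen, FFIType, suffix } from "bun:ffi";

// `suffix` is "dylib", "so", or "dll" depending on the platform.
const { symbols, close } = dlopen(`libsqlite3.${suffix}`, {
  sqlite3_libversion: {
    args: [],
    returns: FFIType.cstring,
  },
});

console.log("SQLite version:", symbols.sqlite3_libversion());
close(); // release the library handle when done
```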

@@ -298,7 +302,11 @@ setTimeout(() => {
When you're done with a JSCallback, you should call `close()` to free the memory.
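
A short sketch of that lifecycle (the callback signature here is illustrative):

```ts
import { JSCallback, FFIType } from "bun:ffi";

const onEvent = new JSCallback(
  id => {
    console.log("event", id);
  },
  {
    args: [FFIType.i32],
    returns: FFIType.void,
  },
);

// ... pass onEvent.ptr to the native library ...

// Once the native code no longer holds the pointer, free the callback:
onEvent.close();
```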

### Experimental thread-safe callbacks

`JSCallback` has experimental support for thread-safe callbacks. This will be needed if you pass a callback function into a different thread from it's instantiation context. You can enable it with the optional `threadsafe` option flag.

`JSCallback` has experimental support for thread-safe callbacks. This will be needed if you pass a callback function into a different thread from its instantiation context. You can enable it with the optional `threadsafe` parameter.

Currently, thread-safe callbacks work best when run from another thread that is running JavaScript code, i.e. a [`Worker`](/docs/api/workers). A future version of Bun will enable them to be called from any thread (such as new threads spawned by your native library that Bun is not aware of).

```ts
const searchIterator = new JSCallback(
  (ptr, length) => /hello/.test(new CString(ptr, length)),
@@ -309,7 +317,6 @@ const searchIterator = new JSCallback(
  },
);
```

Be aware that there are still cases where thread-safe callbacks do not work reliably.

{% callout %}

524
docs/api/http.md
@@ -8,19 +8,421 @@ To start a high-performance HTTP server with a clean API, the recommended approa

## `Bun.serve()`

Start an HTTP server in Bun with `Bun.serve`.
Use `Bun.serve` to start an HTTP server in Bun.

```ts
Bun.serve({
  // `routes` requires Bun v1.2.3+
  routes: {
    // Static routes
    "/api/status": new Response("OK"),

    // Dynamic routes
    "/users/:id": req => {
      return new Response(`Hello User ${req.params.id}!`);
    },

    // Per-HTTP method handlers
    "/api/posts": {
      GET: () => new Response("List posts"),
      POST: async req => {
        const body = await req.json();
        return Response.json({ created: true, ...body });
      },
    },

    // Wildcard route for all routes that start with "/api/" and aren't otherwise matched
    "/api/*": Response.json({ message: "Not found" }, { status: 404 }),

    // Redirect from /blog/hello to /blog/hello/world
    "/blog/hello": Response.redirect("/blog/hello/world"),

    // Serve a file by buffering it in memory
    "/favicon.ico": new Response(await Bun.file("./favicon.ico").bytes(), {
      headers: {
        "Content-Type": "image/x-icon",
      },
    }),
  },

  // (optional) fallback for unmatched routes:
  // Required if Bun's version < 1.2.3
  fetch(req) {
    return new Response("Bun!");
    return new Response("Not Found", { status: 404 });
  },
});
```

### Routing

Routes in `Bun.serve()` receive a `BunRequest` (which extends [`Request`](https://developer.mozilla.org/en-US/docs/Web/API/Request)) and return a [`Response`](https://developer.mozilla.org/en-US/docs/Web/API/Response) or `Promise<Response>`. This makes it easier to use the same code for both sending & receiving HTTP requests.

```ts
// Simplified for brevity
interface BunRequest<T extends string> extends Request {
  params: Record<T, string>;
}
```
#### Async/await in routes

You can use async/await in route handlers to return a `Promise<Response>`.

```ts
import { sql, serve } from "bun";

serve({
  port: 3001,
  routes: {
    "/api/version": async () => {
      const [version] = await sql`SELECT version()`;
      return Response.json(version);
    },
  },
});
```

#### Promise in routes

You can also return a `Promise<Response>` from a route handler.

```ts
import { sql, serve } from "bun";

serve({
  routes: {
    "/api/version": () => {
      return new Promise(resolve => {
        setTimeout(async () => {
          const [version] = await sql`SELECT version()`;
          resolve(Response.json(version));
        }, 100);
      });
    },
  },
});
```

#### Type-safe route parameters

TypeScript parses route parameters when passed as a string literal, so that your editor will show autocomplete when accessing `request.params`.

```ts
import type { BunRequest } from "bun";

Bun.serve({
  routes: {
    // TypeScript knows the shape of params when passed as a string literal
    "/orgs/:orgId/repos/:repoId": req => {
      const { orgId, repoId } = req.params;
      return Response.json({ orgId, repoId });
    },

    "/orgs/:orgId/repos/:repoId/settings": (
      // optional: you can explicitly pass a type to BunRequest:
      req: BunRequest<"/orgs/:orgId/repos/:repoId/settings">,
    ) => {
      const { orgId, repoId } = req.params;
      return Response.json({ orgId, repoId });
    },
  },
});
```

Percent-encoded route parameter values are automatically decoded. Unicode characters are supported. Invalid unicode is replaced with the Unicode replacement character `U+FFFD`.
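
For example (a small illustrative route; the path is hypothetical):

```ts
Bun.serve({
  routes: {
    "/files/:name": req => {
      // GET /files/hello%20w%C3%B6rld → req.params.name === "hello wörld"
      return new Response(`Requested file: ${req.params.name}`);
    },
  },
});
```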

### Static responses

Routes can also be `Response` objects (without a handler function). `Bun.serve()` optimizes them for zero-allocation dispatch, which is perfect for health checks, redirects, and fixed content:

```ts
Bun.serve({
  routes: {
    // Health checks
    "/health": new Response("OK"),
    "/ready": new Response("Ready", {
      headers: {
        // Pass custom headers
        "X-Ready": "1",
      },
    }),

    // Redirects
    "/blog": Response.redirect("https://bun.sh/blog"),

    // API responses
    "/api/config": Response.json({
      version: "1.0.0",
      env: "production",
    }),
  },
});
```

Static responses do not allocate additional memory after initialization. You can generally expect at least a 15% performance improvement over manually returning a `Response` object.
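
As a rough illustration of the difference (both routes return the same body; only the dispatch path differs):

```ts
Bun.serve({
  routes: {
    // Built once at startup and dispatched without per-request allocations.
    "/health-static": new Response("OK"),

    // Allocates a new Response (and related objects) on every request.
    "/health-dynamic": () => new Response("OK"),
  },
});
```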

Static route responses are cached for the lifetime of the server object. To reload static routes, call `server.reload(options)`.

```ts
const server = Bun.serve({
  static: {
    "/api/time": new Response(new Date().toISOString()),
  },

  fetch(req) {
    return new Response("404!");
  },
});

// Update the time every second.
setInterval(() => {
  server.reload({
    static: {
      "/api/time": new Response(new Date().toISOString()),
    },

    fetch(req) {
      return new Response("404!");
    },
  });
}, 1000);
```

Reloading routes only impacts the next request. In-flight requests continue to use the old routes. Once in-flight requests to the old routes finish, the old routes are freed from memory.

To simplify error handling, static routes do not support streaming response bodies from `ReadableStream` or an `AsyncIterator`. Fortunately, you can still buffer the response in memory first:

```ts
const time = await fetch("https://api.example.com/v1/data");
// Buffer the response in memory first.
const blob = await time.blob();

const server = Bun.serve({
  static: {
    "/api/data": new Response(blob),
  },

  fetch(req) {
    return new Response("404!");
  },
});
```

### Route precedence

Routes are matched in order of specificity:

1. Exact routes (`/users/all`)
2. Parameter routes (`/users/:id`)
3. Wildcard routes (`/users/*`)
4. Global catch-all (`/*`)

```ts
Bun.serve({
  routes: {
    // Most specific first
    "/api/users/me": () => new Response("Current user"),
    "/api/users/:id": req => new Response(`User ${req.params.id}`),
    "/api/*": () => new Response("API catch-all"),
    "/*": () => new Response("Global catch-all"),
  },
});
```

### Per-HTTP Method Routes

Route handlers can be specialized by HTTP method:

```ts
Bun.serve({
  routes: {
    "/api/posts": {
      // Different handlers per method
      GET: () => new Response("List posts"),
      POST: async req => {
        const post = await req.json();
        return Response.json({ id: crypto.randomUUID(), ...post });
      },
      PUT: async req => {
        const updates = await req.json();
        return Response.json({ updated: true, ...updates });
      },
      DELETE: () => new Response(null, { status: 204 }),
    },
  },
});
```

You can pass any of the following methods:

| Method    | Use case example                |
| --------- | ------------------------------- |
| `GET`     | Fetch a resource                |
| `HEAD`    | Check if a resource exists      |
| `OPTIONS` | Get allowed HTTP methods (CORS) |
| `DELETE`  | Delete a resource               |
| `PATCH`   | Update a resource               |
| `POST`    | Create a resource               |
| `PUT`     | Update a resource               |

When passing a function instead of an object, all methods will be handled by that function:

```ts
const server = Bun.serve({
  routes: {
    "/api/version": () => Response.json({ version: "1.0.0" }),
  },
});

await fetch(new URL("/api/version", server.url));
await fetch(new URL("/api/version", server.url), { method: "PUT" });
// ... etc
```

### Hot Route Reloading

Update routes without server restarts using `server.reload()`:

```ts
const server = Bun.serve({
  routes: {
    "/api/version": () => Response.json({ version: "1.0.0" }),
  },
});

// Deploy new routes without downtime
server.reload({
  routes: {
    "/api/version": () => Response.json({ version: "2.0.0" }),
  },
});
```

### Error Handling

Bun provides structured error handling for routes:

```ts
Bun.serve({
  routes: {
    // Errors are caught automatically
    "/api/risky": () => {
      throw new Error("Something went wrong");
    },
  },
  // Global error handler
  error(error) {
    console.error(error);
    return new Response(`Internal Error: ${error.message}`, {
      status: 500,
      headers: {
        "Content-Type": "text/plain",
      },
    });
  },
});
```

### HTML imports

To add a client-side single-page app, you can use an HTML import:

```ts
import myReactSinglePageApp from "./index.html";

Bun.serve({
  routes: {
    "/": myReactSinglePageApp,
  },
});
```

HTML imports don't just serve HTML: they're backed by a full-featured frontend bundler, transpiler, and toolkit built using Bun's [bundler](https://bun.sh/docs/bundler), JavaScript transpiler, and CSS parser.

You can use this to build a full-featured frontend with React, TypeScript, Tailwind CSS, and more. Check out [/docs/bundler/fullstack](https://bun.sh/docs/bundler/fullstack) to learn more.

### Practical example: REST API

Here's a basic database-backed REST API using Bun's router with zero dependencies:

{% codetabs %}

```ts#server.ts
import type { Post } from "./types.ts";
import { Database } from "bun:sqlite";

const db = new Database("posts.db");
db.exec(`
  CREATE TABLE IF NOT EXISTS posts (
    id TEXT PRIMARY KEY,
    title TEXT NOT NULL,
    content TEXT NOT NULL,
    created_at TEXT NOT NULL
  )
`);

Bun.serve({
  routes: {
    // List posts
    "/api/posts": {
      GET: () => {
        const posts = db.query("SELECT * FROM posts").all();
        return Response.json(posts);
      },

      // Create post
      POST: async req => {
        const post: Omit<Post, "id" | "created_at"> = await req.json();
        const id = crypto.randomUUID();

        db.query(
          `INSERT INTO posts (id, title, content, created_at)
           VALUES (?, ?, ?, ?)`,
        ).run(id, post.title, post.content, new Date().toISOString());

        return Response.json({ id, ...post }, { status: 201 });
      },
    },

    // Get post by ID
    "/api/posts/:id": req => {
      const post = db
        .query("SELECT * FROM posts WHERE id = ?")
        .get(req.params.id);

      if (!post) {
        return new Response("Not Found", { status: 404 });
      }

      return Response.json(post);
    },
  },

  error(error) {
    console.error(error);
    return new Response("Internal Server Error", { status: 500 });
  },
});
```

```ts#types.ts
export interface Post {
  id: string;
  title: string;
  content: string;
  created_at: string;
}
```

{% /codetabs %}

### Routing performance

`Bun.serve()`'s router builds on top of uWebSockets' [tree-based approach](https://github.com/oven-sh/bun/blob/0d1a00fa0f7830f8ecd99c027fce8096c9d459b6/packages/bun-uws/src/HttpRouter.h#L57-L64), adding [SIMD-accelerated route parameter decoding](https://github.com/oven-sh/bun/blob/main/src/bun.js/bindings/decodeURIComponentSIMD.cpp#L21-L271) and [JavaScriptCore structure caching](https://github.com/oven-sh/bun/blob/main/src/bun.js/bindings/ServerRouteList.cpp#L100-L101) to push the performance limits of what modern hardware allows.

### `fetch` request handler

The `fetch` handler handles incoming requests. It receives a [`Request`](https://developer.mozilla.org/en-US/docs/Web/API/Request) object and returns a [`Response`](https://developer.mozilla.org/en-US/docs/Web/API/Response) or `Promise<Response>`.
The `fetch` handler handles incoming requests that weren't matched by any route. It receives a [`Request`](https://developer.mozilla.org/en-US/docs/Web/API/Request) object and returns a [`Response`](https://developer.mozilla.org/en-US/docs/Web/API/Response) or [`Promise<Response>`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise).

```ts
Bun.serve({
@@ -70,116 +472,6 @@ const server = Bun.serve({
});
```

### Static routes

Use the `static` option to serve static `Response` objects by route.

```ts
// Bun v1.1.27+ required
Bun.serve({
  static: {
    // health-check endpoint
    "/api/health-check": new Response("All good!"),

    // redirect from /old-link to /new-link
    "/old-link": Response.redirect("/new-link", 301),

    // serve static text
    "/": new Response("Hello World"),

    // serve a file by buffering it in memory
    "/index.html": new Response(await Bun.file("./index.html").bytes(), {
      headers: {
        "Content-Type": "text/html",
      },
    }),
    "/favicon.ico": new Response(await Bun.file("./favicon.ico").bytes(), {
      headers: {
        "Content-Type": "image/x-icon",
      },
    }),

    // serve JSON
    "/api/version.json": Response.json({ version: "1.0.0" }),
  },

  fetch(req) {
    return new Response("404!");
  },
});
```

Static routes support headers, status code, and other `Response` options.

```ts
Bun.serve({
  static: {
    "/api/time": new Response(new Date().toISOString(), {
      headers: {
        "X-Custom-Header": "Bun!",
      },
    }),
  },

  fetch(req) {
    return new Response("404!");
  },
});
```

Static routes can serve Response bodies faster than `fetch` handlers because they don't create a `Request` object, an `AbortSignal`, or additional `Response` objects per request. The only per-request memory allocation is the TCP/TLS socket data.

{% note %}
`static` is experimental
{% /note %}

Static route responses are cached for the lifetime of the server object. To reload static routes, call `server.reload(options)`.

```ts
const server = Bun.serve({
  static: {
    "/api/time": new Response(new Date().toISOString()),
  },

  fetch(req) {
    return new Response("404!");
  },
});

// Update the time every second.
setInterval(() => {
  server.reload({
    static: {
      "/api/time": new Response(new Date().toISOString()),
    },

    fetch(req) {
      return new Response("404!");
    },
  });
}, 1000);
```

Reloading static routes only impacts the next request. In-flight requests continue to use the old static routes. Once in-flight requests to the old static routes finish, the old static routes are freed from memory.

To simplify error handling, static routes do not support streaming response bodies from `ReadableStream` or an `AsyncIterator`. Fortunately, you can still buffer the response in memory first:

```ts
const time = await fetch("https://api.example.com/v1/data");
// Buffer the response in memory first.
const blob = await time.blob();

const server = Bun.serve({
  static: {
    "/api/data": new Response(blob),
  },

  fetch(req) {
    return new Response("404!");
  },
});
```

### Changing the `port` and `hostname`

To configure which port and hostname the server will listen on, set `port` and `hostname` in the options object.
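
For example (the values shown are placeholders):

```ts
Bun.serve({
  port: 8080, // defaults to $BUN_PORT, $PORT, $NODE_PORT, then 3000
  hostname: "mydomain.com", // defaults to "0.0.0.0"
  fetch(req) {
    return new Response("404!");
  },
});
```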

@@ -553,7 +845,7 @@ Update the server's handlers without restarting:

```ts
const server = Bun.serve({
  static: {
  routes: {
    "/api/version": Response.json({ version: "v1" }),
  },
  fetch(req) {
@@ -563,7 +855,7 @@ const server = Bun.serve({

// Update to new handler
server.reload({
  static: {
  routes: {
    "/api/version": Response.json({ version: "v2" }),
  },
  fetch(req) {
@@ -572,7 +864,7 @@ server.reload({
});
```

This is useful for development and hot reloading. Only `fetch`, `error`, and `static` handlers can be updated.
This is useful for development and hot reloading. Only `fetch`, `error`, and `routes` can be updated.

## Per-Request Controls

@@ -12,5 +12,3 @@ Alternatively, use `process.dlopen`:
let mod = { exports: {} };
process.dlopen(mod, "./my-node-module.node");
```

Bun polyfills the [`detect-libc`](https://npmjs.com/package/detect-libc) package, which is used by many Node-API modules to detect which `.node` binding to `require`.
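
As a rough sketch of how such a module typically picks a binding (assuming detect-libc's v2-style API with `familySync` and the `MUSL` constant; the paths are hypothetical):

```ts
import { familySync, MUSL } from "detect-libc";

// Choose a prebuilt binding based on the detected libc family.
const flavor = familySync() === MUSL ? "musl" : "glibc";

const mod = { exports: {} };
process.dlopen(mod, `./prebuilds/linux-x64-${flavor}/addon.node`);
```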

Some files were not shown because too many files have changed in this diff