Compare commits

...

227 Commits

Author SHA1 Message Date
Ashcon Partovi
5d93684c2f Debug windows specific 2024-06-25 20:18:22 -07:00
Ashcon Partovi
c3bc44adb5 Debug windows specific 2024-06-25 20:16:33 -07:00
Ashcon Partovi
604a8e0782 Debug windows specific 2024-06-25 20:14:40 -07:00
Ashcon Partovi
777c706e07 Debug windows script 2024-06-25 19:59:33 -07:00
Ashcon Partovi
aaec8ecc68 Fix local build scripts 2024-06-25 19:10:17 -07:00
Ashcon Partovi
e8523f7246 Too much smoke 2024-06-25 18:41:27 -07:00
Ashcon Partovi
2fc5ab5235 SMOKE tests fix 2024-06-25 18:39:33 -07:00
Ashcon Partovi
dfc3805903 SMOKE tests 2024-06-25 18:31:00 -07:00
Ashcon Partovi
4efd9e6e4c Fix duplicate key 2024-06-25 17:56:15 -07:00
Ashcon Partovi
e735d4a860 Both LTO and non-LTO 2024-06-25 17:54:46 -07:00
Ashcon Partovi
6198559b21 More fixes 2024-06-25 17:26:49 -07:00
Ashcon Partovi
d8ca88a351 Fix artifact location 2024-06-25 17:26:17 -07:00
Ashcon Partovi
103a9bd561 Fix crash error catch 2024-06-25 16:31:47 -07:00
Ashcon Partovi
826c4cbd28 bump windows paralleism to 10 2024-06-25 16:15:52 -07:00
Ashcon Partovi
8e33a1af7d Fix buildkite scripts 2024-06-25 16:14:11 -07:00
Ashcon Partovi
d83f383690 CI changes 2024-06-25 16:11:33 -07:00
Ashcon Partovi
c8c79c88f0 CI changes 2024-06-25 16:11:33 -07:00
Ashcon Partovi
665e6052aa new CI testing 2024-06-25 16:11:33 -07:00
Ashcon Partovi
4cb16b3581 Fix github workflow 2024-06-25 16:11:33 -07:00
Ashcon Partovi
16ee28ff57 Fix github workflow 2024-06-25 16:11:33 -07:00
Ashcon Partovi
07e86cb949 Fix github workflow 2024-06-25 16:11:33 -07:00
Ashcon Partovi
c62ba235ab Fix github workflow 2024-06-25 16:11:33 -07:00
Ashcon Partovi
7a055c89be Fix github workflow 2024-06-25 16:11:33 -07:00
Ashcon Partovi
566957250c Fix github workflow 2024-06-25 16:11:33 -07:00
Ashcon Partovi
cd4c5694d4 Fix github workflow 2024-06-25 16:11:33 -07:00
Ashcon Partovi
78655a06fc Fix github workflow 2024-06-25 16:11:33 -07:00
Ashcon Partovi
85eadb4c38 Fix github workflow 2024-06-25 16:11:33 -07:00
Ashcon Partovi
473f67aff7 Fix github workflow 2024-06-25 16:11:33 -07:00
Ashcon Partovi
6dd48a134f Fix github workflow 2024-06-25 16:11:33 -07:00
Ashcon Partovi
ba1d8c7529 Fix github workflow 2024-06-25 16:11:33 -07:00
Ashcon Partovi
f7830585c8 Fix github workflow 2024-06-25 16:11:33 -07:00
Ashcon Partovi
458a753a9b Fix setup-bun on Windows 2024-06-25 16:11:33 -07:00
Ashcon Partovi
21d27e9c54 Troubleshoot CI 2024-06-25 16:11:33 -07:00
Ashcon Partovi
4f9bdb632c Troubleshoot CI 2024-06-25 16:11:32 -07:00
Ashcon Partovi
fb59ca732f Troubleshoot CI 2024-06-25 16:11:32 -07:00
Ashcon Partovi
800dd81a67 Troubleshoot CI 2024-06-25 16:11:32 -07:00
Ashcon Partovi
155b6519f4 Troubleshoot CI 2024-06-25 16:11:32 -07:00
Ashcon Partovi
e1ae632b60 Troubleshoot CI 2024-06-25 16:11:32 -07:00
Ashcon Partovi
8217c26164 Fix CI 2024-06-25 16:11:32 -07:00
Ashcon Partovi
fc14d91500 Use github runners due to queue times 2024-06-25 16:11:32 -07:00
Ashcon Partovi
27d9b5a763 Fix bail on by default 2024-06-25 16:11:32 -07:00
Ashcon Partovi
4b8036f37d Fix filtering 2024-06-25 16:11:32 -07:00
Ashcon Partovi
189d251878 Parallel tests 2024-06-25 16:11:32 -07:00
Ashcon Partovi
ff811eca01 Parallel tests 2024-06-25 16:11:32 -07:00
Ashcon Partovi
2355d4e667 CI changes 2024-06-25 16:11:32 -07:00
Ashcon Partovi
68dac5ff9a CI changes 2024-06-25 16:11:32 -07:00
Ashcon Partovi
6c3bc83107 CI changes 2024-06-25 16:11:32 -07:00
Ashcon Partovi
0b8dc54af5 CI changes 2024-06-25 16:11:32 -07:00
Ashcon Partovi
8115477883 CI changes 2024-06-25 16:11:32 -07:00
Ashcon Partovi
3c4c253001 CI changes 2024-06-25 16:11:32 -07:00
Ashcon Partovi
70e9c3287f remove macos 11 2024-06-25 16:11:32 -07:00
Ashcon Partovi
8ca8e98629 add macos 11 tests 2024-06-25 16:11:32 -07:00
Ashcon Partovi
d91b030f06 Fix tests 2024-06-25 16:11:32 -07:00
Ashcon Partovi
9a37d11305 Fix test 2024-06-25 16:11:32 -07:00
Ashcon Partovi
01ad121868 Fix test 2024-06-25 16:11:32 -07:00
Ashcon Partovi
2d2c3a137f Fix test 2024-06-25 16:11:32 -07:00
Ashcon Partovi
950aa0c4a7 Updates 2024-06-25 16:11:32 -07:00
Ashcon Partovi
69c3dfbca4 Fix runner 2024-06-25 16:11:32 -07:00
Ashcon Partovi
17f2183024 Add macOS 12 tests 2024-06-25 16:11:32 -07:00
Ashcon Partovi
d77483c741 Truncate stdout preview 2024-06-25 16:11:32 -07:00
Ashcon Partovi
8e5f2e52b8 Fix windows baseline 2024-06-25 16:11:32 -07:00
Ashcon Partovi
93f5d690e7 Change pipeline 2024-06-25 16:11:32 -07:00
Ashcon Partovi
f752f3aa71 Fix test 2024-06-25 16:11:31 -07:00
Ashcon Partovi
97a6cf9cfe Fix diff 2024-06-25 16:11:31 -07:00
Ashcon Partovi
2825ad9862 Fix puppeteer on macOS cloud 2024-06-25 16:11:31 -07:00
Ashcon Partovi
028c5ebfd1 test/runner.node.mjs -> scripts/runner.node.mjs 2024-06-25 16:11:31 -07:00
Ashcon Partovi
45ced96cf2 Fix runner 2024-06-25 16:11:31 -07:00
Ashcon Partovi
95f9476ae2 Maybe fix scripts 2024-06-25 16:11:31 -07:00
Ashcon Partovi
17e2599dc5 Maybe fix scripts 2024-06-25 16:11:31 -07:00
Ashcon Partovi
24b81efdfd Maybe fix scripts 2024-06-25 16:11:31 -07:00
Ashcon Partovi
23de00c9f3 Maybe fix scripts 2024-06-25 16:11:31 -07:00
Ashcon Partovi
231ecd543d Fix scripts 2024-06-25 16:11:31 -07:00
Ashcon Partovi
f8eb92edfd Fix scripts 2024-06-25 16:11:31 -07:00
Ashcon Partovi
d3f6006668 Fix scripts 2024-06-25 16:11:31 -07:00
Ashcon Partovi
bb74106d0c Better labels 2024-06-25 16:11:31 -07:00
Ashcon Partovi
ebb3b47e73 Fix runner 2024-06-25 16:11:31 -07:00
Ashcon Partovi
737ef25ba2 Update scripts 2024-06-25 16:11:31 -07:00
Ashcon Partovi
dac79ff62c Update scripts 2024-06-25 16:11:31 -07:00
Ashcon Partovi
5cb105ecc4 Update scripts 2024-06-25 16:11:31 -07:00
Ashcon Partovi
6cc35cf091 Fix scripts for CI 2024-06-25 16:11:31 -07:00
Ashcon Partovi
3050be235e Fix scripts for CI 2024-06-25 16:11:31 -07:00
Ashcon Partovi
81b70cd543 Fix scripts for CI 2024-06-25 16:11:31 -07:00
Ashcon Partovi
8b7f14677d Update scripts 2024-06-25 16:11:31 -07:00
Ashcon Partovi
fe8c918bdf Fix test 2024-06-25 16:11:31 -07:00
Ashcon Partovi
f2b86b1bd1 Add support for the end of the world 2024-06-25 16:11:31 -07:00
Ashcon Partovi
3793149279 Fix test 2024-06-25 16:11:31 -07:00
Ashcon Partovi
ef99e7e2fb Fix test 2024-06-25 16:11:31 -07:00
Ashcon Partovi
22cf802ece Fix test 2024-06-25 16:11:31 -07:00
Ashcon Partovi
5b05c42462 Fix test 2024-06-25 16:11:31 -07:00
Ashcon Partovi
7f5cd2676e Fix test 2024-06-25 16:11:31 -07:00
Ashcon Partovi
9dbe19ca38 Fix test 2024-06-25 16:11:31 -07:00
Ashcon Partovi
da09200e53 Fix test 2024-06-25 16:11:31 -07:00
Ashcon Partovi
87a1da93d5 Fix test 2024-06-25 16:11:31 -07:00
Ashcon Partovi
56f912e999 Fix test 2024-06-25 16:11:31 -07:00
Ashcon Partovi
29b864dec2 Fix test 2024-06-25 16:11:31 -07:00
Ashcon Partovi
b482def1ab Fix test 2024-06-25 16:11:30 -07:00
Ashcon Partovi
4b5422e349 Fix test 2024-06-25 16:11:30 -07:00
Ashcon Partovi
f97991cdea Fix test 2024-06-25 16:11:30 -07:00
Ashcon Partovi
a6e4700932 Fix test 2024-06-25 16:11:30 -07:00
Ashcon Partovi
025d501b53 Fix test 2024-06-25 16:11:30 -07:00
Ashcon Partovi
aa4a344a7b Fix test 2024-06-25 16:11:30 -07:00
Ashcon Partovi
dcc18fb355 Fix test 2024-06-25 16:11:30 -07:00
Ashcon Partovi
3fa68ecf30 Fix test 2024-06-25 16:11:30 -07:00
Ashcon Partovi
945ab91288 More fixes 2024-06-25 16:11:30 -07:00
Ashcon Partovi
4e659ef489 More fixes 2024-06-25 16:11:30 -07:00
Ashcon Partovi
494bb322a8 More fixes 2024-06-25 16:11:30 -07:00
Ashcon Partovi
297730a285 More fixes 2024-06-25 16:11:30 -07:00
Ashcon Partovi
42f6e931e5 More fixes 2024-06-25 16:11:30 -07:00
Ashcon Partovi
1fbd66a57f Fix lots of tests without cygwin 3 2024-06-25 16:11:30 -07:00
Ashcon Partovi
ea156d3b11 Fix lots of tests without cygwin 2 2024-06-25 16:11:30 -07:00
Ashcon Partovi
4384b1fa9b Fix lots of tests without cygwin 2024-06-25 16:11:30 -07:00
Ashcon Partovi
67bb248e1b Fix runner on Windows 2024-06-25 16:11:30 -07:00
Ashcon Partovi
fa57f10250 Fix more tests so we don't need cygwin 2024-06-25 16:11:30 -07:00
Ashcon Partovi
3133fbe3fc Fix tmpdir in Windows 2024-06-25 16:11:30 -07:00
Ashcon Partovi
3a08c2198e Escape buildkite special log output 2024-06-25 16:11:30 -07:00
Ashcon Partovi
3250fa5f92 Remove special retry because buildkite is slow :( 2024-06-25 16:11:30 -07:00
Ashcon Partovi
63928a0cdb Add step id 2024-06-25 16:11:30 -07:00
Ashcon Partovi
d5e5efddac Add debug log 2024-06-25 16:11:30 -07:00
Ashcon Partovi
2991faba81 Fix issue in runner 2024-06-25 16:11:30 -07:00
Ashcon Partovi
ffaae41a13 add diff 2024-06-25 16:11:30 -07:00
Ashcon Partovi
4a0c44df83 Runner improvements 2024-06-25 16:11:30 -07:00
Ashcon Partovi
a1402f3511 More runner fixes 2024-06-25 16:11:30 -07:00
Ashcon Partovi
1baca0b9e4 Fix status file 2024-06-25 16:11:30 -07:00
Ashcon Partovi
92775854ba Runner improvements 2024-06-25 16:11:30 -07:00
Ashcon Partovi
770a62b320 Add progress indicator to test runner 2024-06-25 16:11:30 -07:00
Ashcon Partovi
5ec7ac73ef Fix flaky test 2024-06-25 16:11:29 -07:00
Ashcon Partovi
491e6f0369 Fix flaky test 2024-06-25 16:11:29 -07:00
Ashcon Partovi
d39f59ef51 Fix flaky test 2024-06-25 16:11:29 -07:00
Ashcon Partovi
ac8bc955eb Fix flaky test 2024-06-25 16:11:29 -07:00
Ashcon Partovi
f7bac5e2d3 Use per-test timeout 2024-06-25 16:11:29 -07:00
Ashcon Partovi
9f3a4ddb96 Fix flaky test 2024-06-25 16:11:29 -07:00
Ashcon Partovi
579cf03158 Fix diff in test runner 2024-06-25 16:11:29 -07:00
Ashcon Partovi
9db7294e0a Clean up bundler tests 2024-06-25 16:11:29 -07:00
Ashcon Partovi
02f887cf97 Fix more tests 2024-06-25 16:11:29 -07:00
Ashcon Partovi
f43fcb8bd1 Fix more tests 2024-06-25 16:11:29 -07:00
Ashcon Partovi
e3dfdc55b3 Fix some tests 2024-06-25 16:11:29 -07:00
Ashcon Partovi
e38ef49e33 Disable unshare 2024-06-25 16:11:29 -07:00
Ashcon Partovi
7b693576bb Fix rare issue with annotation 2024-06-25 16:11:29 -07:00
Ashcon Partovi
37277334d5 Set process.env.USER 2024-06-25 16:11:29 -07:00
Ashcon Partovi
16e802ad08 Retries 2024-06-25 16:11:29 -07:00
Ashcon Partovi
f7532a6277 Fix 2024-06-25 16:11:29 -07:00
Ashcon Partovi
9ab089bf87 More fixes 2024-06-25 16:11:29 -07:00
Ashcon Partovi
67634b95be Remove sparese 2024-06-25 16:11:29 -07:00
Ashcon Partovi
7f6c1f9ab6 More 2024-06-25 16:11:29 -07:00
Ashcon Partovi
6334a0e61b Fix flaky 2024-06-25 16:11:29 -07:00
Ashcon Partovi
3881c1ae3b Use hosted runners 2024-06-25 16:11:29 -07:00
Ashcon Partovi
9ae59fb624 Allow overite of bun target 2024-06-25 16:11:29 -07:00
Ashcon Partovi
04eb1b61e7 Windows fix home dir 2024-06-25 16:11:29 -07:00
Ashcon Partovi
cb3c86e583 Better timeouts 2024-06-25 16:11:28 -07:00
Ashcon Partovi
e511eef751 Fix unshare command 2024-06-25 16:11:28 -07:00
Ashcon Partovi
4c2fae3dab Add SSH script 2024-06-25 16:11:28 -07:00
Ashcon Partovi
1a1c9d3782 Test changes 2024-06-25 16:11:28 -07:00
Ashcon Partovi
828438d01a Allow filters in test runner 2024-06-25 16:11:28 -07:00
Ashcon Partovi
bd8551e76a Use unshare 2024-06-25 16:11:28 -07:00
Ashcon Partovi
4c7233d0c2 More changes 2024-06-25 16:11:28 -07:00
Ashcon Partovi
46099a1639 Inherit parent process.env 2024-06-25 16:11:28 -07:00
Ashcon Partovi
5efa10cbfa Use test isolation on Linux 2024-06-25 16:11:28 -07:00
Ashcon Partovi
5a74da4a40 Different queues so its faster 2024-06-25 16:11:28 -07:00
Ashcon Partovi
978f314a1a Test different queue 2024-06-25 16:11:28 -07:00
Ashcon Partovi
1110a76bfe Update pipeline 2024-06-25 16:11:28 -07:00
Ashcon Partovi
082b11524a Use special exit code 2024-06-25 16:11:28 -07:00
Ashcon Partovi
3a61d995cb Make concise summary better 2024-06-25 16:11:28 -07:00
Ashcon Partovi
79e6abfd6c Use buildkite for PRs 2024-06-25 16:11:28 -07:00
Ashcon Partovi
af2d51c9fc Soft fail: 2024-06-25 16:11:28 -07:00
Ashcon Partovi
e92df427ca Try zig on M1 2024-06-25 16:11:28 -07:00
Ashcon Partovi
28d5560ea0 Fix GitHub comment 2024-06-25 16:11:28 -07:00
Ashcon Partovi
2455bf1fa4 More parallelism 2024-06-25 16:11:28 -07:00
Ashcon Partovi
8ce7eae1a3 Zig optimized machines 2024-06-25 16:11:28 -07:00
Ashcon Partovi
1fe8af2e20 Maybe fix github output 2024-06-25 16:11:28 -07:00
Ashcon Partovi
edcea7e31b Maybe fix powershell 2024-06-25 16:11:28 -07:00
Ashcon Partovi
511b97d599 Change label 2024-06-25 16:11:28 -07:00
Ashcon Partovi
0123a2b0d7 Fix script 2024-06-25 16:11:28 -07:00
Ashcon Partovi
4b14b2fc2d Better scripts 2024-06-25 16:11:28 -07:00
Ashcon Partovi
1a0921b2e9 LTO 2024-06-25 16:11:28 -07:00
Ashcon Partovi
db06c601af Changes 2024-06-25 16:11:28 -07:00
Ashcon Partovi
feef707998 More runner fixes for Windows 2024-06-25 16:11:28 -07:00
Ashcon Partovi
ab213165f0 Maybe fix puppeteer 2 2024-06-25 16:11:28 -07:00
Ashcon Partovi
08b1aae387 Maybe fix bun-upgrade test 2024-06-25 16:11:28 -07:00
Ashcon Partovi
07ff957644 Maybe fix TMPDIR 2024-06-25 16:11:28 -07:00
Ashcon Partovi
429c396976 Group tests by path, not platform 2024-06-25 16:11:28 -07:00
Ashcon Partovi
46e48bffd5 Maybe fix puppeteer on AWS macOS 2024-06-25 16:11:27 -07:00
Ashcon Partovi
213f0f57b5 Really fix linker 2024-06-25 16:11:27 -07:00
Ashcon Partovi
532d7cd8ed Fix script path 2024-06-25 16:11:27 -07:00
Ashcon Partovi
8cce4eed79 Fix linker 2024-06-25 16:11:27 -07:00
Ashcon Partovi
d22048d75b Lots o changes 2024-06-25 16:11:27 -07:00
Ashcon Partovi
ab8f665c03 Changes to runner 3 2024-06-25 16:11:27 -07:00
Ashcon Partovi
84f1e8889f Changes to runner 2 2024-06-25 16:11:27 -07:00
Ashcon Partovi
6a5b7d909c Changes to runner 2024-06-25 16:11:27 -07:00
Ashcon Partovi
6692405d9c Revert "Remove powershell -command"
This reverts commit f5a11a81dbac7c3eede5ac4f391de1c77a31538b.
2024-06-25 16:11:27 -07:00
Ashcon Partovi
48dc7d720a Remove powershell -command 2024-06-25 16:11:27 -07:00
Ashcon Partovi
13841ffc93 Changes 2024-06-25 16:11:27 -07:00
Ashcon Partovi
bc589af1ad Fix exit code 2024-06-25 16:11:27 -07:00
Ashcon Partovi
886852fb26 Changes 2024-06-25 16:11:27 -07:00
Ashcon Partovi
8580dbc00b Lockfile 2024-06-25 16:11:27 -07:00
Ashcon Partovi
7f761b0f76 More changes 2024-06-25 16:11:27 -07:00
Ashcon Partovi
6090562cf5 More changes 2024-06-25 16:11:27 -07:00
Ashcon Partovi
57fe9b877c Move test runner 2024-06-25 16:11:27 -07:00
Ashcon Partovi
4df9aff4c2 Add printenv for CI 2024-06-25 16:11:27 -07:00
Ashcon Partovi
30912bc982 Test concurrency of 10! 2024-06-25 16:11:27 -07:00
Ashcon Partovi
8df928642e Bootstrap changes 2 2024-06-25 16:11:27 -07:00
Ashcon Partovi
09f2e69b2e Bootstrap changes 2024-06-25 16:11:27 -07:00
Ashcon Partovi
c04d9d270d More bootstrap fixes 2024-06-25 16:11:27 -07:00
Ashcon Partovi
03b40f89b6 More bootstrap fixes 2024-06-25 16:11:27 -07:00
Ashcon Partovi
05fcb18759 Fix error on linux? 2024-06-25 16:11:27 -07:00
Ashcon Partovi
ffa8f5328c Change user on Linux 2024-06-25 16:11:27 -07:00
Ashcon Partovi
bebdf06247 Changes 6 2024-06-25 16:11:27 -07:00
Ashcon Partovi
58ebe0e469 Changes 5 2024-06-25 16:11:27 -07:00
Ashcon Partovi
d16ad17fd9 Changes 4 2024-06-25 16:11:27 -07:00
Ashcon Partovi
327c047a68 Changes 3 2024-06-25 16:11:27 -07:00
Ashcon Partovi
9294d9b287 Changes 2 2024-06-25 16:11:27 -07:00
Ashcon Partovi
c2402d10e9 Changes 2024-06-25 16:11:27 -07:00
Ashcon Partovi
ef583214e6 Add linux bootstrap 2024-06-25 16:11:26 -07:00
Ashcon Partovi
05515f5e21 Improve bootstrap 5 2024-06-25 16:11:26 -07:00
Ashcon Partovi
102f04bc30 Improve bootstrap 4 2024-06-25 16:11:26 -07:00
Ashcon Partovi
e0214e1a67 Improve bootstrap 3 2024-06-25 16:11:26 -07:00
Ashcon Partovi
b6b601f175 Improve bootstrap 2 2024-06-25 16:11:26 -07:00
Ashcon Partovi
c185021018 Improve bootstrap 2024-06-25 16:11:26 -07:00
Ashcon Partovi
1ce8d42914 Add darwin bootstrap 2024-06-25 16:11:26 -07:00
Ashcon Partovi
dc5844db14 Test gardening 2 2024-06-25 16:11:26 -07:00
Ashcon Partovi
4171a3258e Test gardening 2024-06-25 16:11:26 -07:00
Ashcon Partovi
942f64ddd1 Changes 2024-06-25 16:11:26 -07:00
Ashcon Partovi
0329469e2a Fix cwd 2024-06-25 16:11:26 -07:00
Ashcon Partovi
cc82876d05 Use cygwin for Windows 2024-06-25 16:11:26 -07:00
Ashcon Partovi
5db74e74ba Fix creating log file 2024-06-25 16:11:26 -07:00
Ashcon Partovi
16733733cc Fix script 2024-06-25 16:11:26 -07:00
Ashcon Partovi
2765542e1e Fix missing codegen diff 2024-06-25 16:11:26 -07:00
Ashcon Partovi
e0a2b0cf0a Add BuildKite CI 2024-06-25 16:11:26 -07:00
172 changed files with 33649 additions and 30924 deletions

11
.buildkite/bootstrap.yml Normal file
View File

@@ -0,0 +1,11 @@
# Uploads the latest CI workflow to Buildkite.
# https://buildkite.com/docs/pipelines/defining-steps
#
# Changes to this file must be manually edited here:
# https://buildkite.com/bun/bun/settings/steps
steps:
- label: ":pipeline:"
command:
- "buildkite-agent pipeline upload .buildkite/ci.yml"
agents:
queue: "build-linux"

1267
.buildkite/ci.yml Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -6,45 +6,112 @@ inputs:
type: string
description: "The version of bun to install: 'latest', 'canary', 'bun-v1.0.0', etc."
default: latest
required: false
workflow-run-id:
type: string
description: "The workflow run ID where to download bun."
baseline:
type: boolean
description: "Whether to use the baseline version of bun."
default: false
required: false
add-to-path:
type: boolean
description: "Whether to add bun to PATH."
default: true
download-url:
type: string
description: "The base URL to download bun from."
default: "https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases"
required: false
outputs:
os:
description: "Operating system: 'darwin', 'linux', or 'windows'."
value: ${{ steps.setup.outputs.os }}
arch:
description: "Architecture: 'aarch64' or 'x64'."
value: ${{ steps.setup.outputs.arch }}
target:
description: "Target: 'bun-{os}-{arch}-[baseline]'."
value: ${{ steps.setup.outputs.target }}
release:
description: "Release: 'latest', 'canary', or 'bun-v1.x.x'."
value: ${{ steps.setup.outputs.release }}
bun-path:
description: "The path to Bun."
value: ${{ steps.unpack.outputs.bun-path }}
runs:
using: composite
steps:
- name: Setup Bun
- id: setup
name: Setup Bun
shell: bash
run: |
set -x
case "$(uname -s)" in
Linux*) os=linux;;
Darwin*) os=darwin;;
*) os=windows;;
esac
echo "os=${os}" >> $GITHUB_OUTPUT
case "$(uname -m)" in
arm64 | aarch64) arch=aarch64;;
*) arch=x64;;
esac
echo "arch=${arch}" >> $GITHUB_OUTPUT
case "${{ inputs.baseline }}" in
true | 1) target="bun-${os}-${arch}-baseline";;
*) target="bun-${os}-${arch}";;
true | 1) target="bun-${os}-${arch}-baseline" ;;
*) target="bun-${os}-${arch}" ;;
esac
echo "target=${target}" >> $GITHUB_OUTPUT
case "${{ inputs.bun-version }}" in
latest) release="latest";;
canary) release="canary";;
*) release="bun-v${{ inputs.bun-version }}";;
latest) release="latest" ;;
canary) release="canary" ;;
*) release="bun-v${{ inputs.bun-version }}" ;;
esac
curl -LO "${{ inputs.download-url }}/${release}/${target}.zip"
unzip ${target}.zip
mkdir -p ${{ runner.temp }}/.bun/bin
mv ${target}/bun* ${{ runner.temp }}/.bun/bin/
chmod +x ${{ runner.temp }}/.bun/bin/*
echo "${{ runner.temp }}/.bun/bin" >> ${GITHUB_PATH}
if [[ "${{ inputs.workflow-run-id }}" ]]; then
release="workflow-${{ inputs.workflow-run-id }}"
fi
echo "release=${release}" >> $GITHUB_OUTPUT
- if: ${{ inputs.workflow-run-id }}
name: Download Bun from Github Actions
uses: actions/download-artifact@v4
with:
github-token: ${{ github.token }}
run-id: ${{ inputs.workflow-run-id }}
name: ${{ steps.setup.outputs.target }}
- if: ${{ !inputs.workflow-run-id }}
name: Download Bun from URL
shell: bash
run: |
set -x
curl -LO ${{ inputs.download-url }}/${{ steps.setup.outputs.release }}/${{ steps.setup.outputs.target }}.zip
- id: unpack
name: Unpack Bun
shell: bash
run: |
set -x
target="${{ steps.setup.outputs.target }}"
release="${{ steps.setup.outputs.release }}"
mkdir -p ${target}/${release}
unzip ${target}.zip -d ${target}
mv ${target}/${target}/* ${target}/${release}/
rm -rf ${target}/${target}
rm -f ${target}.zip
bin="$(pwd)/${target}/${release}"
path="${bin}/bun"
if [[ "${{ runner.os }}" == "Windows" ]]; then
bin=$(cygpath -w ${bin})
path=$(cygpath -w ${path})
fi
echo "bun-bin=${bin}" >> $GITHUB_OUTPUT
echo "bun-path=${path}" >> ${GITHUB_OUTPUT}
- if: ${{ inputs.add-to-path == 'true' }}
name: Add Bun to Path
shell: bash
run: |
set -x
echo "${{ steps.unpack.outputs.bun-bin }}" >> ${GITHUB_PATH}

131
.github/workflows/build-bun.yml vendored Normal file
View File

@@ -0,0 +1,131 @@
name: Build Bun
permissions:
contents: read
actions: write
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
on:
workflow_call:
inputs:
no-cache:
type: boolean
canary:
type: boolean
assertions:
type: boolean
zig-optimize:
type: string
workflow_dispatch:
inputs:
no-cache:
type: boolean
canary:
type: boolean
assertions:
type: boolean
zig-optimize:
type: string
jobs:
linux-x64:
name: Build linux-x64
uses: ./.github/workflows/build-linux.yml
secrets: inherit
with:
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }}
tag: linux-x64
arch: x64
cpu: haswell
no-cache: ${{ inputs.no-cache }}
canary: ${{ inputs.canary }}
assertions: ${{ inputs.assertions }}
zig-optimize: ${{ inputs.zig-optimize }}
linux-x64-baseline:
name: Build linux-x64-baseline
uses: ./.github/workflows/build-linux.yml
secrets: inherit
with:
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }}
tag: linux-x64-baseline
arch: x64
cpu: nehalem
no-cache: ${{ inputs.no-cache }}
canary: ${{ inputs.canary }}
assertions: ${{ inputs.assertions }}
zig-optimize: ${{ inputs.zig-optimize }}
linux-aarch64:
if: ${{ github.repository_owner == 'oven-sh' }}
name: Build linux-aarch64
uses: ./.github/workflows/build-linux.yml
secrets: inherit
with:
runs-on: namespace-profile-bun-ci-linux-aarch64
tag: linux-aarch64
arch: aarch64
cpu: native
no-cache: ${{ inputs.no-cache }}
canary: ${{ inputs.canary }}
assertions: ${{ inputs.assertions }}
zig-optimize: ${{ inputs.zig-optimize }}
darwin-x64:
name: Build darwin-x64
uses: ./.github/workflows/build-darwin.yml
secrets: inherit
with:
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-12-large' || 'macos-12' }}
tag: darwin-x64
arch: x64
cpu: haswell
no-cache: ${{ inputs.no-cache }}
canary: ${{ inputs.canary }}
assertions: ${{ inputs.assertions }}
zig-optimize: ${{ inputs.zig-optimize }}
darwin-aarch64:
name: Build darwin-aarch64
uses: ./.github/workflows/build-darwin.yml
secrets: inherit
with:
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-darwin-aarch64' || 'macos-12' }}
tag: darwin-aarch64
arch: aarch64
cpu: native
no-cache: ${{ inputs.no-cache }}
canary: ${{ inputs.canary }}
assertions: ${{ inputs.assertions }}
zig-optimize: ${{ inputs.zig-optimize }}
windows-x64:
name: Build windows-x64
uses: ./.github/workflows/build-windows.yml
secrets: inherit
with:
runs-on: windows
tag: windows-x64
arch: x64
cpu: haswell
no-cache: ${{ inputs.no-cache }}
canary: ${{ inputs.canary }}
assertions: ${{ inputs.assertions }}
zig-optimize: ${{ inputs.zig-optimize }}
windows-x64-baseline:
name: Build windows-x64-baseline
uses: ./.github/workflows/build-windows.yml
secrets: inherit
with:
runs-on: windows
tag: windows-x64-baseline
arch: x64
cpu: nehalem
no-cache: ${{ inputs.no-cache }}
canary: ${{ inputs.canary }}
assertions: ${{ inputs.assertions }}
zig-optimize: ${{ inputs.zig-optimize }}

View File

@@ -21,6 +21,8 @@ on:
required: true
assertions:
type: boolean
zig-optimize:
type: string
canary:
type: boolean
no-cache:
@@ -189,6 +191,7 @@ jobs:
arch: ${{ inputs.arch }}
cpu: ${{ inputs.cpu }}
assertions: ${{ inputs.assertions }}
zig-optimize: ${{ inputs.zig-optimize }}
canary: ${{ inputs.canary }}
no-cache: ${{ inputs.no-cache }}
link:

View File

@@ -43,6 +43,7 @@ jobs:
zig-optimize: ${{ inputs.zig-optimize }}
canary: ${{ inputs.canary }}
no-cache: ${{ inputs.no-cache }}
on-failure:
if: ${{ github.repository_owner == 'oven-sh' && failure() }}
name: On Failure

View File

@@ -21,6 +21,8 @@ on:
required: true
assertions:
type: boolean
zig-optimize:
type: string
canary:
type: boolean
no-cache:
@@ -48,6 +50,7 @@ jobs:
run: |
git config --global core.autocrlf false
git config --global core.eol lf
git config --system core.longpaths true
- name: Checkout
uses: actions/checkout@v4
with:
@@ -108,6 +111,7 @@ jobs:
name: bun-${{ inputs.tag }}-deps
path: bun-deps
if-no-files-found: error
codegen:
name: Codegen
runs-on: ubuntu-latest
@@ -116,6 +120,7 @@ jobs:
run: |
git config --global core.autocrlf false
git config --global core.eol lf
git config --system core.longpaths true
- name: Checkout
uses: actions/checkout@v4
- name: Setup Bun
@@ -136,6 +141,7 @@ jobs:
name: bun-${{ inputs.tag }}-codegen
path: build-codegen-win32-x64
if-no-files-found: error
build-cpp:
name: Build C++
needs: codegen
@@ -147,6 +153,7 @@ jobs:
run: |
git config --global core.autocrlf false
git config --global core.eol lf
git config --system core.longpaths true
- name: Checkout
uses: actions/checkout@v4
with:
@@ -200,19 +207,23 @@ jobs:
name: bun-${{ inputs.tag }}-cpp
path: build/bun-cpp-objects.a
if-no-files-found: error
build-zig:
name: Build Zig
uses: ./.github/workflows/build-zig.yml
with:
os: windows
zig-optimize: ReleaseSafe
only-zig: true
tag: ${{ inputs.tag }}
arch: ${{ inputs.arch }}
cpu: ${{ inputs.cpu }}
assertions: ${{ inputs.assertions }}
zig-optimize: ReleaseSafe
# Windows is always ReleaseSafe for now
# zig-optimize: ${{ inputs.zig-optimize }}
canary: ${{ inputs.canary }}
no-cache: ${{ inputs.no-cache }}
link:
name: Link
runs-on: ${{ inputs.runs-on }}
@@ -228,6 +239,7 @@ jobs:
run: |
git config --global core.autocrlf false
git config --global core.eol lf
git config --system core.longpaths true
- name: Checkout
uses: actions/checkout@v4
with:
@@ -323,6 +335,7 @@ jobs:
path: features.json
if-no-files-found: error
overwrite: true
on-failure:
if: ${{ github.repository_owner == 'oven-sh' && failure() }}
name: On Failure

View File

@@ -2,18 +2,15 @@ name: CI
permissions:
contents: read
statuses: read
actions: write
concurrency:
group: ${{ github.workflow }}-${{ github.event_name == 'workflow_dispatch' && inputs.run-id || github.ref }}
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
on:
workflow_dispatch:
inputs:
run-id:
type: string
description: The workflow ID to download artifacts (skips the build step)
pull_request:
paths-ignore:
- .vscode/**/*
@@ -28,218 +25,74 @@ on:
- examples/**/*
jobs:
format:
if: ${{ !inputs.run-id }}
name: Format
uses: ./.github/workflows/run-format.yml
secrets: inherit
with:
zig-version: 0.13.0
permissions:
contents: write
lint:
if: ${{ !inputs.run-id }}
name: Lint
uses: ./.github/workflows/run-lint.yml
secrets: inherit
linux-x64:
if: ${{ !inputs.run-id }}
name: Build linux-x64
uses: ./.github/workflows/build-linux.yml
secrets: inherit
with:
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }}
tag: linux-x64
arch: x64
cpu: haswell
canary: true
no-cache: true
linux-x64-baseline:
if: ${{ !inputs.run-id }}
name: Build linux-x64-baseline
uses: ./.github/workflows/build-linux.yml
secrets: inherit
with:
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }}
tag: linux-x64-baseline
arch: x64
cpu: nehalem
canary: true
no-cache: true
linux-aarch64:
if: ${{ !inputs.run-id && github.repository_owner == 'oven-sh' }}
name: Build linux-aarch64
uses: ./.github/workflows/build-linux.yml
secrets: inherit
with:
runs-on: namespace-profile-bun-ci-linux-aarch64
tag: linux-aarch64
arch: aarch64
cpu: native
canary: true
no-cache: true
darwin-x64:
if: ${{ !inputs.run-id }}
name: Build darwin-x64
uses: ./.github/workflows/build-darwin.yml
secrets: inherit
with:
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-12-large' || 'macos-12' }}
tag: darwin-x64
arch: x64
cpu: haswell
canary: true
darwin-x64-baseline:
if: ${{ !inputs.run-id }}
name: Build darwin-x64-baseline
uses: ./.github/workflows/build-darwin.yml
secrets: inherit
with:
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-12-large' || 'macos-12' }}
tag: darwin-x64-baseline
arch: x64
cpu: nehalem
canary: true
darwin-aarch64:
if: ${{ !inputs.run-id }}
name: Build darwin-aarch64
uses: ./.github/workflows/build-darwin.yml
secrets: inherit
with:
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-darwin-aarch64' || 'macos-12' }}
tag: darwin-aarch64
arch: aarch64
cpu: native
canary: true
windows-x64:
if: ${{ !inputs.run-id }}
name: Build windows-x64
uses: ./.github/workflows/build-windows.yml
secrets: inherit
with:
runs-on: windows
tag: windows-x64
arch: x64
cpu: haswell
canary: true
windows-x64-baseline:
if: ${{ !inputs.run-id }}
name: Build windows-x64-baseline
uses: ./.github/workflows/build-windows.yml
secrets: inherit
with:
runs-on: windows
tag: windows-x64-baseline
arch: x64
cpu: nehalem
canary: true
linux-x64-test:
if: ${{ inputs.run-id || github.event_name == 'pull_request' }}
name: Test linux-x64
needs: linux-x64
uses: ./.github/workflows/run-test.yml
secrets: inherit
with:
run-id: ${{ inputs.run-id }}
pr-number: ${{ github.event.number }}
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }}
tag: linux-x64
linux-x64-baseline-test:
if: ${{ inputs.run-id || github.event_name == 'pull_request' }}
name: Test linux-x64-baseline
needs: linux-x64-baseline
uses: ./.github/workflows/run-test.yml
secrets: inherit
with:
run-id: ${{ inputs.run-id }}
pr-number: ${{ github.event.number }}
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }}
tag: linux-x64-baseline
linux-aarch64-test:
if: ${{ inputs.run-id || github.event_name == 'pull_request' && github.repository_owner == 'oven-sh'}}
name: Test linux-aarch64
needs: linux-aarch64
uses: ./.github/workflows/run-test.yml
secrets: inherit
with:
run-id: ${{ inputs.run-id }}
pr-number: ${{ github.event.number }}
runs-on: namespace-profile-bun-ci-linux-aarch64
tag: linux-aarch64
darwin-x64-test:
if: ${{ inputs.run-id || github.event_name == 'pull_request' }}
name: Test darwin-x64
needs: darwin-x64
uses: ./.github/workflows/run-test.yml
secrets: inherit
with:
run-id: ${{ inputs.run-id }}
pr-number: ${{ github.event.number }}
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-12-large' || 'macos-12' }}
tag: darwin-x64
darwin-x64-baseline-test:
if: ${{ inputs.run-id || github.event_name == 'pull_request' }}
name: Test darwin-x64-baseline
needs: darwin-x64-baseline
uses: ./.github/workflows/run-test.yml
secrets: inherit
with:
run-id: ${{ inputs.run-id }}
pr-number: ${{ github.event.number }}
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-12-large' || 'macos-12' }}
tag: darwin-x64-baseline
darwin-aarch64-test:
if: ${{ inputs.run-id || github.event_name == 'pull_request' }}
name: Test darwin-aarch64
needs: darwin-aarch64
uses: ./.github/workflows/run-test.yml
secrets: inherit
with:
run-id: ${{ inputs.run-id }}
pr-number: ${{ github.event.number }}
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-darwin-aarch64' || 'macos-12' }}
tag: darwin-aarch64
windows-x64-test:
if: ${{ inputs.run-id || github.event_name == 'pull_request' }}
name: Test windows-x64
needs: windows-x64
uses: ./.github/workflows/run-test.yml
secrets: inherit
with:
run-id: ${{ inputs.run-id }}
pr-number: ${{ github.event.number }}
runs-on: windows
tag: windows-x64
windows-x64-baseline-test:
if: ${{ inputs.run-id || github.event_name == 'pull_request' }}
name: Test windows-x64-baseline
needs: windows-x64-baseline
uses: ./.github/workflows/run-test.yml
secrets: inherit
with:
run-id: ${{ inputs.run-id }}
pr-number: ${{ github.event.number }}
runs-on: windows
tag: windows-x64-baseline
cleanup:
if: ${{ always() }}
name: Cleanup
needs:
- linux-x64
- linux-x64-baseline
- linux-aarch64
- darwin-x64
- darwin-x64-baseline
- darwin-aarch64
- windows-x64
- windows-x64-baseline
trigger:
name: CI
runs-on: ubuntu-latest
steps:
- name: Cleanup Artifacts
uses: geekyeggo/delete-artifact@v5
- id: ci
name: Start CI
uses: buildkite/trigger-pipeline-action@v2.2.0
with:
name: |
bun-*-cpp
bun-*-zig
bun-*-deps
bun-*-codegen
buildkite_api_access_token: ${{ secrets.BUILDKITE_TOKEN }}
pipeline: "bun/bun"
send_pull_request: true
- name: Wait for CI
uses: EnricoMi/download-buildkite-artifact-action@v1
with:
buildkite_token: ${{ secrets.BUILDKITE_TOKEN }}
buildkite_build_url: ${{ steps.ci.outputs.url }}
ignore_build_states: blocked,canceled,skipped,not_run
ignore_job_states: timed_out,failed
output_path: artifacts
- name: Upload bun-linux-x64
uses: actions/upload-artifact@v4
with:
name: bun-linux-x64
path: artifacts/bun-linux-x64.zip
if-no-files-found: error
- name: Upload bun-linux-x64-baseline
uses: actions/upload-artifact@v4
with:
name: bun-linux-x64-baseline
path: artifacts/bun-linux-x64-baseline.zip
if-no-files-found: error
- name: Upload bun-linux-aarch64
uses: actions/upload-artifact@v4
with:
name: bun-linux-aarch64
path: artifacts/bun-linux-aarch64.zip
if-no-files-found: error
- name: Upload bun-darwin-x64
uses: actions/upload-artifact@v4
with:
name: bun-darwin-x64
path: artifacts/bun-darwin-x64.zip
if-no-files-found: error
- name: Upload bun-darwin-x64-baseline
uses: actions/upload-artifact@v4
with:
name: bun-darwin-x64-baseline
path: artifacts/bun-darwin-x64-baseline.zip
if-no-files-found: error
- name: Upload bun-darwin-aarch64
uses: actions/upload-artifact@v4
with:
name: bun-darwin-aarch64
path: artifacts/bun-darwin-aarch64.zip
if-no-files-found: error
- name: Upload bun-windows-x64
uses: actions/upload-artifact@v4
with:
name: bun-windows-x64
path: artifacts/bun-windows-x64.zip
if-no-files-found: error
- name: Upload bun-windows-x64-baseline
uses: actions/upload-artifact@v4
with:
name: bun-windows-x64-baseline
path: artifacts/bun-windows-x64-baseline.zip
if-no-files-found: error

View File

@@ -14,10 +14,11 @@ on:
type: string
description: The workflow ID to download artifacts (skips the build step)
pull_request:
paths-ignore:
- .vscode/**/*
- docs/**/*
- examples/**/*
paths:
- ".github/workflows/lint-cpp.yml"
- "**/*.cpp"
- "src/deps/**/*"
- "CMakeLists.txt"
jobs:
lint-cpp:

View File

@@ -13,23 +13,42 @@ on:
tag:
type: string
required: true
pr-number:
github-id:
type: string
required: true
run-id:
release-name:
type: string
buildkite-url:
type: string
default: ${{ github.run_id }}
jobs:
test:
name: Tests
name: ${{ matrix.label }}
runs-on: ${{ inputs.runs-on }}
strategy:
fail-fast: false
matrix:
include:
- label: Bundler tests
include: bundler/,transpiler/
- label: CLI tests
include: cli/
- label: Node tests
include: js/node/
- label: Bun tests
include: js/bun/
- label: Web tests
include: js/web/
- label: Integration tests
include: integration/,third_party/
- label: Other tests
exclude: bundler/,transpiler/,cli/,integration/,third_party/,js/node/,js/bun/,js/web/
steps:
- if: ${{ runner.os == 'Windows' }}
name: Setup Git
run: |
git config --global core.autocrlf false
git config --global core.eol lf
git config --system core.autocrlf false
git config --system core.eol lf
git config --system core.longpaths true
- name: Checkout
uses: actions/checkout@v4
with:
@@ -37,188 +56,122 @@ jobs:
package.json
bun.lockb
test
packages/bun-internal-test
packages/bun-types
- name: Setup Environment
shell: bash
run: |
echo "${{ inputs.pr-number }}" > pr-number.txt
- name: Download Bun
uses: actions/download-artifact@v4
scripts
.github
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
name: bun-${{ inputs.tag }}
path: bun
github-token: ${{ github.token }}
run-id: ${{ inputs.run-id || github.run_id }}
- name: Download pnpm
uses: pnpm/action-setup@v4
bun-version: latest
baseline: ${{ contains(inputs.tag, '-baseline') }}
- id: setup-release
if: ${{ inputs.release-name }}
name: Setup Bun from Release
uses: ./.github/actions/setup-bun
with:
version: 8
- if: ${{ runner.os != 'Windows' }}
name: Setup Bun
shell: bash
run: |
unzip bun/bun-*.zip
cd bun-*
pwd >> $GITHUB_PATH
- if: ${{ runner.os == 'Windows' }}
name: Setup Cygwin
uses: secondlife/setup-cygwin@v3
bun-version: ${{ inputs.release-name }}
baseline: ${{ contains(inputs.tag, '-baseline') }}
add-to-path: false
- id: setup-github
if: ${{ inputs.github-id }}
name: Setup Bun from Github Actions
uses: ./.github/actions/setup-bun
with:
packages: bash
- if: ${{ runner.os == 'Windows' }}
name: Setup Bun (Windows)
run: |
unzip bun/bun-*.zip
cd bun-*
pwd >> $env:GITHUB_PATH
workflow-run-id: ${{ inputs.github-id }}
baseline: ${{ contains(inputs.tag, '-baseline') }}
add-to-path: false
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: 20
- name: Install Dependencies
timeout-minutes: 5
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
bun install
- name: Install Dependencies (test)
timeout-minutes: 5
run: |
bun install --cwd test
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Install Dependencies (runner)
timeout-minutes: 5
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
bun install --cwd packages/bun-internal-test
node-version: 22
- name: Run Tests
id: test
timeout-minutes: 90
shell: bash
timeout-minutes: 30
env:
IS_BUN_CI: 1
TMPDIR: ${{ runner.temp }}
BUN_TAG: ${{ inputs.tag }}
BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: "true"
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
TEST_INFO_STRIPE: ${{ secrets.TEST_INFO_STRIPE }}
TEST_INFO_AZURE_SERVICE_BUS: ${{ secrets.TEST_INFO_AZURE_SERVICE_BUS }}
SHELLOPTS: igncr
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
node packages/bun-internal-test/src/runner.node.mjs $(which bun)
- if: ${{ always() }}
name: Upload Results
uses: actions/upload-artifact@v4
with:
name: bun-${{ inputs.tag }}-tests
path: |
test-report.*
comment.md
pr-number.txt
if-no-files-found: error
overwrite: true
- if: ${{ always() && steps.test.outputs.failing_tests != '' && github.event.pull_request && github.repository_owner == 'oven-sh' }}
name: Send Message
uses: sarisia/actions-status-discord@v1
with:
webhook: ${{ secrets.DISCORD_WEBHOOK }}
nodetail: true
color: "#FF0000"
title: ""
description: |
### ❌ [${{ github.event.pull_request.title }}](${{ github.event.pull_request.html_url }})
node scripts/runner.node.mjs --exec-path "${{ steps.setup-github.outputs.bun-path || steps.setup-release.outputs.bun-path }}" --include "${{ matrix.include }}" --exclude "${{ matrix.exclude }}"
@${{ github.actor }}, there are ${{ steps.test.outputs.failing_tests_count || 'some' }} failing tests on bun-${{ inputs.tag }}.
${{ steps.test.outputs.failing_tests }}
**[View logs](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})**
- name: Fail
if: ${{ failure() || always() && steps.test.outputs.failing_tests != '' }}
run: |
echo "There are ${{ steps.test.outputs.failing_tests_count || 'some' }} failing tests on bun-${{ inputs.tag }}."
exit 1
test-node:
name: Node.js Tests
# TODO: enable when we start paying attention to the results. In the meantime, this causes CI to queue jobs wasting developer time.
if: 0
runs-on: ${{ inputs.runs-on }}
steps:
- if: ${{ runner.os == 'Windows' }}
name: Setup Git
run: |
git config --global core.autocrlf false
git config --global core.eol lf
- name: Checkout
uses: actions/checkout@v4
with:
sparse-checkout: |
test/node.js
- name: Setup Environment
shell: bash
run: |
echo "${{ inputs.pr-number }}" > pr-number.txt
- name: Download Bun
uses: actions/download-artifact@v4
with:
name: bun-${{ inputs.tag }}
path: bun
github-token: ${{ github.token }}
run-id: ${{ inputs.run-id || github.run_id }}
- if: ${{ runner.os != 'Windows' }}
name: Setup Bun
shell: bash
run: |
unzip bun/bun-*.zip
cd bun-*
pwd >> $GITHUB_PATH
- if: ${{ runner.os == 'Windows' }}
name: Setup Cygwin
uses: secondlife/setup-cygwin@v3
with:
packages: bash
- if: ${{ runner.os == 'Windows' }}
name: Setup Bun (Windows)
run: |
unzip bun/bun-*.zip
cd bun-*
pwd >> $env:GITHUB_PATH
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: 20
- name: Checkout Tests
shell: bash
working-directory: test/node.js
run: |
node runner.mjs --pull
- name: Install Dependencies
timeout-minutes: 5
shell: bash
working-directory: test/node.js
run: |
bun install
- name: Run Tests
timeout-minutes: 10 # Increase when more tests are added
shell: bash
working-directory: test/node.js
env:
TMPDIR: ${{ runner.temp }}
BUN_GARBAGE_COLLECTOR_LEVEL: "0"
BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: "true"
run: |
node runner.mjs
- name: Upload Results
uses: actions/upload-artifact@v4
with:
name: bun-${{ inputs.tag }}-node-tests
path: |
test/node.js/summary/*.json
if-no-files-found: error
overwrite: true
# TODO: Enable when we start running the Node.js test suite
# test-node:
# name: Node.js Tests
# runs-on: ${{ inputs.runs-on }}
# steps:
# - if: ${{ runner.os == 'Windows' }}
# name: Setup Git
# run: |
# git config --global core.autocrlf false
# git config --global core.eol lf
# git config --system core.longpaths true
# - name: Checkout
# uses: actions/checkout@v4
# with:
# sparse-checkout: |
# test/node.js
# - name: Setup Environment
# shell: bash
# run: |
# echo "${{ inputs.pr-number }}" > pr-number.txt
# - name: Download Bun
# uses: actions/download-artifact@v4
# with:
# name: bun-${{ inputs.tag }}
# path: bun
# github-token: ${{ github.token }}
# run-id: ${{ inputs.run-id || github.run_id }}
# - if: ${{ runner.os != 'Windows' }}
# name: Setup Bun
# shell: bash
# run: |
# unzip bun/bun-*.zip
# cd bun-*
# pwd >> $GITHUB_PATH
# - if: ${{ runner.os == 'Windows' }}
# name: Setup Cygwin
# uses: secondlife/setup-cygwin@v3
# with:
# packages: bash
# - if: ${{ runner.os == 'Windows' }}
# name: Setup Bun (Windows)
# run: |
# unzip bun/bun-*.zip
# cd bun-*
# pwd >> $env:GITHUB_PATH
# - name: Setup Node.js
# uses: actions/setup-node@v4
# with:
# node-version: 20
# - name: Checkout Tests
# shell: bash
# working-directory: test/node.js
# run: |
# node runner.mjs --pull
# - name: Install Dependencies
# timeout-minutes: 5
# shell: bash
# working-directory: test/node.js
# run: |
# bun install
# - name: Run Tests
# timeout-minutes: 10 # Increase when more tests are added
# shell: bash
# working-directory: test/node.js
# env:
# TMPDIR: ${{ runner.temp }}
# BUN_GARBAGE_COLLECTOR_LEVEL: "0"
# BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: "true"
# run: |
# node runner.mjs
# - name: Upload Results
# uses: actions/upload-artifact@v4
# with:
# name: bun-${{ inputs.tag }}-node-tests
# path: |
# test/node.js/summary/*.json
# if-no-files-found: error
# overwrite: true

100
.github/workflows/test-bun.yml vendored Normal file
View File

@@ -0,0 +1,100 @@
name: Test Bun
permissions:
contents: read
actions: write
concurrency:
group: ${{ github.workflow }}-${{ inputs.workflow-run-id || inputs.buildkite-url || inputs.bun-version || github.ref }}
cancel-in-progress: true
on:
workflow_dispatch:
inputs:
workflow-run-id:
type: string
buildkite-url:
type: string
bun-version:
type: string
workflow_call:
inputs:
workflow-run-id:
type: string
buildkite-url:
type: string
bun-version:
type: string
jobs:
linux-x64-test:
name: Test linux-x64
uses: ./.github/workflows/run-test.yml
secrets: inherit
with:
runs-on: ubuntu-latest
tag: linux-x64
workflow-run-id: ${{ inputs.workflow-run-id }}
buildkite-url: ${{ inputs.buildkite-url }}
bun-version: ${{ inputs.bun-version }}
linux-x64-baseline-test:
name: Test linux-x64-baseline
uses: ./.github/workflows/run-test.yml
secrets: inherit
with:
runs-on: ubuntu-latest
tag: linux-x64-baseline
workflow-run-id: ${{ inputs.workflow-run-id }}
buildkite-url: ${{ inputs.buildkite-url }}
bun-version: ${{ inputs.bun-version }}
linux-aarch64-test:
if: ${{ github.repository_owner == 'oven-sh' }}
name: Test linux-aarch64
uses: ./.github/workflows/run-test.yml
secrets: inherit
with:
runs-on: namespace-profile-bun-ci-linux-aarch64
tag: linux-aarch64
workflow-run-id: ${{ inputs.workflow-run-id }}
buildkite-url: ${{ inputs.buildkite-url }}
bun-version: ${{ inputs.bun-version }}
darwin-x64-test:
name: Test darwin-x64
uses: ./.github/workflows/run-test.yml
secrets: inherit
with:
runs-on: macos-12
tag: darwin-x64
workflow-run-id: ${{ inputs.workflow-run-id }}
buildkite-url: ${{ inputs.buildkite-url }}
bun-version: ${{ inputs.bun-version }}
darwin-aarch64-test:
name: Test darwin-aarch64
uses: ./.github/workflows/run-test.yml
secrets: inherit
with:
runs-on: macos-12
tag: darwin-aarch64
workflow-run-id: ${{ inputs.workflow-run-id }}
buildkite-url: ${{ inputs.buildkite-url }}
bun-version: ${{ inputs.bun-version }}
windows-x64-test:
name: Test windows-x64
uses: ./.github/workflows/run-test.yml
secrets: inherit
with:
runs-on: windows-latest
tag: windows-x64
workflow-run-id: ${{ inputs.workflow-run-id }}
buildkite-url: ${{ inputs.buildkite-url }}
bun-version: ${{ inputs.bun-version }}
windows-x64-baseline-test:
name: Test windows-x64-baseline
uses: ./.github/workflows/run-test.yml
secrets: inherit
with:
runs-on: windows-latest
tag: windows-x64-baseline
workflow-run-id: ${{ inputs.workflow-run-id }}
buildkite-url: ${{ inputs.buildkite-url }}
bun-version: ${{ inputs.bun-version }}

8
.vscode/launch.json generated vendored
View File

@@ -445,8 +445,8 @@
"request": "launch",
"name": "bun test [*] (ci)",
"program": "node",
"args": ["src/runner.node.mjs"],
"cwd": "${workspaceFolder}/packages/bun-internal-test",
"args": ["test/runner.node.mjs"],
"cwd": "${workspaceFolder}",
"console": "internalConsole",
},
// Windows: bun test [file]
@@ -1093,8 +1093,8 @@
"request": "launch",
"name": "Windows: bun test [*] (ci)",
"program": "node",
"args": ["src/runner.node.mjs"],
"cwd": "${workspaceFolder}/packages/bun-internal-test",
"args": ["test/runner.node.mjs"],
"cwd": "${workspaceFolder}",
"console": "internalConsole",
},
],

View File

@@ -466,8 +466,9 @@ if(USE_UNIFIED_SOURCES)
endif()
# CCache
find_program(CCACHE_PROGRAM sccache)
find_program(CCACHE_PROGRAM ccache)
#find_program(CCACHE_PROGRAM buildcache) # https://gitlab.com/bits-n-bites/buildcache
#find_program(CCACHE_PROGRAM sccache) # https://github.com/mozilla/sccache
#find_program(CCACHE_PROGRAM ccache) # https://ccache.dev/
if(CCACHE_PROGRAM)
set(CMAKE_CXX_COMPILER_LAUNCHER "${CCACHE_PROGRAM}")
@@ -614,7 +615,7 @@ set(BUN_DEPS_DIR "${BUN_SRC}/deps")
set(BUN_CODEGEN_SRC "${BUN_SRC}/codegen")
if(NOT BUN_DEPS_OUT_DIR)
set(BUN_DEPS_OUT_DIR "${BUN_DEPS_DIR}")
set(BUN_DEPS_OUT_DIR "${CMAKE_CURRENT_SOURCE_DIR}/build/bun-deps")
endif()
set(BUN_RAW_SOURCES, "")

View File

@@ -129,7 +129,7 @@ SED = $(shell which gsed 2>/dev/null || which sed 2>/dev/null)
BUN_DIR ?= $(shell dirname $(realpath $(firstword $(MAKEFILE_LIST))))
BUN_DEPS_DIR ?= $(shell pwd)/src/deps
BUN_DEPS_OUT_DIR ?= $(BUN_DEPS_DIR)
BUN_DEPS_OUT_DIR ?= $(shell pwd)/build/bun-deps
CPU_COUNT = 2
ifeq ($(OS_NAME),darwin)
CPU_COUNT = $(shell sysctl -n hw.logicalcpu)

BIN
bun.lockb

Binary file not shown.

View File

@@ -5,23 +5,23 @@
"./packages/bun-types"
],
"dependencies": {
"@vscode/debugadapter": "^1.61.0",
"esbuild": "^0.17.15",
"eslint": "^8.20.0",
"eslint-config-prettier": "^8.5.0",
"mitata": "^0.1.3",
"@vscode/debugadapter": "^1.65.0",
"esbuild": "^0.21.4",
"eslint": "^9.4.0",
"eslint-config-prettier": "^9.1.0",
"mitata": "^0.1.11",
"peechy": "0.4.34",
"prettier": "^3.2.5",
"react": "^18.2.0",
"react-dom": "^18.2.0",
"source-map-js": "^1.0.2",
"typescript": "^5.0.2"
"react": "^18.3.1",
"react-dom": "^18.3.1",
"source-map-js": "^1.2.0",
"typescript": "^5.4.5"
},
"devDependencies": {
"@types/bun": "^1.1.2",
"@types/react": "^18.0.25",
"@typescript-eslint/eslint-plugin": "^5.31.0",
"@typescript-eslint/parser": "^5.31.0"
"@types/bun": "^1.1.3",
"@types/react": "^18.3.3",
"@typescript-eslint/eslint-plugin": "^7.11.0",
"@typescript-eslint/parser": "^7.11.0"
},
"resolutions": {
"bun-types": "workspace:packages/bun-types"
@@ -43,8 +43,8 @@
"fmt:zig": "zig fmt src/*.zig src/*/*.zig src/*/*/*.zig src/*/*/*/*.zig",
"lint": "eslint './**/*.d.ts' --cache",
"lint:fix": "eslint './**/*.d.ts' --cache --fix",
"test": "node packages/bun-internal-test/src/runner.node.mjs ./build/bun-debug",
"test:release": "node packages/bun-internal-test/src/runner.node.mjs ./build-release/bun",
"test": "node scripts/runner.node.mjs ./build/bun-debug",
"test:release": "node scripts/runner.node.mjs ./build-release/bun",
"zig-check": ".cache/zig/zig.exe build check --summary new",
"zig-check-all": ".cache/zig/zig.exe build check-all --summary new",
"zig": ".cache/zig/zig.exe "

View File

@@ -1,606 +0,0 @@
import * as action from "@actions/core";
import { spawn, spawnSync } from "child_process";
import { rmSync, writeFileSync, readFileSync, mkdirSync, openSync, closeSync } from "fs";
import { readdirSync } from "node:fs";
import { resolve, basename } from "node:path";
import { cpus, hostname, tmpdir, totalmem, userInfo } from "os";
import { join, normalize, posix, relative } from "path";
import { fileURLToPath } from "url";
import PQueue from "p-queue";
// Wall-clock start of the whole run; used for relative timestamps in logs.
const run_start = new Date();
// Per-test hard kill timeout (5 minutes) and the "slow test" warning threshold (1 minute).
const TIMEOUT_DURATION = 5 * 60 * 1000;
const SHORT_TIMEOUT_DURATION = Math.ceil(TIMEOUT_DURATION / 5);

/**
 * Default number of test files to run in parallel.
 * Non-Windows platforms run serially: parallelism causes instability due to
 * the number of open file descriptors / sockets in some tests. Windows has
 * higher limits, so it may use up to 2 workers.
 */
function defaultConcurrency() {
  if (process.platform === "win32") {
    const workers = Math.floor((cpus().length - 2) / 2);
    return Math.min(workers, 2);
  }
  return 1;
}
// True when running on Windows; used for env-var naming and NTSTATUS lookups.
const windows = process.platform === "win32";
// Total physical memory, used to derive a default JSC RAM budget.
const nativeMemory = totalmem();
// Optional override from the environment, in bytes. parseInt yields 0/NaN for
// unset or malformed values, which we treat as "no override".
const force_ram_size_input = parseInt(process.env["BUN_JSC_forceRAMSize"] || "0", 10);
// Default budget: a quarter of physical memory, as a decimal string.
let force_ram_size = Number(BigInt(nativeMemory) >> BigInt(2)) + "";
// Fix: only honor the override when it is a valid positive integer. The
// original condition was inverted, so an unset or invalid env var clobbered
// the sane default with "0" or "NaN".
if (Number.isSafeInteger(force_ram_size_input) && force_ram_size_input > 0) {
  force_ram_size = force_ram_size_input + "";
}
/**
 * Rewrite Cygwin/MSYS-style temp dir paths (e.g. "/c/Users/...") in the
 * environment to native Windows paths ("C:\Users\..."), since processes
 * spawned outside the POSIX shell cannot resolve the former.
 * No-op on non-Windows platforms.
 */
function uncygwinTempDir() {
  if (process.platform !== "win32") {
    return;
  }
  for (const key of ["TMPDIR", "TEMP", "TEMPDIR", "TMP"]) {
    const value = process.env[key] || "";
    // Only touch values that look like "/c/..." (drive letter as first segment).
    if (!/^\/[a-zA-Z]\//.test(value)) {
      continue;
    }
    const driveLetter = value[1].toUpperCase();
    // Fix: the original called `path.win32.normalize`, but this file imports
    // only named exports ({ join, normalize, posix, relative }) from "path",
    // so `path` was an undefined reference and this crashed on Windows.
    process.env[key] = `${driveLetter}:${value.substring(2)}`.replaceAll("/", "\\");
  }
}
uncygwinTempDir();
// Repository root: this script lives at <root>/packages/bun-internal-test/src/,
// so four levels up from the module URL. All relative paths assume this cwd.
const cwd = resolve(fileURLToPath(import.meta.url), "../../../../");
process.chdir(cwd);
// Whether we are running under GitHub Actions.
const ci = !!process.env["GITHUB_ACTIONS"];
const enableProgressBar = false;
// Unique prefix for this run's temp directories so concurrent runs don't collide.
const dirPrefix = "bun-test-tmp-" + ((Math.random() * 100_000_0) | 0).toString(36) + "_";
// Fix: the original computed `Number(value, 10)` — a leftover parseInt radix
// argument that Number ignores — and a malformed env value silently produced
// NaN concurrency. Parse explicitly and fall back to the default on NaN;
// an explicit "0" still clamps to 1 as before.
const requested_concurrency = Number.parseInt(process.env["BUN_TEST_CONCURRENCY"] || "", 10);
const run_concurrency = Math.max(
  Number.isNaN(requested_concurrency) ? defaultConcurrency() : requested_concurrency,
  1,
);
const queue = new PQueue({ concurrency: run_concurrency });
// Path of the most recently created per-test temp directory.
var prevTmpdir = "";

/**
 * Create a fresh, uniquely named temp directory for a single test run,
 * remember it in `prevTmpdir`, and return its absolute path.
 */
function maketemp() {
  const timestamp = (Date.now() | 0).toString();
  const nonce = ((Math.random() * 100_000_0) | 0).toString(36);
  prevTmpdir = join(tmpdir(), `${dirPrefix}${timestamp}_${nonce}`);
  mkdirSync(prevTmpdir, { recursive: true });
  return prevTmpdir;
}
// File extensions that count as runnable test scripts.
const extensions = [".js", ".ts", ".jsx", ".tsx", ".mjs", ".cjs", ".mts", ".cts", ".mjsx", ".cjsx", ".mtsx", ".ctsx"];
// Commit SHA for building GitHub permalinks; prefer the CI-provided value,
// otherwise shell out to git. (Assumes git is available when GITHUB_SHA is
// unset — if neither exists, `.stdout` is null and `.trim()` throws.)
const git_sha =
process.env["GITHUB_SHA"] ?? spawnSync("git", ["rev-parse", "HEAD"], { encoding: "utf-8" }).stdout.trim();
// Optional substring filter: only test paths containing this value are run.
const TEST_FILTER = process.env.BUN_TEST_FILTER;
/**
 * True when `path` is a runnable test file: its basename contains ".test.",
 * it has a recognized script extension, and (when BUN_TEST_FILTER is set)
 * the path contains the filter substring.
 */
function isTest(path) {
  const looksLikeTest = basename(path).includes(".test.");
  const hasKnownExtension = extensions.some(ext => path.endsWith(ext));
  if (!looksLikeTest || !hasKnownExtension) {
    return false;
  }
  return TEST_FILTER ? path.includes(TEST_FILTER) : true;
}
/**
 * Recursively yield absolute paths of all test files under `dir`, skipping
 * node_modules and .git directories. (`query` is unused but kept for
 * call-site compatibility.)
 */
function* findTests(dir, query) {
  const entries = readdirSync(resolve(dir), { encoding: "utf-8", withFileTypes: true });
  for (const entry of entries) {
    const fullPath = resolve(dir, entry.name);
    if (entry.isDirectory()) {
      const skip = entry.name === "node_modules" || entry.name === ".git";
      if (!skip) {
        yield* findTests(fullPath, query);
      }
    } else if (isTest(fullPath)) {
      yield fullPath;
    }
  }
}
// Resolve the Bun executable under test: last CLI argument wins, then the
// BUN_PATH_BASE/BUN_PATH env pair, then whatever "bun" resolves to on PATH.
let bunExe = "bun";
if (process.argv.length > 2) {
  bunExe = resolve(process.argv.at(-1));
} else if (process.env.BUN_PATH) {
  const { BUN_PATH_BASE, BUN_PATH } = process.env;
  bunExe = resolve(normalize(BUN_PATH_BASE), normalize(BUN_PATH));
}
// Probe the executable for its revision; a spawn error other than "not found"
// is re-thrown, "not found" exits with a readable message.
const { error, stdout: revision_stdout } = spawnSync(bunExe, ["--revision"], {
  env: { ...process.env, BUN_DEBUG_QUIET_LOGS: 1 },
});
if (error) {
  if (error.code !== "ENOENT") throw error;
  console.error(`\x1b[31merror\x1b[0;2m:\x1b[0m Could not find Bun executable at '${bunExe}'`);
  process.exit(1);
}
const revision = revision_stdout.toString().trim();
// Ask Bun itself for its argv[0] so logs show the real on-disk path.
const { error: error2, stdout: argv0_stdout } = spawnSync(bunExe, ["-e", "console.log(process.argv[0])"], {
  env: { ...process.env, BUN_DEBUG_QUIET_LOGS: 1 },
});
if (error2) throw error2;
const argv0 = argv0_stdout.toString().trim();
console.log(`Testing ${argv0} v${revision}`);
// Location of the Windows SDK header that maps NTSTATUS codes to names.
const ntStatusPath = "C:\\Program Files (x86)\\Windows Kits\\10\\Include\\10.0.22621.0\\shared\\ntstatus.h";
// Lazily-read contents of ntstatus.h; set to "" when the header is unavailable.
let ntstatus_header_cache = null;

/**
 * Map a raw NTSTATUS exit code to its symbolic name (e.g.
 * STATUS_ACCESS_VIOLATION) by grepping the Windows SDK header. Returns null
 * when the header is missing or the code is unknown.
 */
function lookupWindowsError(code) {
  if (ntstatus_header_cache === null) {
    try {
      ntstatus_header_cache = readFileSync(ntStatusPath, "utf-8");
    } catch {
      console.error(`could not find ntstatus.h to lookup error code: ${ntStatusPath}`);
      ntstatus_header_cache = "";
    }
  }
  const pattern = new RegExp(`(STATUS_\\w+).*0x${code.toString(16)}`, "i");
  const match = pattern.exec(ntstatus_header_cache);
  return match ? match[1] : null;
}
// Aggregated results for the whole run.
const failing_tests = [];
const passing_tests = [];

// Highest file descriptor observed so far; -1 until first measured.
let maxFd = -1;
// Set once getMaxFileDescriptor has produced a baseline measurement.
let hasInitialMaxFD = false;

/**
 * Return the highest open file descriptor of this process, used to detect fd
 * leaks between tests. Returns -1 on Windows (fds aren't enumerable there).
 * Reads the per-process fd directory on linux/darwin, and falls back to
 * probing /dev/null when that fails. The `path` parameter is unused.
 */
function getMaxFileDescriptor(path) {
  if (process.platform === "win32") {
    return -1;
  }
  hasInitialMaxFD = true;
  if (process.platform === "linux" || process.platform === "darwin") {
    const fdDir = process.platform === "darwin" ? "/dev/fd" : "/proc/self/fd";
    try {
      for (const name of readdirSync(fdDir)) {
        const fd = parseInt(name.trim(), 10);
        if (Number.isSafeInteger(fd) && fd >= 0 && fd > maxFd) {
          maxFd = fd;
        }
      }
      return maxFd;
    } catch {}
  }
  const probe = openSync("/dev/null", "r");
  closeSync(probe);
  maxFd = probe + 1;
  return maxFd;
}
// Tests currently executing: test file name -> { start: epoch ms, proc }.
const activeTests = new Map();
// Number of tests that exceeded the "slow" threshold during the last sweep.
let slowTestCount = 0;

/**
 * Periodic watchdog: kills tests that exceeded TIMEOUT_DURATION and warns
 * about tests running longer than SHORT_TIMEOUT_DURATION. When the number of
 * slow tests grows and we're already running in parallel, concurrency is
 * bumped by one so slow tests don't starve the queue.
 */
function checkSlowTests() {
  const now = Date.now();
  const prevSlowTestCount = slowTestCount;
  slowTestCount = 0;
  for (const [path, { start, proc }] of activeTests) {
    const elapsed = now - start;
    if (proc && elapsed >= TIMEOUT_DURATION) {
      console.error(
        `\x1b[31merror\x1b[0;2m:\x1b[0m Killing test ${JSON.stringify(path)} after ${Math.ceil(elapsed / 1000)}s`,
      );
      proc?.stdout?.destroy?.();
      proc?.stderr?.destroy?.();
      proc?.kill?.(9);
    } else if (elapsed > SHORT_TIMEOUT_DURATION) {
      console.error(
        `\x1b[33mwarning\x1b[0;2m:\x1b[0m Test ${JSON.stringify(path)} has been running for ${Math.ceil(elapsed / 1000)}s`,
      );
      slowTestCount++;
    }
  }
  if (slowTestCount > prevSlowTestCount && queue.concurrency > 1) {
    queue.concurrency += 1;
  }
}
setInterval(checkSlowTests, SHORT_TIMEOUT_DURATION).unref();
// Monotonic counter assigning each test file a run-order number for logging.
var currentTestNumber = 0;

// Run a single test file in a child `bun test` process, capture its combined
// output, classify the failure reason (hang/panic/signal/exit code/NTSTATUS),
// check for file-descriptor leaks, record the result in
// failing_tests/passing_tests, and return whether it passed.
async function runTest(path) {
  const pathOnDisk = resolve(path);
  const thisTestNumber = currentTestNumber++;
  // Repo-relative, forward-slash path used as the test's display name and map key.
  const testFileName = posix.normalize(relative(cwd, path).replaceAll("\\", "/"));
  let exitCode, signal, err, output;
  const start = Date.now();
  // Registered in activeTests so checkSlowTests can warn about / kill it.
  const activeTestObject = { start, proc: undefined };
  activeTests.set(testFileName, activeTestObject);
  try {
    await new Promise((finish, reject) => {
      const chunks = [];
      process.stderr.write(
        `
at ${((start - run_start.getTime()) / 1000).toFixed(2)}s, file ${thisTestNumber
          .toString()
          .padStart(total.toString().length, "0")}/${total}, ${failing_tests.length} failing files
Starting "${testFileName}"
`,
      );
      // Fresh temp dir per test; also used for Bun's install cache.
      const TMPDIR = maketemp();
      const proc = spawn(bunExe, ["test", pathOnDisk], {
        stdio: ["ignore", "pipe", "pipe"],
        env: {
          ...process.env,
          FORCE_COLOR: "1",
          BUN_GARBAGE_COLLECTOR_LEVEL: "1",
          BUN_JSC_forceRAMSize: force_ram_size,
          BUN_RUNTIME_TRANSPILER_CACHE_PATH: "0",
          GITHUB_ACTIONS: process.env.GITHUB_ACTIONS ?? "true",
          BUN_DEBUG_QUIET_LOGS: "1",
          BUN_INSTALL_CACHE_DIR: join(TMPDIR, ".bun-install-cache"),
          BUN_ENABLE_CRASH_REPORTING: "1",
          // Windows children read TEMP; POSIX children read TMPDIR.
          [windows ? "TEMP" : "TMPDIR"]: TMPDIR,
        },
      });
      activeTestObject.proc = proc;
      proc.stdout.once("end", () => {
        done();
      });
      // `done` must be called twice (stdout end + clean exit) before the
      // promise settles; `actuallyDone` settles immediately and then
      // neutralizes both so later events are no-ops.
      let doneCalls = 0;
      var done = () => {
        // TODO: wait for stderr as well
        // spawn.test currently causes it to hang
        if (doneCalls++ === 1) {
          actuallyDone();
        }
      };
      var actuallyDone = function () {
        actuallyDone = done = () => {};
        proc?.stderr?.unref?.();
        proc?.stdout?.unref?.();
        proc?.unref?.();
        output = Buffer.concat(chunks).toString();
        finish();
      };
      // if (!KEEP_TMPDIR)
      //   proc.once("close", () => {
      //     rm(TMPDIR, { recursive: true, force: true }).catch(() => {});
      //   });
      // stdout and stderr are interleaved into one buffer; echoed live only
      // when running serially.
      proc.stdout.on("data", chunk => {
        chunks.push(chunk);
        if (run_concurrency === 1) process.stdout.write(chunk);
      });
      proc.stderr.on("data", chunk => {
        chunks.push(chunk);
        if (run_concurrency === 1) process.stderr.write(chunk);
      });
      proc.once("close", () => {
        activeTestObject.proc = undefined;
      });
      proc.once("exit", (code_, signal_) => {
        activeTestObject.proc = undefined;
        exitCode = code_;
        signal = signal_;
        // On failure, settle without waiting for stdout end (it may never come).
        if (signal || exitCode !== 0) {
          actuallyDone();
        } else {
          done();
        }
      });
      proc.once("error", err_ => {
        activeTestObject.proc = undefined;
        err = err_;
        actuallyDone();
      });
    });
  } finally {
    activeTests.delete(testFileName);
  }
  // Fd-leak detection: compare the max open fd before/after the test; a
  // growth beyond concurrency * 2 is reported as a probable leak.
  if (!hasInitialMaxFD) {
    getMaxFileDescriptor();
  } else if (maxFd > 0) {
    const prevMaxFd = maxFd;
    maxFd = getMaxFileDescriptor();
    if (maxFd > prevMaxFd + queue.concurrency * 2) {
      process.stderr.write(
        `\n\x1b[31mewarn\x1b[0;2m:\x1b[0m file descriptor leak in ${testFileName}, delta: ${
          maxFd - prevMaxFd
        }, current: ${maxFd}, previous: ${prevMaxFd}\n`,
      );
    }
  }
  const passed = exitCode === 0 && !err && !signal;
  // Derive a short human-readable failure reason, most specific first.
  let reason = "";
  if (!passed) {
    let match;
    if (err && err.message.includes("timed")) {
      reason = "hang";
    } else if ((match = output && output.match(/thread \d+ panic: (.*)\n/))) {
      reason = 'panic "' + match[1] + '"';
    } else if (err) {
      reason = (err.name || "Error") + ": " + err.message;
    } else if (signal) {
      reason = signal;
    } else if (exitCode === 1) {
      const failMatch = output.match(/\x1b\[31m\s(\d+) fail/);
      if (failMatch) {
        reason = failMatch[1] + " failing";
      } else {
        reason = "code 1";
      }
    } else {
      // Non-1 exit codes on Windows are NTSTATUS values; translate if possible.
      const x = windows && lookupWindowsError(exitCode);
      if (x) {
        if (x === "STATUS_BREAKPOINT") {
          if (output.includes("Segmentation fault at address")) {
            reason = "STATUS_ACCESS_VIOLATION";
          }
        }
        // NOTE(review): this unconditionally overwrites the
        // STATUS_ACCESS_VIOLATION value assigned just above — likely a
        // missing `else`; confirm intended behavior before fixing.
        reason = x;
      } else {
        reason = "code " + exitCode;
      }
    }
  }
  const duration = (Date.now() - start) / 1000;
  if (run_concurrency !== 1 && enableProgressBar) {
    // clear line
    process.stdout.write("\x1b[2K\r");
  }
  console.log(
    `\x1b[2m${formatTime(duration).padStart(6, " ")}\x1b[0m ${
      passed ? "\x1b[32m✔" : "\x1b[31m✖"
    } ${testFileName}\x1b[0m${reason ? ` (${reason})` : ""}`,
  );
  finished++;
  if (run_concurrency !== 1 && enableProgressBar) {
    writeProgressBar();
  }
  // When running in parallel under CI, dump the buffered output so logs are
  // complete even though they weren't echoed live.
  if (run_concurrency > 1 && ci) {
    process.stderr.write(output);
  }
  if (!passed) {
    failing_tests.push({ path: testFileName, reason, output });
    process.exitCode = 1;
    if (err) console.error(err);
  } else {
    passing_tests.push(testFileName);
  }
  return passed;
}
// Count of test files that have completed (pass or fail).
var finished = 0;

/**
 * Redraw the single-line progress bar: "[finished/total] [===   ]".
 * Width adapts to the terminal, capped at 80 columns.
 */
function writeProgressBar() {
  const columns = Math.min(process.stdout.columns || 40, 80);
  const barWidth = columns - 2;
  const percent = (finished / total) * 100;
  const filled = "=".repeat(Math.floor(percent / 2));
  const prefix = `[${finished}/${total}] [${filled}`;
  const padding = " ".repeat(barWidth - prefix.length);
  process.stdout.write(`\r${prefix}${padding}]`);
}
// Discover every test file under <repo>/test and enqueue them all, then wait
// for the queue to drain. A rejection here is a bug in the runner itself (a
// test failure resolves normally), so it aborts the whole run.
const allTests = [...findTests(resolve(cwd, "test"))];
console.log(`Starting ${allTests.length} tests with ${run_concurrency} concurrency...`);
let total = allTests.length;
for (const path of allTests) {
  queue.add(
    async () =>
      await runTest(path).catch(e => {
        console.error("Bug in bun-internal-test");
        console.error(e);
        process.exit(1);
      }),
  );
}
await queue.onIdle();
console.log(`
Completed ${total} tests with ${failing_tests.length} failing tests
`);
console.log("\n");
/**
 * Build a permalink to `linkTo` (a repo-relative path) in the oven-sh/bun
 * repository at this run's commit SHA.
 */
function linkToGH(linkTo) {
  return `https://github.com/oven-sh/bun/blob/${git_sha}/${linkTo}`;
}
// Sort results alphabetically for stable report output.
failing_tests.sort((a, b) => a.path.localeCompare(b.path));
passing_tests.sort((a, b) => a.localeCompare(b));
// Markdown bullet list of failing files with their one-line reasons.
const failingTestDisplay = failing_tests.map(({ path, reason }) => `- \`${path}\` ${reason}`).join("\n");
// const passingTestDisplay = passing_tests.map(path => `- \`${path}\``).join("\n");
// Remove any stale report from a previous run before writing new ones.
rmSync("report.md", { force: true });
const uptime = process.uptime();
/**
 * Format a duration in seconds as "SS.Ss", "Mm SS.Ss", or "Hh Mm SS.Ss".
 * @param {number} seconds - non-negative duration in seconds
 * @returns {string} human-readable duration
 */
function formatTime(seconds) {
  if (seconds < 60) {
    return seconds.toFixed(1) + "s";
  }
  if (seconds < 60 * 60) {
    // Fix: use Math.floor, not toFixed(0) — toFixed rounds to nearest, so
    // e.g. 90 s formatted as "2m 30.0s" instead of "1m 30.0s".
    return Math.floor(seconds / 60) + "m " + formatTime(seconds % 60);
  }
  // Same rounding fix for the hours component.
  return Math.floor(seconds / 60 / 60) + "h " + formatTime(seconds % (60 * 60));
}
// Human-readable summary banner with host, platform, versions, and pass/fail
// counts, printed to the console between horizontal rules and embedded in the
// Markdown report below.
const header = `
host: ${process.env["GITHUB_RUN_ID"] ? "GitHub Actions: " : ""}${userInfo().username}@${hostname()}
platform: ${process.platform} ${process.arch}
bun: ${argv0}
version: v${revision}
date: ${run_start.toISOString()}
duration: ${formatTime(uptime)}
total: ${total} files
failing: ${failing_tests.length} files
passing: ${passing_tests.length} files
percent: ${((passing_tests.length / total) * 100).toFixed(2)}%
`.trim();
console.log("\n" + "-".repeat(Math.min(process.stdout.columns || 40, 80)) + "\n");
console.log(header);
console.log("\n" + "-".repeat(Math.min(process.stdout.columns || 40, 80)) + "\n");
// Assemble the Markdown report: title with the branch/ref, the summary
// header, the failing-test list, and each failing test's sanitized output.
let report = `# bun test on ${
process.env["GITHUB_REF"] ??
spawnSync("git", ["rev-parse", "--abbrev-ref", "HEAD"], { encoding: "utf-8" }).stdout.trim()
}
\`\`\`
${header}
\`\`\`
`;
if (failingTestDisplay.length > 0) {
  report += `## Failing tests\n\n`;
  report += failingTestDisplay;
  report += "\n\n";
}
// if(passingTestDisplay.length > 0) {
//   report += `## Passing tests\n\n`;
//   report += passingTestDisplay;
//   report += "\n\n";
// }
if (failing_tests.length) {
  report += `## Failing tests log output\n\n`;
  for (const { path, output, reason } of failing_tests) {
    report += `### ${path}\n\n`;
    report += "[Link to file](" + linkToGH(path) + ")\n\n";
    report += `${reason}\n\n`;
    report += "```\n";
    // Strip ANSI color codes and GitHub Actions workflow commands so the
    // captured output can't inject ::commands into the Actions log.
    let failing_output = output
      .replace(/\x1b\[[0-9;]*m/g, "")
      .replace(/^::(group|endgroup|error|warning|set-output|add-matcher|remove-matcher).*$/gm, "");
    // Cap each test's output at 64 KiB to keep the report manageable.
    if (failing_output.length > 1024 * 64) {
      failing_output = failing_output.slice(0, 1024 * 64) + `\n\n[truncated output (length: ${failing_output.length})]`;
    }
    report += failing_output;
    // NOTE(review): if the output doesn't end in a newline the closing fence
    // lands mid-line — confirm whether a "\n" should be appended first.
    report += "```\n\n";
  }
}
// Persist both human-readable and machine-readable reports.
writeFileSync("test-report.md", report);
writeFileSync(
  "test-report.json",
  JSON.stringify({
    failing_tests,
    passing_tests,
  }),
);
/**
 * Capitalize a platform-tag word for display: arm64/aarch64 words are fully
 * uppercased, words containing "x64" collapse to "x64", words containing
 * "baseline" are kept lowercase, and anything else gets a leading capital.
 * (Function name kept as-is — it is referenced elsewhere in this file.)
 * @param {string} str - one dash-separated segment of a platform tag
 * @returns {string}
 */
function mabeCapitalize(str) {
  const word = str.toLowerCase();
  if (word.includes("arm64") || word.includes("aarch64")) {
    return word.toUpperCase();
  }
  if (word.includes("x64")) {
    return "x64";
  }
  if (word.includes("baseline")) {
    return word;
  }
  // Fix: guard the empty string — previously str[0] was undefined and
  // calling .toUpperCase() on it threw a TypeError.
  if (word.length === 0) {
    return word;
  }
  return word[0].toUpperCase() + word.slice(1);
}
console.log("-> test-report.md, test-report.json");

/**
 * Wrap `text` in a Markdown link when `url` is an https URL; otherwise
 * return the text unchanged.
 */
function linkify(text, url) {
  if (!url?.startsWith?.("https://")) {
    return text;
  }
  return `[${text}](${url})`;
}
// Under GitHub Actions: mark the job failed, export outputs for downstream
// steps, write comment.md for the PR-comment bot, and attach the (possibly
// truncated) report to the Actions job summary.
if (ci) {
  if (failing_tests.length > 0) {
    action.setFailed(`${failing_tests.length} files with failing tests`);
  }
  action.setOutput("failing_tests", failingTestDisplay);
  action.setOutput("failing_tests_count", failing_tests.length);
  if (failing_tests.length) {
    const { env } = process;
    const tag = process.env.BUN_TAG || "unknown";
    // Link back to this workflow run.
    const url = `${env.GITHUB_SERVER_URL}/${env.GITHUB_REPOSITORY}/actions/runs/${env.GITHUB_RUN_ID}`;
    let comment = `## ${linkify(`${emojiTag(tag)}${failing_tests.length} failing tests`, url)} ${tag
      .split("-")
      .map(mabeCapitalize)
      .join(" ")}
${failingTestDisplay}
`;
    writeFileSync("comment.md", comment);
  }
  // Job summaries have a size limit; truncate at ~512 KB to stay under it.
  let truncated_report = report;
  if (truncated_report.length > 512 * 1000) {
    truncated_report = truncated_report.slice(0, 512 * 1000) + "\n\n...truncated...";
  }
  action.summary.addRaw(truncated_report);
  await action.summary.write();
}
/**
 * Build a short emoji prefix describing a platform tag (OS then
 * architecture), with a trailing space when any emoji matched; returns ""
 * for unrecognized tags.
 */
function emojiTag(tag) {
  const lower = tag.toLowerCase();
  let emojiText = "";
  if (lower.includes("win32") || lower.includes("windows")) {
    emojiText += "🪟";
  }
  if (lower.includes("linux")) {
    emojiText += "🐧";
  }
  if (lower.includes("macos") || lower.includes("darwin")) {
    // NOTE(review): this appends an empty string — an apple emoji was
    // probably lost in transit; confirm against history before changing.
    emojiText += "";
  }
  const isX64Family =
    lower.includes("x86") || lower.includes("x64") || lower.includes("_64") || lower.includes("amd64");
  if (isX64Family) {
    emojiText += lower.includes("linux") ? "🖥" : "💻";
  }
  if (lower.includes("arm64") || lower.includes("aarch64")) {
    emojiText += "💪";
  }
  return emojiText ? emojiText + " " : emojiText;
}
// Exit non-zero when any test failed; otherwise propagate whatever exit code
// was set while running (process.exitCode defaults to 0/undefined).
process.exit(failing_tests.length ? 1 : process.exitCode);

9
scripts/all-dependencies.ps1 Normal file → Executable file
View File

@@ -3,13 +3,14 @@ param(
)
$ErrorActionPreference = 'Stop'
. (Join-Path $PSScriptRoot "env.ps1")
if ($env:CI) {
& (Join-Path $PSScriptRoot "update-submodules.ps1")
}
$DidAnything = $false;
$BUN_BASE_DIR = if ($env:BUN_BASE_DIR) { $env:BUN_BASE_DIR } else { Join-Path $PSScriptRoot '..' }
$BUN_DEPS_DIR = if ($env:BUN_DEPS_DIR) { $env:BUN_DEPS_DIR } else { Join-Path $BUN_BASE_DIR 'src\deps' }
$BUN_DEPS_OUT_DIR = if ($env:BUN_DEPS_OUT_DIR) { $env:BUN_DEPS_OUT_DIR } else { $BUN_DEPS_DIR }
function Build-Dependency {
param(
$Script,

View File

@@ -1,6 +1,11 @@
#!/usr/bin/env bash
set -euo pipefail
set -eo pipefail
source "$(dirname -- "${BASH_SOURCE[0]}")/env.sh"
if [[ "$CI" ]]; then
$(dirname -- "${BASH_SOURCE[0]}")/update-submodules.sh
fi
FORCE=
while getopts "f" opt; do
@@ -19,16 +24,35 @@ while getopts "f" opt; do
done
BUILT_ANY=0
SUBMODULES=
CACHE_DIR=
CACHE=0
if [ -n "$BUN_DEPS_CACHE_DIR" ]; then
CACHE_DIR="$BUN_DEPS_CACHE_DIR"
CACHE=1
SUBMODULES="$(git submodule status)"
fi
dep() {
local script="$1"
local submodule="$1"
local script="$2"
CACHE_KEY=
if [ "$CACHE" == "1" ]; then
CACHE_KEY="$submodule/$(echo "$SUBMODULES" | grep "$submodule" | git hash-object --stdin)"
fi
if [ -z "$FORCE" ]; then
HAS_ALL_DEPS=1
shift
for lib in "$@"; do
for lib in "${@:2}"; do
if [ ! -f "$BUN_DEPS_OUT_DIR/$lib" ]; then
HAS_ALL_DEPS=0
break
if [[ "$CACHE" == "1" && -f "$CACHE_DIR/$CACHE_KEY/$lib" ]]; then
mkdir -p "$BUN_DEPS_OUT_DIR"
cp "$CACHE_DIR/$CACHE_KEY/$lib" "$BUN_DEPS_OUT_DIR/$lib"
printf "%s %s - already cached\n" "$script" "$lib"
else
HAS_ALL_DEPS=0
break
fi
fi
done
if [ "$HAS_ALL_DEPS" == "1" ]; then
@@ -41,27 +65,34 @@ dep() {
set +e
bash "$SCRIPT_DIR/build-$script.sh"
EXIT=$?
set -e
if [ "$EXIT" -ne 0 ]; then
printf "Failed to build %s\n" "$script"
exit "$EXIT"
fi
set -e
if [ "$CACHE" == "1" ]; then
mkdir -p "$CACHE_DIR/$CACHE_KEY"
for lib in "${@:2}"; do
cp "$BUN_DEPS_OUT_DIR/$lib" "$CACHE_DIR/$CACHE_KEY/$lib"
printf "%s %s - cached\n" "$script" "$lib"
done
fi
BUILT_ANY=1
}
dep boringssl libcrypto.a libssl.a libdecrepit.a
dep cares libcares.a
dep libarchive libarchive.a
dep lolhtml liblolhtml.a
dep mimalloc-debug libmimalloc-debug.a libmimalloc-debug.o
dep mimalloc libmimalloc.a libmimalloc.o
dep tinycc libtcc.a
dep zlib libz.a
dep zstd libzstd.a
dep lshpack liblshpack.a
dep boringssl boringssl libcrypto.a libssl.a libdecrepit.a
dep c-ares cares libcares.a
dep libarchive libarchive libarchive.a
dep lol-html lolhtml liblolhtml.a
dep mimalloc mimalloc-debug libmimalloc-debug.a libmimalloc-debug.o
dep mimalloc mimalloc libmimalloc.a libmimalloc.o
dep tinycc tinycc libtcc.a
dep zlib zlib libz.a
dep zstd zstd libzstd.a
dep ls-hpack lshpack liblshpack.a
if [ "$BUILT_ANY" -eq 0 ]; then
printf "(run with -f to rebuild)\n"

0
scripts/build-boringssl.ps1 Normal file → Executable file
View File

View File

@@ -1,5 +1,5 @@
#!/usr/bin/env bash
set -euxo pipefail
set -exo pipefail
source $(dirname -- "${BASH_SOURCE[0]}")/env.sh
cd $BUN_DEPS_DIR/boringssl

29
scripts/build-bun-cpp.ps1 Executable file
View File

@@ -0,0 +1,29 @@
# Builds the C++ portion of bun on Windows via CMake + Ninja (BUN_CPP_ONLY).
# -Baseline targets older x64 CPUs; -Fast disables LTO for quicker iteration.
param (
[switch] $Baseline = $False,
[switch] $Fast = $False
)
$ErrorActionPreference = 'Stop' # Setting strict mode, similar to 'set -euo pipefail' in bash
$Tag = If ($Baseline) { "-Baseline" } Else { "" }
$UseBaselineBuild = If ($Baseline) { "ON" } Else { "OFF" }
$UseLto = If ($Fast) { "OFF" } Else { "ON" }
# $CANARY_REVISION = if (Test-Path build/.canary_revision) { Get-Content build/.canary_revision } else { "0" }
$CANARY_REVISION = 0
# Load the build environment (compilers, flags) and fetch sources before configuring.
.\scripts\env.ps1 $Tag
.\scripts\update-submodules.ps1
.\scripts\build-libuv.ps1 -CloneOnly $True
cd build
cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Release `
-DNO_CODEGEN=0 `
-DNO_CONFIGURE_DEPENDS=1 `
"-DUSE_BASELINE_BUILD=${UseBaselineBuild}" `
"-DUSE_LTO=${UseLto}" `
"-DCANARY=${CANARY_REVISION}" `
-DBUN_CPP_ONLY=1 $Flags
if ($LASTEXITCODE -ne 0) { throw "CMake configuration failed" }
.\compile-cpp-only.ps1 -v
if ($LASTEXITCODE -ne 0) { throw "C++ compilation failed" }

48
scripts/build-bun-cpp.sh Executable file
View File

@@ -0,0 +1,48 @@
#!/usr/bin/env bash
# Builds the C++ portion of bun (CMake + Ninja, BUN_CPP_ONLY).
# Flags: --fast/--no-lto disables LTO, --baseline targets nehalem,
# --cpu <target> overrides CPU_TARGET.
set -exo pipefail
source $(dirname -- "${BASH_SOURCE[0]}")/env.sh
export USE_LTO="${USE_LTO:-ON}"
# Default CPU target: native on arm64 hosts, haswell elsewhere.
case "$(uname -m)" in
aarch64|arm64)
export CPU_TARGET="${CPU_TARGET:-native}"
;;
*)
export CPU_TARGET="${CPU_TARGET:-haswell}"
;;
esac
while [[ $# -gt 0 ]]; do
case "$1" in
--fast|--no-lto)
export USE_LTO="OFF"
shift
;;
--baseline)
export CPU_TARGET="nehalem"
shift
;;
--cpu)
export CPU_TARGET="$2"
shift
shift
;;
*|-*|--*)
echo "Unknown option $1"
exit 1
;;
esac
done
mkdir -p build
cd build
mkdir -p tmp_modules tmp_functions js codegen
cmake .. \
-GNinja \
-DCMAKE_BUILD_TYPE=Release \
-DUSE_LTO=${USE_LTO} \
-DCPU_TARGET=${CPU_TARGET} \
-DBUN_CPP_ONLY=1 \
-DNO_CONFIGURE_DEPENDS=1
chmod +x ./compile-cpp-only.sh
bash ./compile-cpp-only.sh -v

95
scripts/build-bun-zig.sh Executable file
View File

@@ -0,0 +1,95 @@
#!/usr/bin/env bash
# Builds the Zig portion of bun (bun-zig.o) for a given target.
# Usage: build-bun-zig.sh [os] [arch] [cpu]  (defaults: linux x64 ${CPU_TARGET:-native})
set -exo pipefail
source $(dirname -- "${BASH_SOURCE[0]}")/env.sh
cwd=$(pwd)
zig=
if [[ "$CI" ]]; then
# Since the zig build depends on files from the zig submodule,
# make sure to update the submodule before building.
git submodule update --init --recursive --progress --depth=1 --checkout src/deps/zig
# Also update the correct version of zig in the submodule.
$(dirname -- "${BASH_SOURCE[0]}")/download-zig.sh
fi
# Prefer the pinned zig under .cache, falling back to whatever is on PATH.
if [ -f "$cwd/.cache/zig/zig" ]; then
zig="$cwd/.cache/zig/zig"
else
zig=$(which zig)
fi
ZIG_OPTIMIZE="${ZIG_OPTIMIZE:-ReleaseFast}"
CANARY="${CANARY:-0}"
GIT_SHA="${GIT_SHA:-$(git rev-parse HEAD)}"
# Normalize the host architecture name (x86_64/amd64, aarch64/arm64).
BUILD_MACHINE_ARCH="${BUILD_MACHINE_ARCH:-$(uname -m)}"
DOCKER_MACHINE_ARCH=""
if [[ "$BUILD_MACHINE_ARCH" == "x86_64" || "$BUILD_MACHINE_ARCH" == "amd64" ]]; then
BUILD_MACHINE_ARCH="x86_64"
DOCKER_MACHINE_ARCH="amd64"
elif [[ "$BUILD_MACHINE_ARCH" == "aarch64" || "$BUILD_MACHINE_ARCH" == "arm64" ]]; then
BUILD_MACHINE_ARCH="aarch64"
DOCKER_MACHINE_ARCH="arm64"
fi
# Normalize the requested target OS/arch and derive the zig target triple.
TARGET_OS="${1:-linux}"
TARGET_ARCH="${2:-x64}"
TARGET_CPU="${3:-${CPU_TARGET:-native}}"
BUILDARCH=""
if [[ "$TARGET_ARCH" == "x64" || "$TARGET_ARCH" == "x86_64" || "$TARGET_ARCH" == "amd64" ]]; then
TARGET_ARCH="x86_64"
BUILDARCH="amd64"
elif [[ "$TARGET_ARCH" == "aarch64" || "$TARGET_ARCH" == "arm64" ]]; then
TARGET_ARCH="aarch64"
BUILDARCH="arm64"
fi
TRIPLET=""
if [[ "$TARGET_OS" == "linux" ]]; then
TRIPLET="$TARGET_ARCH-linux-gnu"
elif [[ "$TARGET_OS" == "darwin" ]]; then
TRIPLET="$TARGET_ARCH-macos-none"
elif [[ "$TARGET_OS" == "windows" ]]; then
TRIPLET="$TARGET_ARCH-windows-msvc"
fi
# Pre-build the generated inputs the zig compilation consumes.
echo "--- Building identifier-cache"
$zig run src/js_lexer/identifier_data.zig
echo "--- Building node-fallbacks"
cd src/node-fallbacks
bun install --frozen-lockfile
bun run build
cd "$cwd"
echo "--- Building codegen"
bun install --frozen-lockfile
make runtime_js fallback_decoder bun_error
echo "--- Building modules"
mkdir -p build
bun run src/codegen/bundle-modules.ts --debug=OFF build
echo "--- Building zig"
cd build
cmake .. \
-GNinja \
-DCMAKE_BUILD_TYPE=Release \
-DUSE_LTO=ON \
-DZIG_OPTIMIZE="${ZIG_OPTIMIZE}" \
-DGIT_SHA="${GIT_SHA}" \
-DARCH="${TARGET_ARCH}" \
-DBUILDARCH="${BUILDARCH}" \
-DCPU_TARGET="${TARGET_CPU}" \
-DZIG_TARGET="${TRIPLET}" \
-DASSERTIONS="OFF" \
-DWEBKIT_DIR="omit" \
-DNO_CONFIGURE_DEPENDS=1 \
-DNO_CODEGEN=1 \
-DBUN_ZIG_OBJ_DIR="$cwd/build" \
-DCANARY="$CANARY" \
-DZIG_LIB_DIR=src/deps/zig/lib
ONLY_ZIG=1 ninja "$cwd/build/bun-zig.o" -v

0
scripts/build-cares.ps1 Normal file → Executable file
View File

View File

@@ -1,5 +1,5 @@
#!/usr/bin/env bash
set -euxo pipefail
set -exo pipefail
source $(dirname -- "${BASH_SOURCE[0]}")/env.sh
cd $BUN_DEPS_DIR/c-ares

0
scripts/build-libarchive.ps1 Normal file → Executable file
View File

View File

@@ -1,5 +1,5 @@
#!/usr/bin/env bash
set -euxo pipefail
set -exo pipefail
source $(dirname -- "${BASH_SOURCE[0]}")/env.sh
mkdir -p $BUN_DEPS_OUT_DIR

0
scripts/build-libuv.ps1 Normal file → Executable file
View File

0
scripts/build-lolhtml.ps1 Normal file → Executable file
View File

View File

@@ -1,5 +1,5 @@
#!/usr/bin/env bash
set -euxo pipefail
set -exo pipefail
source $(dirname -- "${BASH_SOURCE[0]}")/env.sh
cd $BUN_DEPS_DIR/lol-html/c-api

0
scripts/build-lshpack.ps1 Normal file → Executable file
View File

2
scripts/build-lshpack.sh Normal file → Executable file
View File

@@ -1,5 +1,5 @@
#!/usr/bin/env bash
set -euxo pipefail
set -exo pipefail
source $(dirname -- "${BASH_SOURCE[0]}")/env.sh
mkdir -p $BUN_DEPS_OUT_DIR

View File

@@ -1,5 +1,5 @@
#!/usr/bin/env bash
set -euxo pipefail
set -exo pipefail
source "$(dirname -- "${BASH_SOURCE[0]}")/env.sh"
MIMALLOC_OVERRIDE_FLAG=${MIMALLOC_OVERRIDE_FLAG:-}

0
scripts/build-mimalloc.ps1 Normal file → Executable file
View File

View File

@@ -1,5 +1,5 @@
#!/usr/bin/env bash
set -euxo pipefail
set -exo pipefail
source "$(dirname -- "${BASH_SOURCE[0]}")/env.sh"
MIMALLOC_OVERRIDE_FLAG=${MIMALLOC_OVERRIDE_FLAG:-}

0
scripts/build-tinycc.ps1 Normal file → Executable file
View File

View File

@@ -1,5 +1,5 @@
#!/usr/bin/env bash
set -euxo pipefail
set -exo pipefail
source $(dirname -- "${BASH_SOURCE[0]}")/env.sh
mkdir -p $BUN_DEPS_OUT_DIR

0
scripts/build-zlib.ps1 Normal file → Executable file
View File

View File

@@ -1,5 +1,5 @@
#!/usr/bin/env bash
set -euxo pipefail
set -exo pipefail
source $(dirname -- "${BASH_SOURCE[0]}")/env.sh
mkdir -p $BUN_DEPS_OUT_DIR

0
scripts/build-zstd.ps1 Normal file → Executable file
View File

View File

@@ -1,5 +1,5 @@
#!/usr/bin/env bash
set -euxo pipefail
set -exo pipefail
source $(dirname -- "${BASH_SOURCE[0]}")/env.sh
mkdir -p $BUN_DEPS_OUT_DIR

0
scripts/build.ps1 Normal file → Executable file
View File

58
scripts/buildkite-link-bun.ps1 Executable file
View File

@@ -0,0 +1,58 @@
# Links the final bun.exe on Windows from artifacts produced by the
# -build-zig, -build-cpp and -build-deps Buildkite steps, then packages
# release and profile zips and emits features.json.
param (
[switch] $Baseline = $False,
[switch] $Fast = $False
)
$ErrorActionPreference = 'Stop' # Setting strict mode, similar to 'set -euo pipefail' in bash
$Tag = If ($Baseline) { "bun-windows-x64-baseline" } Else { "bun-windows-x64" }
$TagSuffix = If ($Baseline) { "-Baseline" } Else { "" }
$UseBaselineBuild = If ($Baseline) { "ON" } Else { "OFF" }
$UseLto = If ($Fast) { "OFF" } Else { "ON" }
.\scripts\env.ps1 $TagSuffix
mkdir -Force build
# Pull the build artifacts from the earlier pipeline steps.
buildkite-agent artifact download "**" build --step "${Tag}-build-zig"
buildkite-agent artifact download "**" build --step "${Tag}-build-cpp"
buildkite-agent artifact download "**" build --step "${Tag}-build-deps"
mv -Force -ErrorAction SilentlyContinue build\build\bun-deps\* build\bun-deps
mv -Force -ErrorAction SilentlyContinue build\build\* build
Set-Location build
$CANARY_REVISION = 0
# Link-only configure that consumes the prebuilt objects and archives.
cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Release `
-DNO_CODEGEN=1 `
-DNO_CONFIGURE_DEPENDS=1 `
"-DCPU_TARGET=${CPU_TARGET}" `
"-DCANARY=${CANARY_REVISION}" `
-DBUN_LINK_ONLY=1 `
"-DUSE_BASELINE_BUILD=${UseBaselineBuild}" `
"-DUSE_LTO=${UseLto}" `
"-DBUN_DEPS_OUT_DIR=$(Resolve-Path bun-deps)" `
"-DBUN_CPP_ARCHIVE=$(Resolve-Path bun-cpp-objects.a)" `
"-DBUN_ZIG_OBJ_DIR=$(Resolve-Path .)" `
"$Flags"
if ($LASTEXITCODE -ne 0) { throw "CMake configuration failed" }
ninja -v
if ($LASTEXITCODE -ne 0) { throw "Link failed!" }
ls
# Non-LTO builds get a distinct artifact name.
if ($UseLto -eq "OFF") {
$Tag = "$Tag-no-lto"
}
Set-Location ..
$Dist = mkdir -Force "${Tag}"
cp -r build\bun.exe "$Dist\bun.exe"
Compress-Archive -Force "$Dist" "${Dist}.zip"
# Also package a -profile variant that carries the PDB for debugging.
$Dist = "$Dist-profile"
MkDir -Force "$Dist"
cp -r build\bun.exe "$Dist\bun.exe"
cp -r build\bun.pdb "$Dist\bun.pdb"
Compress-Archive -Force "$Dist" "$Dist.zip"
$env:BUN_GARBAGE_COLLECTOR_LEVEL = "1"
$env:BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING = "1"
.\build\bun.exe --print "JSON.stringify(require('bun:internal-for-testing').crash_handler.getFeatureData())" > .\features.json

80
scripts/buildkite-link-bun.sh Executable file
View File

@@ -0,0 +1,80 @@
#!/usr/bin/env bash
# Links the final bun binaries from artifacts produced by the Buildkite
# -build-deps, -build-zig and -build-cpp steps, then zips the release and
# profile builds. Requires --tag <name>; --fast/--no-lto, --baseline and
# --cpu <target> tune the link.
set -exo pipefail
source $(dirname -- "${BASH_SOURCE[0]}")/env.sh
export USE_LTO="${USE_LTO:-ON}"
case "$(uname -m)" in
aarch64|arm64)
export CPU_TARGET="${CPU_TARGET:-native}"
;;
*)
export CPU_TARGET="${CPU_TARGET:-haswell}"
;;
esac
export TAG=""
while [[ $# -gt 0 ]]; do
case "$1" in
--tag)
export TAG="$2"
shift
shift
;;
--fast|--no-lto)
export USE_LTO="OFF"
shift
;;
--baseline)
export CPU_TARGET="nehalem"
shift
;;
--cpu)
export CPU_TARGET="$2"
shift
shift
;;
*|-*|--*)
echo "Unknown option $1"
exit 1
;;
esac
done
if [[ -z "$TAG" ]]; then
echo "--tag <name> is required"
exit 1
fi
# Download all step artifacts into a clean release/ directory.
rm -rf release
mkdir -p release
buildkite-agent artifact download '**' release --step $TAG-build-deps
buildkite-agent artifact download '**' release --step $TAG-build-zig
buildkite-agent artifact download '**' release --step $TAG-build-cpp
cd release
# Link-only configure: consume the prebuilt zig object, C++ archive and deps.
cmake .. \
-GNinja \
-DCMAKE_BUILD_TYPE=Release \
-DCPU_TARGET=${CPU_TARGET} \
-DUSE_LTO=${USE_LTO} \
-DBUN_LINK_ONLY=1 \
-DBUN_ZIG_OBJ_DIR="$(pwd)/build" \
-DBUN_CPP_ARCHIVE="$(pwd)/build/bun-cpp-objects.a" \
-DBUN_DEPS_OUT_DIR="$(pwd)/build/bun-deps" \
-DNO_CONFIGURE_DEPENDS=1
ninja -v
# Non-LTO builds get a distinct artifact name.
if [[ "${USE_LTO}" == "OFF" ]]; then
TAG="${TAG}-no-lto"
fi
chmod +x bun-profile bun
mkdir -p bun-$TAG-profile/ bun-$TAG/
mv bun-profile bun-$TAG-profile/bun-profile
mv bun bun-$TAG/bun
zip -r bun-$TAG-profile.zip bun-$TAG-profile
zip -r bun-$TAG.zip bun-$TAG
cd ..
mv release/bun-$TAG.zip bun-$TAG.zip
mv release/bun-$TAG-profile.zip bun-$TAG-profile.zip

0
scripts/clean-dependencies.ps1 Normal file → Executable file
View File

0
scripts/download-webkit.ps1 Normal file → Executable file
View File

0
scripts/download-webkit.sh Normal file → Executable file
View File

2
scripts/download-zig.ps1 Normal file → Executable file
View File

@@ -23,7 +23,7 @@ try {
if (!(Test-Path $TarPath)) {
try {
Write-Host "-- Downloading Zig"
Invoke-WebRequest $Url -OutFile $TarPath
Invoke-RestMethod $Url -OutFile $TarPath
} catch {
Write-Error "Failed to fetch Zig from: $Url"
throw $_

0
scripts/download-zls.ps1 Normal file → Executable file
View File

36
scripts/env.ps1 Normal file → Executable file
View File

@@ -20,8 +20,12 @@ if ($env:VSINSTALLDIR -eq $null) {
}
$vsDir = (& $vswhere -prerelease -latest -property installationPath)
if ($vsDir -eq $null) {
throw "Visual Studio directory not found."
}
$vsDir = Get-ChildItem -Path "C:\Program Files\Microsoft Visual Studio\2022" -Directory
if ($vsDir -eq $null) {
throw "Visual Studio directory not found."
}
$vsDir = $vsDir.FullName;
}
Push-Location $vsDir
try {
Import-Module 'C:\Program Files (x86)\Microsoft Visual Studio\2022\BuildTools\Common7\Tools\Microsoft.VisualStudio.DevShell.dll'
@@ -41,7 +45,7 @@ $ENV:BUN_DEV_ENV_SET = "Baseline=$Baseline";
$BUN_BASE_DIR = if ($env:BUN_BASE_DIR) { $env:BUN_BASE_DIR } else { Join-Path $ScriptDir '..' }
$BUN_DEPS_DIR = if ($env:BUN_DEPS_DIR) { $env:BUN_DEPS_DIR } else { Join-Path $BUN_BASE_DIR 'src\deps' }
$BUN_DEPS_OUT_DIR = if ($env:BUN_DEPS_OUT_DIR) { $env:BUN_DEPS_OUT_DIR } else { $BUN_DEPS_DIR }
$BUN_DEPS_OUT_DIR = if ($env:BUN_DEPS_OUT_DIR) { $env:BUN_DEPS_OUT_DIR } else { Join-Path $BUN_BASE_DIR 'build\bun-deps' }
$CPUS = if ($env:CPUS) { $env:CPUS } else { (Get-CimInstance -Class Win32_Processor).NumberOfCores }
@@ -49,11 +53,12 @@ $CC = "clang-cl"
$CXX = "clang-cl"
$CFLAGS = '/O2'
# $CFLAGS = '/O2 /MT'
# $CFLAGS = '/O2 /Z7 /MT'
$CXXFLAGS = '/O2'
# $CXXFLAGS = '/O2 /MT'
# $CXXFLAGS = '/O2 /Z7 /MT'
$CPU_NAME = if ($Baseline) { "nehalem" } else { "haswell" };
$env:CPU_TARGET = $CPU_NAME
$CFLAGS += " -march=${CPU_NAME}"
$CXXFLAGS += " -march=${CPU_NAME}"
@@ -76,6 +81,25 @@ if ($Baseline) {
$CMAKE_FLAGS += "-DUSE_BASELINE_BUILD=ON"
}
$ccache=""
# if (Get-Command sccache -ErrorAction SilentlyContinue) {
# $ccache="sccache"
# $CMAKE_FLAGS += "-DCMAKE_MSVC_DEBUG_INFORMATION_FORMAT=Embedded"
# $CMAKE_FLAGS += "-DCMAKE_POLICY_CMP0141=NEW"
# # Continue with local compiler if sccache has an error
# $env:SCCACHE_IGNORE_SERVER_IO_ERROR = "1"
# }
if (Get-Command buildcache -ErrorAction SilentlyContinue) {
$ccache="buildcache"
}
if ($ccache) {
$CMAKE_FLAGS += "-DCMAKE_C_COMPILER_LAUNCHER=$ccache"
$CMAKE_FLAGS += "-DCMAKE_CXX_COMPILER_LAUNCHER=$ccache"
}
$null = New-Item -ItemType Directory -Force -Path $BUN_DEPS_OUT_DIR
function Run() {
@@ -99,4 +123,4 @@ function Run() {
if ($result -ne 0) {
throw "$command $commandArgs exited with code $result."
}
}
}

View File

@@ -1,14 +1,27 @@
#!/usr/bin/env bash
# hack for buildkite
if [[ "${CI:-}" == "1" || "${CI:-}" == "true" ]]; then
if [[ $(uname -s) == 'Darwin' ]]; then
export BUN_INSTALL="$HOME/.bun"
export PATH="$BUN_INSTALL/bin:$PATH"
export PATH="$(brew --prefix llvm@16)/bin:$PATH"
fi
if [ -f ~/.bashrc ]; then
source ~/.bashrc
fi
fi
# this is the environment script for building bun's dependencies
# it sets c compiler and flags
export SCRIPT_DIR=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &>/dev/null && pwd)
export BUN_BASE_DIR=${BUN_BASE_DIR:-$(cd $SCRIPT_DIR && cd .. && pwd)}
export BUN_DEPS_DIR=${BUN_DEPS_DIR:-$BUN_BASE_DIR/src/deps/}
export BUN_DEPS_OUT_DIR=${BUN_DEPS_OUT_DIR:-$BUN_BASE_DIR/src/deps/}
export BUN_DEPS_DIR=${BUN_DEPS_DIR:-$BUN_BASE_DIR/src/deps}
export BUN_DEPS_OUT_DIR=${BUN_DEPS_OUT_DIR:-$BUN_BASE_DIR/build/bun-deps}
# Silence a perl script warning
export LC_CTYPE="en_US.UTF-8"
export LC_ALL="en_US.UTF-8"
# export LC_CTYPE="en_US.UTF-8"
# export LC_ALL="en_US.UTF-8"
# this compiler detection could be better
export CC=${CC:-$(which clang-16 || which clang || which cc)}
@@ -23,20 +36,28 @@ export CFLAGS='-O3 -fno-exceptions -fvisibility=hidden -fvisibility-inlines-hidd
export CXXFLAGS='-O3 -fno-exceptions -fno-rtti -fvisibility=hidden -fvisibility-inlines-hidden -mno-omit-leaf-frame-pointer -fno-omit-frame-pointer'
export CMAKE_FLAGS=(
-DCMAKE_C_COMPILER="${CC}"
-DCMAKE_CXX_COMPILER="${CXX}"
-DCMAKE_C_FLAGS="$CFLAGS"
-DCMAKE_CXX_FLAGS="$CXXFLAGS"
-DCMAKE_BUILD_TYPE=Release
-DCMAKE_CXX_STANDARD=20
-DCMAKE_C_STANDARD=17
-DCMAKE_CXX_STANDARD_REQUIRED=ON
-DCMAKE_C_STANDARD_REQUIRED=ON
-DCMAKE_C_COMPILER="${CC}"
-DCMAKE_CXX_COMPILER="${CXX}"
-DCMAKE_C_FLAGS="$CFLAGS"
-DCMAKE_CXX_FLAGS="$CXXFLAGS"
-DCMAKE_BUILD_TYPE=Release
-DCMAKE_CXX_STANDARD=20
-DCMAKE_C_STANDARD=17
-DCMAKE_CXX_STANDARD_REQUIRED=ON
-DCMAKE_C_STANDARD_REQUIRED=ON
)
CCACHE=$(which ccache || which sccache || which buildcache || echo "")
if [ -n "$CCACHE" ]; then
CMAKE_FLAGS+=(
-DCMAKE_C_COMPILER_LAUNCHER="$CCACHE"
-DCMAKE_CXX_COMPILER_LAUNCHER="$CCACHE"
)
fi
if [[ $(uname -s) == 'Linux' ]]; then
# Ensure we always use -std=gnu++20 on Linux
export CMAKE_FLAGS+=(-DCMAKE_CXX_EXTENSIONS=ON)
# Ensure we always use -std=gnu++20 on Linux
CMAKE_FLAGS+=(-DCMAKE_CXX_EXTENSIONS=ON)
fi
if [[ $(uname -s) == 'Darwin' ]]; then
@@ -52,7 +73,10 @@ mkdir -p $BUN_DEPS_OUT_DIR
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
echo "C Compiler: ${CC}"
echo "C++ Compiler: ${CXX}"
if [ -n "$CCACHE" ]; then
echo "Ccache: ${CCACHE}"
fi
if [[ $(uname -s) == 'Darwin' ]]; then
echo "OSX Deployment Target: ${CMAKE_OSX_DEPLOYMENT_TARGET}"
echo "OSX Deployment Target: ${CMAKE_OSX_DEPLOYMENT_TARGET}"
fi
fi

300
scripts/experimental-build.mjs Executable file
View File

@@ -0,0 +1,300 @@
#! /usr/bin/env node
import {} from "node:fs/promises";
import { spawn, spawnSync } from "node:child_process";
import { copyFileSync, existsSync, mkdirSync, mkdtempSync, readFileSync, readdirSync, writeFileSync } from "node:fs";
import { basename, dirname, join } from "node:path";
import { tmpdir } from "node:os";
// Resolve the repository root from this script's location (scripts/ -> project root).
const projectPath = dirname(import.meta.dirname);
// Downloaded toolchains are cached here unless BUN_VENDOR_PATH overrides it.
const vendorPath = process.env.BUN_VENDOR_PATH || join(projectPath, "vendor");
const isWindows = process.platform === "win32";
const isMacOS = process.platform === "darwin";
const isLinux = process.platform === "linux";
// Child-process timeouts: 1 minute for sync calls, 3 minutes for async spawns.
const spawnSyncTimeout = 1000 * 60;
const spawnTimeout = 1000 * 60 * 3;
/**
 * Spawns a command, mirrors its output to this process, and captures it.
 * @param {string} command
 * @param {string[]} args
 * @param {object} [options] - extra spawn options; `throwOnError: false` suppresses failures
 * @returns {Promise<{exitCode: number, signalCode: string | null, stdout: string, stderr: string} | undefined>}
 * @throws {Error} when the process fails, unless `options.throwOnError === false`
 */
async function spawnSafe(command, args, options = {}) {
  const result = new Promise((resolve, reject) => {
    let stdout = "";
    let stderr = "";
    let subprocess;
    try {
      subprocess = spawn(command, args, {
        stdio: ["ignore", "pipe", "pipe"],
        timeout: spawnTimeout,
        ...options,
      });
      subprocess.on("error", reject);
      // Listen for "close" instead of "exit": "exit" can fire while the stdio
      // streams are still flushing, which would truncate the captured
      // stdout/stderr used below.
      subprocess.on("close", (exitCode, signalCode) => {
        if (exitCode !== 0 || signalCode) {
          const reason = signalCode || `code ${exitCode}`;
          const cause = stderr || stdout;
          reject(new Error(`Process exited with ${reason}`, { cause }));
        } else {
          resolve({ exitCode, signalCode, stdout, stderr });
        }
      });
      subprocess?.stdout?.on("data", chunk => {
        process.stdout.write(chunk);
        stdout += chunk.toString("utf-8");
      });
      subprocess?.stderr?.on("data", chunk => {
        process.stderr.write(chunk);
        stderr += chunk.toString("utf-8");
      });
    } catch (cause) {
      reject(cause);
    }
  });
  try {
    return await result;
  } catch (cause) {
    if (options.throwOnError === false) {
      return;
    }
    const description = `${command} ${args.join(" ")}`;
    throw new Error(`Command failed: ${description}`, { cause });
  }
}
/**
 * Runs a command synchronously and returns its stdout.
 * @param {string} command
 * @param {string[]} args
 * @param {object} [options] - extra spawnSync options; `throwOnError: false` suppresses failures
 * @returns {string | undefined} captured stdout, or undefined when suppressed
 * @throws {Error} when the process fails, unless `options.throwOnError === false`
 */
function spawnSyncSafe(command, args, options = {}) {
  try {
    const result = spawnSync(command, args, {
      stdio: ["ignore", "pipe", "pipe"],
      encoding: "utf-8",
      timeout: spawnSyncTimeout,
      ...options,
    });
    if (result.error) {
      throw result.error;
    }
    const { status, signal, stdout, stderr } = result;
    if (signal || status !== 0) {
      const reason = signal || `code ${status}`;
      throw new Error(`Process exited with ${reason}`, { cause: stderr || stdout });
    }
    return stdout;
  } catch (cause) {
    if (options.throwOnError === false) {
      return;
    }
    throw new Error(`Command failed: ${command} ${args.join(" ")}`, { cause });
  }
}
/**
 * Fetches a URL and optionally decodes the body.
 * @param {string} url
 * @param {object} [options] - fetch options plus `format` ("json" | "text" | "bytes")
 *                             and `throwOnError: false` to suppress failures
 * @returns {Promise<any>} decoded body per `format`, or the raw Response
 * @throws {Error} on network failure or non-2xx status, unless suppressed
 */
async function fetchSafe(url, options = {}) {
  let response;
  try {
    response = await fetch(url, options);
    if (!response.ok) {
      const body = await response.text();
      throw new Error(`${response.status} ${response.statusText}`, { cause: body });
    }
    const { format } = options;
    if (format === "json") {
      return await response.json();
    }
    if (format === "text") {
      return await response.text();
    }
    if (format === "bytes") {
      return new Uint8Array(await response.arrayBuffer());
    }
    return response;
  } catch (cause) {
    if (options.throwOnError === false) {
      // Best-effort mode: hand back whatever response (if any) we got.
      return response;
    }
    throw new Error(`Fetch failed: ${url}`, { cause });
  }
}
/**
 * Locates an executable by searching a PATH string.
 * @param {string} command
 * @param {string} [path] - PATH-style search string; defaults to process.env.PATH
 * @returns {string | undefined} resolved executable path, or undefined when not found
 */
function which(command, path) {
  const finder = isWindows ? "where" : "which";
  const output = spawnSyncSafe(finder, [command], {
    throwOnError: false,
    env: {
      PATH: path || process.env.PATH,
    },
  });
  if (!output) {
    return;
  }
  if (isWindows) {
    // `where` can print several matches, one per line; keep only the first.
    const [firstMatch] = output.split("\r\n");
    return firstMatch;
  }
  return output.trimEnd();
}
/**
 * Maps a Node.js platform/arch pair to the matching Zig release target name.
 * @param {string} [os] - defaults to process.platform
 * @param {string} [arch] - defaults to process.arch
 * @returns {string} e.g. "linux-x86_64"
 * @throws {Error} for unsupported combinations (e.g. Windows on arm64)
 */
function getZigTarget(os = process.platform, arch = process.arch) {
  const targets = {
    x64: { linux: "linux-x86_64", darwin: "macos-x86_64", win32: "windows-x86_64" },
    arm64: { linux: "linux-aarch64", darwin: "macos-aarch64" },
  };
  const target = targets[arch]?.[os];
  if (!target) {
    throw new Error(`Unsupported zig target: os=${os}, arch=${arch}`);
  }
  return target;
}
/**
 * Reads the pinned `recommended_zig_version` string out of build.zig.
 * @returns {string} the recommended Zig version
 * @throws {Error} when build.zig is unreadable or the marker is missing
 */
function getRecommendedZigVersion() {
  const buildZigPath = join(projectPath, "build.zig");
  try {
    const contents = readFileSync(buildZigPath, "utf-8");
    const found = /recommended_zig_version = "([^"]+)"/.exec(contents);
    if (!found) {
      throw new Error("File does not contain string: 'recommended_zig_version'");
    }
    return found[1];
  } catch (cause) {
    throw new Error("Failed to find recommended Zig version", { cause });
  }
}
/**
 * Fetches the current Zig master version from ziglang.org's download index.
 * @returns {Promise<string>}
 * @throws {Error} when the index cannot be fetched or parsed
 */
async function getLatestZigVersion() {
  try {
    const index = await fetchSafe("https://ziglang.org/download/index.json", { format: "json" });
    return index.master.version;
  } catch (cause) {
    throw new Error("Failed to get latest Zig version", { cause });
  }
}
/**
 * Asks an executable for its version string.
 * @param {string} execPath
 * @returns {string | undefined} trimmed version output, or undefined on failure
 */
function getVersion(execPath) {
  // zig reports its version via `zig version`; everything else uses `--version`.
  const isZig = /(?:zig)(?:\.exe)?/i.test(execPath);
  const output = spawnSyncSafe(execPath, isZig ? ["version"] : ["--version"], { throwOnError: false });
  if (!output) {
    return;
  }
  return output.trim();
}
/**
 * Picks the temporary directory used for downloads.
 * @returns {string}
 */
function getTmpdir() {
  // On macOS, prefer the short /tmp path over the per-user $TMPDIR location.
  const preferSlashTmp = isMacOS && existsSync("/tmp");
  return preferSlashTmp ? "/tmp" : tmpdir();
}
/**
 * Creates a fresh "bun-" prefixed directory under the temp directory.
 * @returns {string} absolute path of the new directory
 */
function mkTmpdir() {
  const prefix = join(getTmpdir(), "bun-");
  return mkdtempSync(prefix);
}
/**
 * Downloads a URL to disk.
 * @param {string} url
 * @param {string} [path] - destination; defaults to a fresh temp file named after the URL
 * @returns {Promise<string>} path of the written file
 */
async function downloadFile(url, path) {
  const destination = path || join(mkTmpdir(), basename(url));
  const contents = await fetchSafe(url, { format: "bytes" });
  mkdirSync(dirname(destination), { recursive: true });
  writeFileSync(destination, contents);
  return destination;
}
/**
 * Extracts a tarball into a directory, stripping its top-level folder.
 * @param {string} tarPath
 * @param {string} [path] - destination; defaults to a fresh temp directory
 * @returns {Promise<string>} the extraction directory
 */
async function extractFile(tarPath, path) {
  const destination = path || join(mkTmpdir(), basename(tarPath));
  mkdirSync(destination, { recursive: true });
  await spawnSafe("tar", ["-xf", tarPath, "-C", destination, "--strip-components=1"]);
  return destination;
}
// Registry of external tools this script can resolve; each entry carries the
// pinned version and an async download(version) fallback used on cache miss.
const dependencies = [
{
name: "zig",
version: getRecommendedZigVersion(),
download: downloadZig,
},
];
/**
 * Resolves a registered dependency to an executable path, reusing a vendored
 * copy when its version matches and downloading it otherwise.
 * @param {string} name - must match an entry in `dependencies`
 * @returns {Promise<string>} path to the dependency's executable
 * @throws {Error} for unknown names or when no copy exists and no download is defined
 */
async function getDependencyPath(name) {
  const dependency = dependencies.find(entry => entry.name === name);
  if (!dependency) {
    throw new Error(`Unknown dependency: ${name}`);
  }
  const { version, download } = dependency;
  mkdirSync(vendorPath, { recursive: true });
  // Reuse any vendored copy whose reported version matches the pin.
  for (const filename of readdirSync(vendorPath)) {
    if (!filename.startsWith(name)) {
      continue;
    }
    const candidate = join(vendorPath, filename);
    if (getVersion(candidate) === version) {
      return candidate;
    }
  }
  if (!download) {
    throw new Error(`Dependency not found: ${name}`);
  }
  return await download(version);
}
/**
 * Downloads a Zig toolchain and installs its binary into the vendor directory.
 * @param {string} [version] - version to fetch; defaults to the recommended one
 * @returns {Promise<string>} path of the installed zig executable
 */
async function downloadZig(version) {
  const target = getZigTarget();
  const wantedVersion = version || getRecommendedZigVersion();
  const url = `https://ziglang.org/builds/zig-${target}-${wantedVersion}.tar.xz`;
  const archivePath = await downloadFile(url);
  const unpackedPath = await extractFile(archivePath);
  const binaryPath = join(unpackedPath, exePath("zig"));
  // Name the installed binary after the version it actually reports.
  const installedVersion = getVersion(binaryPath);
  const installPath = join(vendorPath, exePath(`zig-${installedVersion}`));
  mkdirSync(dirname(installPath), { recursive: true });
  // NOTE(review): only the zig binary is copied here, not its lib/ directory —
  // confirm the standalone binary works for the commands this script runs.
  copyFileSync(binaryPath, installPath);
  return installPath;
}
/**
 * Appends ".exe" to a path on Windows; returns it unchanged elsewhere.
 * @param {string} path
 * @returns {string}
 */
function exePath(path) {
  if (isWindows) {
    return `${path}.exe`;
  }
  return path;
}
// Entry point: resolve (downloading if necessary) the pinned Zig and print its path.
const execPath = await getDependencyPath("zig");
console.log(execPath);

View File

@@ -2,7 +2,7 @@
# this script is the magic script to configure your devenv for making a patch to WebKit
# once you are done with the patch you can run this again with --undo
# you can also run this with --danger-reset to force reset the submodule (danger)
set -euo pipefail
set -exo pipefail
cd "$(dirname "$0")/.."

0
scripts/internal-test.ps1 Normal file → Executable file
View File

0
scripts/make-old-js.ps1 Normal file → Executable file
View File

1682
scripts/runner.node.mjs Executable file

File diff suppressed because it is too large Load Diff

0
scripts/set-webkit-submodule-to-cmake.ps1 Normal file → Executable file
View File

View File

@@ -1,5 +1,5 @@
#!/usr/bin/env bash
set -euo pipefail
set -exo pipefail
cd -- "$(dirname -- "${BASH_SOURCE[0]}")/.."

0
scripts/setup.ps1 Normal file → Executable file
View File

View File

@@ -10,5 +10,5 @@ if ! [ "$1" == '--webkit' ]; then
fi
fi
set -euxo pipefail
set -exo pipefail
git submodule update --init --recursive --progress --depth=1 --checkout $NAMES

View File

@@ -1,5 +1,5 @@
#!/bin/bash
set -euxo pipefail
set -exo pipefail
WEBKIT_VERSION=$(git rev-parse HEAD:./src/bun.js/WebKit)
MIMALLOC_VERSION=$(git rev-parse HEAD:./src/deps/mimalloc)

View File

@@ -1,8 +1,7 @@
// @ts-nocheck
import path from "path";
import type { Field, ClassDefinition } from "./class-definitions";
import { writeIfNotChanged } from "./helpers";
import { camelCase, pascalCase } from "change-case";
import { writeIfNotChanged, camelCase, pascalCase } from "./helpers";
if (process.env.BUN_SILENT === "1") {
console.log = () => {};

View File

@@ -105,3 +105,13 @@ export function pathToUpperSnakeCase(filepath: string) {
.join("_")
.toUpperCase();
}
/** Converts a space- or underscore-separated string to camelCase. */
export function camelCase(string: string) {
  return (
    string
      .split(/[\s_]/)
      .map((e, i) => (i ? e.charAt(0).toUpperCase() + e.slice(1).toLowerCase() : e.toLowerCase()))
      // join the mapped words back together; without this the function
      // returned a string[] instead of a camelCase identifier
      .join("")
  );
}
/** Converts a space- or underscore-separated string to PascalCase. */
export function pascalCase(string: string) {
  // Capitalize every word (including the first — the previous version
  // lower-cased it, which is camelCase) and join the words back together
  // (previously the un-joined string[] was returned).
  return string
    .split(/[\s_]/)
    .map(e => e.charAt(0).toUpperCase() + e.slice(1))
    .join("");
}

Binary file not shown.

View File

@@ -79,7 +79,7 @@ describe("Bun.build", () => {
test("rebuilding busts the directory entries cache", () => {
Bun.gc(true);
const { exitCode, stderr } = Bun.spawnSync({
cmd: [bunExe(), join(import.meta.dir, "bundler-reloader-script.ts")],
cmd: [bunExe(), join(import.meta.dir, "fixtures", "bundler-reloader-script.ts")],
env: bunEnv,
stderr: "pipe",
stdout: "inherit",
@@ -263,34 +263,6 @@ describe("Bun.build", () => {
expect(x.logs[0].position).toBeTruthy();
});
test("test bun target", async () => {
const x = await Bun.build({
entrypoints: [join(import.meta.dir, "./fixtures/trivial/bundle-ws.ts")],
target: "bun",
});
expect(x.success).toBe(true);
const [blob] = x.outputs;
const content = await blob.text();
// use bun's ws
expect(content).toContain('import {WebSocket} from "ws"');
expect(content).not.toContain("var websocket = __toESM(require_websocket(), 1);");
});
test("test node target, issue #3844", async () => {
const x = await Bun.build({
entrypoints: [join(import.meta.dir, "./fixtures/trivial/bundle-ws.ts")],
target: "node",
});
expect(x.success).toBe(true);
const [blob] = x.outputs;
const content = await blob.text();
expect(content).not.toContain('import {WebSocket} from "ws"');
// depends on the ws package in the test/node_modules.
expect(content).toContain("var websocket = __toESM(require_websocket(), 1);");
});
test("module() throws error", async () => {
expect(() =>
Bun.build({

View File

@@ -1,7 +1,6 @@
import assert from "assert";
import dedent from "dedent";
import { itBundled, testForFile } from "./expectBundled";
var { describe, test, expect } = testForFile(import.meta.path);
import { itBundled } from "./expectBundled";
import { describe, expect } from "bun:test";
describe("bundler", () => {
const nodePolyfillList = {

View File

@@ -1,9 +1,6 @@
import assert from "assert";
import dedent from "dedent";
import { ESBUILD, itBundled, testForFile } from "./expectBundled";
import { itBundled } from "./expectBundled";
import { Database } from "bun:sqlite";
import { isWindows } from "harness";
var { describe, test, expect } = testForFile(import.meta.path);
import { describe, expect } from "bun:test";
describe("bundler", () => {
itBundled("bun/embedded-sqlite-file", {
@@ -82,9 +79,8 @@ describe("bundler", () => {
run: {
exitCode: 1,
validate({ stderr }) {
assert(
stderr.startsWith(
`1 | // this file has comments and weird whitespace, intentionally
expect(stderr).toStartWith(
`1 | // this file has comments and weird whitespace, intentionally
2 | // to make it obvious if sourcemaps were generated and mapped properly
3 | if (true) code();
4 | function code() {
@@ -92,7 +88,6 @@ describe("bundler", () => {
6 | throw new
^
error: Hello World`,
) || void console.error(stderr),
);
expect(stderr).toInclude("entry.ts:6:19");
},

View File

@@ -1,7 +1,5 @@
import assert from "assert";
import dedent from "dedent";
import { itBundled, testForFile } from "./expectBundled";
var { describe, test, expect } = testForFile(import.meta.path);
import { itBundled } from "./expectBundled";
import { describe, expect } from "bun:test";
const fakeReactNodeModules = {
"/node_modules/react/index.js": /* js */ `

View File

@@ -1,9 +1,6 @@
import assert from "assert";
import dedent from "dedent";
import { ESBUILD, itBundled, testForFile } from "./expectBundled";
import { itBundled } from "./expectBundled";
import { Database } from "bun:sqlite";
import { fillRepeating } from "harness";
var { describe, test, expect } = testForFile(import.meta.path);
import { describe } from "bun:test";
describe("bundler", () => {
itBundled("compile/HelloWorld", {

View File

@@ -1,6 +1,5 @@
import assert from "assert";
import { itBundled, testForFile } from "./expectBundled";
var { describe, test, expect } = testForFile(import.meta.path);
import { itBundled } from "./expectBundled";
import { describe } from "bun:test";
const reflectMetadata = `
var Reflect2;

View File

@@ -1,8 +1,6 @@
import assert from "assert";
import dedent from "dedent";
import { sep, join } from "path";
import { itBundled, testForFile } from "./expectBundled";
var { describe, test, expect } = testForFile(import.meta.path);
import { join } from "node:path";
import { itBundled } from "./expectBundled";
import { describe, expect } from "bun:test";
describe("bundler", () => {
itBundled("edgecase/EmptyFile", {
@@ -37,7 +35,7 @@ describe("bundler", () => {
},
target: "bun",
run: {
stdout: `a${sep}b`,
stdout: join("a", "b"),
},
});
itBundled("edgecase/ImportStarFunction", {

View File

@@ -1,7 +1,5 @@
import assert from "assert";
import dedent from "dedent";
import { BundlerTestInput, itBundled, testForFile } from "./expectBundled";
var { describe, test, expect } = testForFile(import.meta.path);
import { BundlerTestInput, itBundled } from "./expectBundled";
import { describe, expect } from "bun:test";
const helpers = {
"/node_modules/bun-test-helpers/index.js": /* js */ `

View File

@@ -1,6 +1,6 @@
import { fileURLToPath, pathToFileURL } from "bun";
import { itBundled, testForFile } from "./expectBundled";
var { describe, test, expect } = testForFile(import.meta.path);
import { fileURLToPath } from "bun";
import { itBundled } from "./expectBundled";
import { describe } from "bun:test";
describe("bundler", async () => {
for (let target of ["bun", "node"] as const) {

View File

@@ -1,6 +1,5 @@
import assert from "assert";
import { itBundled, testForFile } from "./expectBundled";
var { describe, test, expect } = testForFile(import.meta.path);
import { itBundled } from "./expectBundled";
import { describe, expect } from "bun:test";
describe("bundler", () => {
itBundled("minify/TemplateStringFolding", {
@@ -122,7 +121,7 @@ describe("bundler", () => {
run: { stdout: "4 2 3\n4 5 3\n4 5 6" },
onAfterBundle(api) {
const code = api.readFile("/out.js");
assert([...code.matchAll(/var /g)].length === 1, "expected only 1 variable declaration statement");
expect([...code.matchAll(/var /g)]).toHaveLength(1);
},
});
itBundled("minify/Infinity", {

View File

@@ -1,7 +1,5 @@
import assert from "assert";
import dedent from "dedent";
import { ESBUILD, itBundled, testForFile } from "./expectBundled";
var { describe, test, expect } = testForFile(import.meta.path);
import { ESBUILD, itBundled } from "./expectBundled";
import { describe } from "bun:test";
describe("bundler", () => {
itBundled("naming/EntryNamingCollission", {

View File

@@ -1,9 +1,10 @@
import { ESBUILD, itBundled, testForFile } from "./expectBundled";
var { describe, test, expect } = testForFile(import.meta.path);
import { itBundled } from "./expectBundled";
import { describe } from "bun:test";
import { isWindows } from "harness";
describe("bundler", () => {
itBundled("npm/ReactSSR", {
todo: process.platform === "win32", // TODO(@paperdave)
todo: isWindows, // TODO(@paperdave)
install: ["react@18.3.1", "react-dom@18.3.1"],
files: {
"/entry.tsx": /* tsx */ `
@@ -25,14 +26,14 @@ describe("bundler", () => {
</html>
);
const port = 42001;
const port = 0;
using server = Bun.serve({
port,
async fetch(req) {
return new Response(await renderToReadableStream(<App />), headers);
},
});
const res = await fetch("http://localhost:" + port);
const res = await fetch("http://localhost:" + server.port);
if (res.status !== 200) throw "status error";
console.log(await res.text());
`,
@@ -61,7 +62,7 @@ describe("bundler", () => {
["react.development.js:696:''Component'", '1:7470:\'Component "%s"'],
["entry.tsx:6:'\"Content-Type\"'", '1:221669:"Content-Type"'],
["entry.tsx:11:'<html>'", "1:221925:void"],
["entry.tsx:23:'await'", "1:222030:await"],
["entry.tsx:23:'await'", "1:222026:await"],
],
},
},

View File

@@ -1,8 +1,6 @@
import assert from "assert";
import dedent from "dedent";
import path from "path";
import { itBundled, testForFile } from "./expectBundled";
var { describe, test, expect } = testForFile(import.meta.path);
import { join, resolve, dirname } from "node:path";
import { itBundled } from "./expectBundled";
import { describe, expect } from "bun:test";
describe("bundler", () => {
const loadFixture = {
@@ -35,7 +33,7 @@ describe("bundler", () => {
plugins(builder) {
builder.onResolve({ filter: /\.magic$/ }, args => {
return {
path: path.resolve(path.dirname(args.importer), args.path.replace(/\.magic$/, ".ts")),
path: resolve(dirname(args.importer), args.path.replace(/\.magic$/, ".ts")),
};
});
},
@@ -817,7 +815,7 @@ describe("bundler", () => {
plugins(build) {
const opts = (build as any).initialOptions;
expect(opts.bundle).toEqual(true);
expect(opts.entryPoints).toEqual([root + path.sep + "index.ts"]);
expect(opts.entryPoints).toEqual([join(root, "index.ts")]);
expect(opts.external).toEqual(["esbuild"]);
expect(opts.format).toEqual(undefined);
expect(opts.minify).toEqual(false);

View File

@@ -1,7 +1,5 @@
import assert from "assert";
import dedent from "dedent";
import { itBundled, testForFile } from "./expectBundled";
var { describe, test, expect } = testForFile(import.meta.path);
import { itBundled } from "./expectBundled";
import { describe } from "bun:test";
describe("bundler", () => {
// https://x.com/jeroendotdot/status/1740651288239460384?s=46&t=0Uhw6mmGT650_9M2pXUsCw

View File

@@ -1,7 +1,5 @@
import assert from "assert";
import dedent from "dedent";
import { itBundled, testForFile } from "./expectBundled";
var { describe, test, expect } = testForFile(import.meta.path);
import { itBundled, dedent } from "./expectBundled";
import { describe } from "bun:test";
interface TemplateStringTest {
expr: string;

View File

@@ -1,7 +1,6 @@
import { bunEnv, bunExe, tmpdirSync } from "harness";
import { describe, expect, test } from "bun:test";
import fs from "node:fs";
import { tmpdir } from "node:os";
import path from "node:path";
describe("bun build", () => {
@@ -18,34 +17,34 @@ describe("bun build", () => {
test("generating a standalone binary in nested path, issue #4195", () => {
function testCompile(outfile: string) {
const { exitCode } = Bun.spawnSync({
cmd: [
bunExe(),
"build",
path.join(import.meta.dir, "./fixtures/trivial/index.js"),
"--compile",
"--outfile",
outfile,
],
env: bunEnv,
});
expect(exitCode).toBe(0);
expect([
"build",
path.join(import.meta.dir, "./fixtures/trivial/index.js"),
"--compile",
"--outfile",
outfile,
]).toRun();
}
function testExec(outfile: string) {
const { exitCode } = Bun.spawnSync({
const { exitCode, stderr } = Bun.spawnSync({
cmd: [outfile],
env: bunEnv,
stdout: "pipe",
stderr: "pipe",
});
expect(stderr.toString("utf8")).toBeEmpty();
expect(exitCode).toBe(0);
}
const tmpdir = tmpdirSync();
{
const baseDir = `${tmpdir()}/bun-build-outfile-${Date.now()}`;
const baseDir = `${tmpdir}/bun-build-outfile-${Date.now()}`;
const outfile = path.join(baseDir, "index.exe");
testCompile(outfile);
testExec(outfile);
fs.rmSync(baseDir, { recursive: true, force: true });
}
{
const baseDir = `${tmpdir()}/bun-build-outfile2-${Date.now()}`;
const baseDir = `${tmpdir}/bun-build-outfile2-${Date.now()}`;
const outfile = path.join(baseDir, "b/u/n", "index.exe");
testCompile(outfile);
testExec(outfile);
@@ -57,15 +56,12 @@ describe("bun build", () => {
const tmp = tmpdirSync();
const src = path.join(tmp, "index.js");
fs.writeFileSync(src, '\ufeffconsole.log("hello world");', { encoding: "utf8" });
const { exitCode } = Bun.spawnSync({
cmd: [bunExe(), "build", src],
env: bunEnv,
});
expect(exitCode).toBe(0);
expect(["build", src]).toRun();
});
test("__dirname and __filename are printed correctly", () => {
const baseDir = `${tmpdir()}/bun-build-dirname-filename-${Date.now()}`;
const tmpdir = tmpdirSync();
const baseDir = `${tmpdir}/bun-build-dirname-filename-${Date.now()}`;
fs.mkdirSync(baseDir, { recursive: true });
fs.mkdirSync(path.join(baseDir, "我")), { recursive: true };
fs.writeFileSync(path.join(baseDir, "我", "我.ts"), "console.log(__dirname); console.log(__filename);");

File diff suppressed because it is too large Load Diff

View File

@@ -1,7 +1,5 @@
import assert from "assert";
import dedent from "dedent";
import { ESBUILD, itBundled, testForFile } from "../expectBundled";
var { describe, test, expect } = testForFile(import.meta.path);
import { itBundled, dedent } from "../expectBundled";
import { describe, expect } from "bun:test";
// Tests ported from:
// https://github.com/evanw/esbuild/blob/main/internal/bundler_tests/bundler_dce_test.go
@@ -1019,8 +1017,8 @@ describe("bundler", () => {
},
onAfterBundle(api) {
const code = api.readFile("/out.js");
assert(!code.includes("_yes"), "should not contain any *_yes variables");
assert(code.includes("var bare = foo(bar)"), "should contain `var bare = foo(bar)`");
expect(code).not.toContain("_yes"); // should not contain any *_yes variables
expect(code).toContain("var bare = foo(bar)"); // should contain `var bare = foo(bar)`
const keep = [
["at_no", true],
["new_at_no", true],
@@ -1048,8 +1046,8 @@ describe("bundler", () => {
for (const [name, pureComment] of keep) {
const regex = new RegExp(`${name}\\s*=[^\/\n]*(\\/\\*.*?\\*\\/)?`, "g");
const match = regex.exec(code);
assert(!!match, `should contain ${name}`);
assert(pureComment ? !!match[1] : !match[1], `should contain a pure comment for ${name}`);
expect(match).toBeTruthy(); // should contain ${name}
expect(pureComment ? !!match[1] : !match[1]).toBeTruthy(); // should contain a pure comment for ${name}
}
},
});
@@ -1204,10 +1202,7 @@ describe("bundler", () => {
dce: true,
onAfterBundle(api) {
const code = api.readFile("/out.js");
assert(
[...code.matchAll(/return/g)].length === 2,
"should remove 3 trailing returns and the arrow function return",
);
expect([...code.matchAll(/return/g)]).toHaveLength(2); // should remove 3 trailing returns and the arrow function return
},
});
itBundled("dce/ImportReExportOfNamespaceImport", {
@@ -2813,7 +2808,7 @@ describe("bundler", () => {
dce: true,
onAfterBundle(api) {
const code = api.readFile("/out.js");
assert([...code.matchAll(/\[\.\.\.args\]/g)].length === 2, "spread should be preserved");
expect([...code.matchAll(/\[\.\.\.args\]/g)]).toHaveLength(2); // spread should be preserved
},
});
itBundled("dce/TopLevelFunctionInliningWithSpread", {

View File

@@ -1,9 +1,8 @@
import assert from "assert";
import dedent from "dedent";
import { ESBUILD_PATH, RUN_UNCHECKED_TESTS, itBundled, testForFile } from "../expectBundled";
import { ESBUILD_PATH, itBundled, dedent } from "../expectBundled";
import { osSlashes } from "harness";
var { describe, test, expect } = testForFile(import.meta.path);
import { describe, expect } from "bun:test";
// Tests ported from:
// https://github.com/evanw/esbuild/blob/main/internal/bundler_tests/bundler_default_test.go

View File

@@ -1,7 +1,5 @@
import assert from "assert";
import dedent from "dedent";
import { itBundled, testForFile } from "../expectBundled";
var { describe, test, expect } = testForFile(import.meta.path);
import { itBundled } from "../expectBundled";
import { describe } from "bun:test";
// Tests ported from:
// https://github.com/evanw/esbuild

View File

@@ -1,6 +1,5 @@
import assert from "assert";
import { itBundled, testForFile } from "../expectBundled";
var { describe, test, expect } = testForFile(import.meta.path);
import { itBundled } from "../expectBundled";
import { describe } from "bun:test";
// Tests ported from:
// https://github.com/evanw/esbuild/blob/main/internal/bundler_tests/bundler_importstar_test.go

View File

@@ -1,6 +1,5 @@
import assert from "assert";
import { itBundled, testForFile } from "../expectBundled";
var { describe, test, expect } = testForFile(import.meta.path);
import { itBundled } from "../expectBundled";
import { describe } from "bun:test";
// Tests ported from:
// https://github.com/evanw/esbuild/blob/main/internal/bundler_tests/bundler_importstar_ts_test.go

View File

@@ -1,6 +1,5 @@
import fs from "fs";
import { itBundled, testForFile } from "../expectBundled";
var { describe, test, expect } = testForFile(import.meta.path);
import { itBundled } from "../expectBundled";
import { describe } from "bun:test";
// Tests ported from:
// https://github.com/evanw/esbuild/blob/main/internal/bundler_tests/bundler_loader_test.go

View File

@@ -1,13 +1,12 @@
import { itBundled, testForFile } from "../expectBundled";
var { describe, test, expect } = testForFile(import.meta.path);
import { itBundled } from "../expectBundled";
import { describe } from "bun:test";
// Tests ported from:
// https://github.com/evanw/esbuild/blob/main/internal/bundler_tests/bundler_lower_test.go
// For debug, all files are written to $TEMP/bun-bundle-tests/lower
describe("bundler", () => {
return;
describe.todo("bundler", () => {
itBundled("lower/LowerOptionalCatchNameCollisionNoBundle", {
// GENERATED
files: {

View File

@@ -1,5 +1,5 @@
import { itBundled, testForFile } from "../expectBundled";
var { describe, test, expect } = testForFile(import.meta.path);
import { itBundled } from "../expectBundled";
import { describe } from "bun:test";
// Tests ported from:
// https://github.com/evanw/esbuild/blob/main/internal/bundler_tests/bundler_packagejson_test.go

View File

@@ -1,7 +1,7 @@
import assert from "assert";
import { readdirSync } from "fs";
import { itBundled, testForFile } from "../expectBundled";
var { describe, test, expect } = testForFile(import.meta.path);
import { describe, expect } from "bun:test";
// Tests ported from:
// https://github.com/evanw/esbuild/blob/main/internal/bundler_tests/bundler_splitting_test.go

View File

@@ -1,6 +1,6 @@
import assert from "assert";
import { itBundled, testForFile } from "../expectBundled";
var { describe, test, expect } = testForFile(import.meta.path);
import { itBundled } from "../expectBundled";
import { describe, expect } from "bun:test";
// Tests ported from:
// https://github.com/evanw/esbuild/blob/main/internal/bundler_tests/bundler_ts_test.go

View File

@@ -1,5 +1,5 @@
import { itBundled, testForFile } from "../expectBundled";
var { describe, test, expect } = testForFile(import.meta.path);
import { itBundled } from "../expectBundled";
import { describe, test } from "bun:test";
// Tests ported from:
// https://github.com/evanw/esbuild/blob/main/internal/bundler_tests/bundler_tsconfig_test.go

View File

@@ -3,7 +3,7 @@
*/
import { existsSync, mkdirSync, mkdtempSync, readFileSync, rmSync, writeFileSync, readdirSync, realpathSync } from "fs";
import path from "path";
import { bunEnv, bunExe, joinP } from "harness";
import { bunEnv, bunExe, isDebug } from "harness";
import { tmpdir } from "os";
import { callerSourceOrigin } from "bun:jsc";
import { BuildConfig, BunPlugin, fileURLToPath } from "bun";
@@ -13,7 +13,7 @@ import * as esbuild from "esbuild";
import { SourceMapConsumer } from "source-map";
/** Dedent module does a bit too much with their stuff. we will be much simpler */
function dedent(str: string | TemplateStringsArray, ...args: any[]) {
export function dedent(str: string | TemplateStringsArray, ...args: any[]) {
// https://github.com/tc39/proposal-string-cooked#motivation
let single_string = String.raw({ raw: str }, ...args);
single_string = single_string.trim();
@@ -564,7 +564,8 @@ function expectBundled(
cwd: root,
});
if (!installProcess.success) {
throw new Error("Failed to install dependencies");
const reason = installProcess.signalCode || `code ${installProcess.exitCode}`;
throw new Error(`Failed to install dependencies: ${reason}`);
}
}
for (const [file, contents] of Object.entries(files)) {
@@ -1546,7 +1547,7 @@ export function itBundled(
id,
() => expectBundled(id, opts as any),
// sourcemap code is slow
opts.snapshotSourceMap ? 20_000 : undefined,
isDebug ? Infinity : opts.snapshotSourceMap ? 30_000 : undefined,
);
}
return ref;

View File

@@ -5,10 +5,12 @@
// That way, if the developer changes a file, we will see the change.
//
// 2. Checks the file descriptor count to make sure we're not leaking any files between re-builds.
import { tmpdir } from "os";
import { realpathSync, unlinkSync } from "fs";
import { join } from "path";
import { openSync, closeSync } from "fs";
const tmp = realpathSync(tmpdir());
const input = join(tmp, "input.js");
const mutate = join(tmp, "mutate.js");

View File

@@ -1,9 +1,12 @@
import { spawn } from "bun";
import { beforeEach, expect, it } from "bun:test";
import { bunExe, bunEnv, tmpdirSync, isDebug, isWindows } from "harness";
import { bunExe, bunEnv, tmpdirSync, isDebug } from "harness";
import { cpSync, readFileSync, renameSync, rmSync, unlinkSync, writeFileSync, copyFileSync } from "fs";
import { join } from "path";
const timeout = isDebug ? Infinity : 10_000;
const longTimeout = isDebug ? Infinity : 30_000;
let hotRunnerRoot: string = "",
cwd = "";
beforeEach(() => {
@@ -14,311 +17,331 @@ beforeEach(() => {
cwd = hotPath;
});
it("should hot reload when file is overwritten", async () => {
const root = hotRunnerRoot;
try {
var runner = spawn({
cmd: [bunExe(), "--hot", "run", root],
env: bunEnv,
cwd,
stdout: "pipe",
stderr: "inherit",
stdin: "ignore",
});
it(
"should hot reload when file is overwritten",
async () => {
const root = hotRunnerRoot;
try {
var runner = spawn({
cmd: [bunExe(), "--hot", "run", root],
env: bunEnv,
cwd,
stdout: "pipe",
stderr: "inherit",
stdin: "ignore",
});
var reloadCounter = 0;
var reloadCounter = 0;
async function onReload() {
writeFileSync(root, readFileSync(root, "utf-8"));
}
var str = "";
for await (const line of runner.stdout) {
str += new TextDecoder().decode(line);
var any = false;
if (!/\[#!root\].*[0-9]\n/g.test(str)) continue;
for (let line of str.split("\n")) {
if (!line.includes("[#!root]")) continue;
reloadCounter++;
str = "";
if (reloadCounter === 3) {
runner.unref();
runner.kill();
break;
}
expect(line).toContain(`[#!root] Reloaded: ${reloadCounter}`);
any = true;
async function onReload() {
writeFileSync(root, readFileSync(root, "utf-8"));
}
if (any) await onReload();
}
var str = "";
for await (const line of runner.stdout) {
str += new TextDecoder().decode(line);
var any = false;
if (!/\[#!root\].*[0-9]\n/g.test(str)) continue;
expect(reloadCounter).toBe(3);
} finally {
// @ts-ignore
runner?.unref?.();
// @ts-ignore
runner?.kill?.(9);
}
});
for (let line of str.split("\n")) {
if (!line.includes("[#!root]")) continue;
reloadCounter++;
str = "";
it("should recover from errors", async () => {
const root = hotRunnerRoot;
try {
var runner = spawn({
cmd: [bunExe(), "--hot", "run", root],
env: bunEnv,
cwd,
stdout: "pipe",
stderr: "pipe",
stdin: "ignore",
});
let reloadCounter = 0;
const input = readFileSync(root, "utf-8");
function onReloadGood() {
writeFileSync(root, input);
}
function onReloadError() {
writeFileSync(root, "throw new Error('error');\n");
}
var queue = [onReloadError, onReloadGood, onReloadError, onReloadGood];
var errors: string[] = [];
var onError: (...args: any[]) => void;
(async () => {
for await (let line of runner.stderr) {
var str = new TextDecoder().decode(line);
errors.push(str);
// @ts-ignore
onError && onError(str);
}
})();
var str = "";
for await (const line of runner.stdout) {
str += new TextDecoder().decode(line);
var any = false;
if (!/\[#!root\].*[0-9]\n/g.test(str)) continue;
for (let line of str.split("\n")) {
if (!line.includes("[#!root]")) continue;
reloadCounter++;
str = "";
if (reloadCounter === 3) {
runner.unref();
runner.kill();
break;
}
expect(line).toContain(`[#!root] Reloaded: ${reloadCounter}`);
any = true;
}
if (any) {
queue.shift()!();
await new Promise<void>((resolve, reject) => {
if (errors.length > 0) {
errors.length = 0;
resolve();
return;
if (reloadCounter === 3) {
runner.unref();
runner.kill();
break;
}
onError = resolve;
});
queue.shift()!();
}
}
expect(reloadCounter).toBe(3);
} finally {
// @ts-ignore
runner?.unref?.();
// @ts-ignore
runner?.kill?.(9);
}
});
it("should not hot reload when a random file is written", async () => {
const root = hotRunnerRoot;
try {
var runner = spawn({
cmd: [bunExe(), "--hot", "run", root],
env: bunEnv,
cwd,
stdout: "pipe",
stderr: "inherit",
stdin: "ignore",
});
let reloadCounter = 0;
const code = readFileSync(root, "utf-8");
async function onReload() {
writeFileSync(root + ".another.yet.js", code);
unlinkSync(root + ".another.yet.js");
}
var finished = false;
await Promise.race([
Bun.sleep(200),
(async () => {
if (finished) {
return;
expect(line).toContain(`[#!root] Reloaded: ${reloadCounter}`);
any = true;
}
var str = "";
for await (const line of runner.stdout) {
if (any) await onReload();
}
expect(reloadCounter).toBeGreaterThanOrEqual(3);
} finally {
// @ts-ignore
runner?.unref?.();
// @ts-ignore
runner?.kill?.(9);
}
},
timeout,
);
it(
"should recover from errors",
async () => {
const root = hotRunnerRoot;
try {
var runner = spawn({
cmd: [bunExe(), "--hot", "run", root],
env: bunEnv,
cwd,
stdout: "pipe",
stderr: "pipe",
stdin: "ignore",
});
let reloadCounter = 0;
const input = readFileSync(root, "utf-8");
function onReloadGood() {
writeFileSync(root, input);
}
function onReloadError() {
writeFileSync(root, "throw new Error('error');\n");
}
var queue = [onReloadError, onReloadGood, onReloadError, onReloadGood];
var errors: string[] = [];
var onError: (...args: any[]) => void;
(async () => {
for await (let line of runner.stderr) {
var str = new TextDecoder().decode(line);
errors.push(str);
// @ts-ignore
onError && onError(str);
}
})();
var str = "";
for await (const line of runner.stdout) {
str += new TextDecoder().decode(line);
var any = false;
if (!/\[#!root\].*[0-9]\n/g.test(str)) continue;
for (let line of str.split("\n")) {
if (!line.includes("[#!root]")) continue;
reloadCounter++;
str = "";
if (reloadCounter === 3) {
runner.unref();
runner.kill();
break;
}
expect(line).toContain(`[#!root] Reloaded: ${reloadCounter}`);
any = true;
}
if (any) {
queue.shift()!();
await new Promise<void>((resolve, reject) => {
if (errors.length > 0) {
errors.length = 0;
resolve();
return;
}
onError = resolve;
});
queue.shift()!();
}
}
expect(reloadCounter).toBe(3);
} finally {
// @ts-ignore
runner?.unref?.();
// @ts-ignore
runner?.kill?.(9);
}
},
timeout,
);
it(
"should not hot reload when a random file is written",
async () => {
const root = hotRunnerRoot;
try {
var runner = spawn({
cmd: [bunExe(), "--hot", "run", root],
env: bunEnv,
cwd,
stdout: "pipe",
stderr: "inherit",
stdin: "ignore",
});
let reloadCounter = 0;
const code = readFileSync(root, "utf-8");
async function onReload() {
writeFileSync(root + ".another.yet.js", code);
unlinkSync(root + ".another.yet.js");
}
var finished = false;
await Promise.race([
Bun.sleep(200),
(async () => {
if (finished) {
return;
}
str += new TextDecoder().decode(line);
if (!/\[#!root\].*[0-9]\n/g.test(str)) continue;
for (let line of str.split("\n")) {
if (!line.includes("[#!root]")) continue;
var str = "";
for await (const line of runner.stdout) {
if (finished) {
return;
}
await onReload();
reloadCounter++;
str = "";
expect(line).toContain(`[#!root] Reloaded: ${reloadCounter}`);
str += new TextDecoder().decode(line);
if (!/\[#!root\].*[0-9]\n/g.test(str)) continue;
for (let line of str.split("\n")) {
if (!line.includes("[#!root]")) continue;
if (finished) {
return;
}
await onReload();
reloadCounter++;
str = "";
expect(line).toContain(`[#!root] Reloaded: ${reloadCounter}`);
}
}
})(),
]);
finished = true;
runner.kill(0);
runner.unref();
expect(reloadCounter).toBe(1);
} finally {
// @ts-ignore
runner?.unref?.();
// @ts-ignore
runner?.kill?.(9);
}
},
timeout,
);
it(
"should hot reload when a file is deleted and rewritten",
async () => {
try {
const root = hotRunnerRoot + ".tmp.js";
copyFileSync(hotRunnerRoot, root);
var runner = spawn({
cmd: [bunExe(), "--hot", "run", root],
env: bunEnv,
cwd,
stdout: "pipe",
stderr: "inherit",
stdin: "ignore",
});
var reloadCounter = 0;
async function onReload() {
const contents = readFileSync(root, "utf-8");
rmSync(root);
writeFileSync(root, contents);
}
var str = "";
for await (const line of runner.stdout) {
str += new TextDecoder().decode(line);
var any = false;
if (!/\[#!root\].*[0-9]\n/g.test(str)) continue;
for (let line of str.split("\n")) {
if (!line.includes("[#!root]")) continue;
reloadCounter++;
str = "";
if (reloadCounter === 3) {
runner.unref();
runner.kill();
break;
}
expect(line).toContain(`[#!root] Reloaded: ${reloadCounter}`);
any = true;
}
})(),
]);
finished = true;
runner.kill(0);
runner.unref();
expect(reloadCounter).toBe(1);
} finally {
// @ts-ignore
runner?.unref?.();
// @ts-ignore
runner?.kill?.(9);
}
});
if (any) await onReload();
}
rmSync(root);
expect(reloadCounter).toBe(3);
} finally {
// @ts-ignore
runner?.unref?.();
// @ts-ignore
runner?.kill?.(9);
}
},
timeout,
);
it("should hot reload when a file is deleted and rewritten", async () => {
try {
it(
"should hot reload when a file is renamed() into place",
async () => {
const root = hotRunnerRoot + ".tmp.js";
copyFileSync(hotRunnerRoot, root);
var runner = spawn({
cmd: [bunExe(), "--hot", "run", root],
env: bunEnv,
cwd,
stdout: "pipe",
stderr: "inherit",
stdin: "ignore",
});
try {
var runner = spawn({
cmd: [bunExe(), "--hot", "run", root],
env: bunEnv,
cwd,
stdout: "pipe",
stderr: "inherit",
stdin: "ignore",
});
var reloadCounter = 0;
var reloadCounter = 0;
async function onReload() {
const contents = readFileSync(root, "utf-8");
rmSync(root);
writeFileSync(root, contents);
}
var str = "";
for await (const line of runner.stdout) {
str += new TextDecoder().decode(line);
var any = false;
if (!/\[#!root\].*[0-9]\n/g.test(str)) continue;
for (let line of str.split("\n")) {
if (!line.includes("[#!root]")) continue;
reloadCounter++;
str = "";
if (reloadCounter === 3) {
runner.unref();
runner.kill();
break;
}
expect(line).toContain(`[#!root] Reloaded: ${reloadCounter}`);
any = true;
async function onReload() {
const contents = readFileSync(root, "utf-8");
rmSync(root + ".tmpfile", { force: true });
await 1;
writeFileSync(root + ".tmpfile", contents);
await 1;
rmSync(root);
await 1;
renameSync(root + ".tmpfile", root);
await 1;
}
if (any) await onReload();
}
rmSync(root);
expect(reloadCounter).toBe(3);
} finally {
// @ts-ignore
runner?.unref?.();
// @ts-ignore
runner?.kill?.(9);
}
});
var str = "";
for await (const line of runner.stdout) {
str += new TextDecoder().decode(line);
var any = false;
if (!/\[#!root\].*[0-9]\n/g.test(str)) continue;
it("should hot reload when a file is renamed() into place", async () => {
const root = hotRunnerRoot + ".tmp.js";
copyFileSync(hotRunnerRoot, root);
try {
var runner = spawn({
cmd: [bunExe(), "--hot", "run", root],
env: bunEnv,
cwd,
stdout: "pipe",
stderr: "inherit",
stdin: "ignore",
});
for (let line of str.split("\n")) {
if (!line.includes("[#!root]")) continue;
reloadCounter++;
str = "";
var reloadCounter = 0;
if (reloadCounter === 3) {
runner.unref();
runner.kill();
break;
}
async function onReload() {
const contents = readFileSync(root, "utf-8");
rmSync(root + ".tmpfile", { force: true });
await 1;
writeFileSync(root + ".tmpfile", contents);
await 1;
rmSync(root);
await 1;
renameSync(root + ".tmpfile", root);
await 1;
}
var str = "";
for await (const line of runner.stdout) {
str += new TextDecoder().decode(line);
var any = false;
if (!/\[#!root\].*[0-9]\n/g.test(str)) continue;
for (let line of str.split("\n")) {
if (!line.includes("[#!root]")) continue;
reloadCounter++;
str = "";
if (reloadCounter === 3) {
runner.unref();
runner.kill();
break;
expect(line).toContain(`[#!root] Reloaded: ${reloadCounter}`);
any = true;
}
expect(line).toContain(`[#!root] Reloaded: ${reloadCounter}`);
any = true;
if (any) await onReload();
}
if (any) await onReload();
rmSync(root);
expect(reloadCounter).toBe(3);
} finally {
// @ts-ignore
runner?.unref?.();
// @ts-ignore
runner?.kill?.(9);
}
rmSync(root);
expect(reloadCounter).toBe(3);
} finally {
// @ts-ignore
runner?.unref?.();
// @ts-ignore
runner?.kill?.(9);
}
});
},
timeout,
);
const comment_spam = ("//" + "B".repeat(2000) + "\n").repeat(1000);
it(
@@ -385,81 +408,85 @@ ${" ".repeat(reloadCounter * 2)}throw new Error(${reloadCounter});`,
await runner.exited;
expect(reloadCounter).toBe(50);
},
isDebug ? Infinity : 10_000,
timeout,
);
it("should work with sourcemap loading", async () => {
let bundleIn = join(cwd, "bundle_in.ts");
rmSync(hotRunnerRoot);
writeFileSync(
bundleIn,
`// source content
//
//
throw new Error('0');`,
);
await using bundler = spawn({
cmd: [bunExe(), "build", "--watch", bundleIn, "--target=bun", "--sourcemap", "--outfile", hotRunnerRoot],
env: bunEnv,
cwd,
stdout: "inherit",
stderr: "inherit",
stdin: "ignore",
});
await using runner = spawn({
cmd: [bunExe(), "--hot", "run", hotRunnerRoot],
env: bunEnv,
cwd,
stdout: "ignore",
stderr: "pipe",
stdin: "ignore",
});
let reloadCounter = 0;
function onReload() {
it(
"should work with sourcemap loading",
async () => {
let bundleIn = join(cwd, "bundle_in.ts");
rmSync(hotRunnerRoot);
writeFileSync(
bundleIn,
`// source content
//
//
throw new Error('0');`,
);
await using bundler = spawn({
cmd: [bunExe(), "build", "--watch", bundleIn, "--target=bun", "--sourcemap", "--outfile", hotRunnerRoot],
env: bunEnv,
cwd,
stdout: "inherit",
stderr: "inherit",
stdin: "ignore",
});
await using runner = spawn({
cmd: [bunExe(), "--hot", "run", hotRunnerRoot],
env: bunEnv,
cwd,
stdout: "ignore",
stderr: "pipe",
stdin: "ignore",
});
let reloadCounter = 0;
function onReload() {
writeFileSync(
bundleIn,
`// source content
// etc etc
// etc etc
${" ".repeat(reloadCounter * 2)}throw new Error(${reloadCounter});`,
);
}
let str = "";
outer: for await (const chunk of runner.stderr) {
str += new TextDecoder().decode(chunk);
var any = false;
if (!/error: .*[0-9]\n.*?\n/g.test(str)) continue;
let it = str.split("\n");
let line;
while ((line = it.shift())) {
if (!line.includes("error")) continue;
str = "";
if (reloadCounter === 50) {
runner.kill();
break;
}
if (line.includes(`error: ${reloadCounter - 1}`)) {
onReload(); // re-save file to prevent deadlock
continue outer;
}
expect(line).toContain(`error: ${reloadCounter}`);
reloadCounter++;
let next = it.shift()!;
expect(next).toInclude("bundle_in.ts");
const col = next.match(/\s*at.*?:4:(\d+)$/)![1];
expect(Number(col)).toBe(1 + "throw ".length + (reloadCounter - 1) * 2);
any = true;
);
}
let str = "";
outer: for await (const chunk of runner.stderr) {
str += new TextDecoder().decode(chunk);
var any = false;
if (!/error: .*[0-9]\n.*?\n/g.test(str)) continue;
if (any) await onReload();
}
expect(reloadCounter).toBe(50);
bundler.kill();
});
let it = str.split("\n");
let line;
while ((line = it.shift())) {
if (!line.includes("error")) continue;
str = "";
if (reloadCounter === 50) {
runner.kill();
break;
}
if (line.includes(`error: ${reloadCounter - 1}`)) {
onReload(); // re-save file to prevent deadlock
continue outer;
}
expect(line).toContain(`error: ${reloadCounter}`);
reloadCounter++;
let next = it.shift()!;
expect(next).toInclude("bundle_in.ts");
const col = next.match(/\s*at.*?:4:(\d+)$/)![1];
expect(Number(col)).toBe(1 + "throw ".length + (reloadCounter - 1) * 2);
any = true;
}
if (any) await onReload();
}
expect(reloadCounter).toBe(50);
bundler.kill();
},
timeout,
);
const long_comment = "BBBB".repeat(100000);
@@ -566,5 +593,5 @@ ${" ".repeat(reloadCounter * 2)}throw new Error(${reloadCounter});`,
// TODO: bun has a memory leak when --hot is used on very large files
// console.log({ sampleMemory10, sampleMemory100 });
},
isDebug ? Infinity : 20_000,
longTimeout,
);

View File

@@ -15,7 +15,7 @@ import {
root_url,
setHandler,
} from "./dummy.registry";
import { cpSync, rmSync } from "js/node/fs/export-star-from";
import { cpSync } from "node:fs";
beforeAll(dummyBeforeAll);
afterAll(dummyAfterAll);

View File

@@ -1,27 +1,28 @@
import { spawn, spawnSync } from "bun";
import { beforeEach, expect, it, setDefaultTimeout, beforeAll } from "bun:test";
import { bunExe, bunEnv as env, tls, tmpdirSync } from "harness";
import { join } from "path";
import { copyFileSync } from "js/node/fs/export-star-from";
import { join, basename } from "path";
import { copyFileSync } from "node:fs";
import { upgrade_test_helpers } from "bun:internal-for-testing";
const { openTempDirWithoutSharingDelete, closeTempDirHandle } = upgrade_test_helpers;
let run_dir: string;
let exe_name: string = "bun-debug" + (process.platform === "win32" ? ".exe" : "");
let cwd: string;
let execPath: string;
beforeAll(() => {
setDefaultTimeout(1000 * 60 * 5);
});
beforeEach(async () => {
run_dir = tmpdirSync();
copyFileSync(bunExe(), join(run_dir, exe_name));
cwd = tmpdirSync();
execPath = join(cwd, basename(bunExe()));
copyFileSync(bunExe(), execPath);
});
it("two invalid arguments, should display error message and suggest command", async () => {
const { stderr } = spawn({
cmd: [join(run_dir, exe_name), "upgrade", "bun-types", "--dev"],
cwd: run_dir,
cmd: [execPath, "upgrade", "bun-types", "--dev"],
cwd,
stdout: null,
stdin: "pipe",
stderr: "pipe",
@@ -35,8 +36,8 @@ it("two invalid arguments, should display error message and suggest command", as
it("two invalid arguments flipped, should display error message and suggest command", async () => {
const { stderr } = spawn({
cmd: [join(run_dir, exe_name), "upgrade", "--dev", "bun-types"],
cwd: run_dir,
cmd: [execPath, "upgrade", "--dev", "bun-types"],
cwd,
stdout: null,
stdin: "pipe",
stderr: "pipe",
@@ -50,8 +51,8 @@ it("two invalid arguments flipped, should display error message and suggest comm
it("one invalid argument, should display error message and suggest command", async () => {
const { stderr } = spawn({
cmd: [join(run_dir, exe_name), "upgrade", "bun-types"],
cwd: run_dir,
cmd: [execPath, "upgrade", "bun-types"],
cwd,
stdout: null,
stdin: "pipe",
stderr: "pipe",
@@ -65,8 +66,8 @@ it("one invalid argument, should display error message and suggest command", asy
it("one valid argument, should succeed", async () => {
const { stderr } = spawn({
cmd: [join(run_dir, exe_name), "upgrade", "--help"],
cwd: run_dir,
cmd: [execPath, "upgrade", "--help"],
cwd,
stdout: null,
stdin: "pipe",
stderr: "pipe",
@@ -81,8 +82,8 @@ it("one valid argument, should succeed", async () => {
it("two valid argument, should succeed", async () => {
const { stderr } = spawn({
cmd: [join(run_dir, exe_name), "upgrade", "--stable", "--profile"],
cwd: run_dir,
cmd: [execPath, "upgrade", "--stable", "--profile"],
cwd,
stdout: null,
stdin: "pipe",
stderr: "pipe",
@@ -96,55 +97,56 @@ it("two valid argument, should succeed", async () => {
});
it("zero arguments, should succeed", async () => {
const tagName = bunExe().includes("-debug") ? "canary" : `bun-v${Bun.version}`;
using server = Bun.serve({
tls: tls,
port: 0,
async fetch() {
return new Response(
JSON.stringify({
"tag_name": "bun-v1.1.4",
"tag_name": tagName,
"assets": [
{
"url": "foo",
"content_type": "application/zip",
"name": "bun-windows-x64.zip",
"browser_download_url": `https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/latest/bun-windows-x64.zip`,
"browser_download_url": `https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/${tagName}/bun-windows-x64.zip`,
},
{
"url": "foo",
"content_type": "application/zip",
"name": "bun-windows-x64-baseline.zip",
"browser_download_url": `https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/latest/bun-windows-x64-baseline.zip`,
"browser_download_url": `https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/${tagName}/bun-windows-x64-baseline.zip`,
},
{
"url": "foo",
"content_type": "application/zip",
"name": "bun-linux-x64.zip",
"browser_download_url": `https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/latest/bun-linux-x64.zip`,
"browser_download_url": `https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/${tagName}/bun-linux-x64.zip`,
},
{
"url": "foo",
"content_type": "application/zip",
"name": "bun-linux-x64-baseline.zip",
"browser_download_url": `https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/latest/bun-linux-x64-baseline.zip`,
"browser_download_url": `https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/${tagName}/bun-linux-x64-baseline.zip`,
},
{
"url": "foo",
"content_type": "application/zip",
"name": "bun-darwin-x64.zip",
"browser_download_url": `https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/latest/bun-darwin-x64.zip`,
"browser_download_url": `https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/${tagName}/bun-darwin-x64.zip`,
},
{
"url": "foo",
"content_type": "application/zip",
"name": "bun-darwin-x64-baseline.zip",
"browser_download_url": `https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/latest/bun-darwin-x64-baseline.zip`,
"browser_download_url": `https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/${tagName}/bun-darwin-x64-baseline.zip`,
},
{
"url": "foo",
"content_type": "application/zip",
"name": "bun-darwin-aarch64.zip",
"browser_download_url": `https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/latest/bun-darwin-aarch64.zip`,
"browser_download_url": `https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/${tagName}/bun-darwin-aarch64.zip`,
},
],
}),
@@ -157,15 +159,15 @@ it("zero arguments, should succeed", async () => {
openTempDirWithoutSharingDelete();
const { stderr } = spawnSync({
cmd: [join(run_dir, exe_name), "upgrade"],
cwd: run_dir,
cmd: [execPath, "upgrade"],
cwd,
stdout: null,
stdin: "pipe",
stderr: "pipe",
env: {
...env,
NODE_TLS_REJECT_UNAUTHORIZED: "0",
GITHUB_API_DOMAIN: `localhost:${server.port}`,
GITHUB_API_DOMAIN: `${server.hostname}:${server.port}`,
},
});

Some files were not shown because too many files have changed in this diff Show More