mirror of
https://github.com/oven-sh/bun
synced 2026-02-17 14:22:01 +00:00
Compare commits
590 Commits
lithdew/pi
...
bun-v0.0.6
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
9c5dead232 | ||
|
|
0191cabe6e | ||
|
|
e1747c0dd7 | ||
|
|
e50ccc25db | ||
|
|
3a788fa035 | ||
|
|
9715c9223a | ||
|
|
ca6cdb4e98 | ||
|
|
e431dbe5bd | ||
|
|
3063df9b78 | ||
|
|
d8f174e54c | ||
|
|
51abe58a1a | ||
|
|
7f1415c7ae | ||
|
|
6cf3a08780 | ||
|
|
2b45c8dffe | ||
|
|
860d7e93c0 | ||
|
|
dabcac2e96 | ||
|
|
c03b7a6f19 | ||
|
|
909e6a6bab | ||
|
|
a2f91b167d | ||
|
|
b2b8c7f0b7 | ||
|
|
29554f4de3 | ||
|
|
735dc8f1d6 | ||
|
|
a9577c9ba8 | ||
|
|
a89726b30c | ||
|
|
3225a4e7e8 | ||
|
|
432e823f7e | ||
|
|
62541f1ac8 | ||
|
|
faf137b9be | ||
|
|
3c9e30ffcd | ||
|
|
b22a3f5973 | ||
|
|
4232000020 | ||
|
|
475aab6cc3 | ||
|
|
dddd9c23e4 | ||
|
|
1064b9dada | ||
|
|
baffe26dd1 | ||
|
|
1993f9f9a5 | ||
|
|
7f00482cfe | ||
|
|
b2a69a35b8 | ||
|
|
a52a948a70 | ||
|
|
94cbfa4579 | ||
|
|
1a4ccd3f5c | ||
|
|
68cb6130d3 | ||
|
|
64b74ede87 | ||
|
|
213960a04a | ||
|
|
170e58a99d | ||
|
|
f3c4bfcbae | ||
|
|
fa971895a8 | ||
|
|
a5a9f8e821 | ||
|
|
0a547304a7 | ||
|
|
45e7bb0027 | ||
|
|
5d7c34093f | ||
|
|
9322cec8f2 | ||
|
|
047501999d | ||
|
|
30f5e0d37c | ||
|
|
205a6d45b5 | ||
|
|
d47e0de175 | ||
|
|
8c470194ce | ||
|
|
410a6bd32d | ||
|
|
af69b47c22 | ||
|
|
fb2c7e5f38 | ||
|
|
4a1c195b24 | ||
|
|
1489e48950 | ||
|
|
5922ba29ac | ||
|
|
1de275325e | ||
|
|
14301284af | ||
|
|
adeb880d7f | ||
|
|
857e9bee00 | ||
|
|
711e0cef78 | ||
|
|
73449bf433 | ||
|
|
4aabccfc79 | ||
|
|
1e2a61c6a0 | ||
|
|
844fae826f | ||
|
|
ef8d1a95a4 | ||
|
|
bf4943eec1 | ||
|
|
97d17904d3 | ||
|
|
98af486b89 | ||
|
|
8f27b27a0e | ||
|
|
aced5882d1 | ||
|
|
839fd03472 | ||
|
|
fb5a90cb1b | ||
|
|
64a83f1427 | ||
|
|
f684a16051 | ||
|
|
2666b45f2a | ||
|
|
aad33c2611 | ||
|
|
ff2335a910 | ||
|
|
32733210dc | ||
|
|
9f5a1705a5 | ||
|
|
b4c1cea735 | ||
|
|
2572636004 | ||
|
|
3a962f3bad | ||
|
|
bfa2bc6736 | ||
|
|
354ed2880c | ||
|
|
9db4d195a7 | ||
|
|
0808f29375 | ||
|
|
7dbfb3ef41 | ||
|
|
333bccee5d | ||
|
|
25c35e59b4 | ||
|
|
ff7785e023 | ||
|
|
0e138bcc8f | ||
|
|
ecea12d206 | ||
|
|
61d1c7b6b2 | ||
|
|
b5cec4b704 | ||
|
|
e895605e5f | ||
|
|
f5a79ea6d2 | ||
|
|
de0cf42111 | ||
|
|
ec9e4eb97e | ||
|
|
47b6a82920 | ||
|
|
28f77b9823 | ||
|
|
4ac8c1bd84 | ||
|
|
962b7fa8d9 | ||
|
|
d6ebb478e3 | ||
|
|
2b8893da53 | ||
|
|
744f52fb27 | ||
|
|
43b9f9462f | ||
|
|
d89aa9a9e6 | ||
|
|
084d78be29 | ||
|
|
9c627e884d | ||
|
|
da83583cdf | ||
|
|
4d9ae0df06 | ||
|
|
4d95e44317 | ||
|
|
4b7235ac5f | ||
|
|
4152c1b177 | ||
|
|
dc5745080d | ||
|
|
3dfac788fa | ||
|
|
c56ff7efb4 | ||
|
|
114c560912 | ||
|
|
1576d183c9 | ||
|
|
0e0bfe91d0 | ||
|
|
d3559d1cca | ||
|
|
222a1cc9e8 | ||
|
|
03103925bd | ||
|
|
9e470cb48e | ||
|
|
376602a65c | ||
|
|
8493cfe983 | ||
|
|
d11ac34dcb | ||
|
|
08bc06dfe0 | ||
|
|
f4fbf84294 | ||
|
|
56ae4bcb55 | ||
|
|
ec61dc0665 | ||
|
|
b21b0ea849 | ||
|
|
318c423e14 | ||
|
|
834ff71157 | ||
|
|
39a187d32e | ||
|
|
146d2cc231 | ||
|
|
87c95d45f7 | ||
|
|
f7f2f6e6b8 | ||
|
|
3e803b3a58 | ||
|
|
22c01ca14c | ||
|
|
111f0921f5 | ||
|
|
8c515c7077 | ||
|
|
04579909ff | ||
|
|
40ff3e4618 | ||
|
|
c0a446df02 | ||
|
|
8f7cf6bf47 | ||
|
|
4a7b5892af | ||
|
|
9a5aa95893 | ||
|
|
8d623e21b6 | ||
|
|
7a87e41ab8 | ||
|
|
f62f184241 | ||
|
|
8ea69cb5d1 | ||
|
|
97f0cef391 | ||
|
|
85084c6db9 | ||
|
|
a09b995651 | ||
|
|
4098484ff5 | ||
|
|
91b24c6032 | ||
|
|
38d0f0bbd0 | ||
|
|
d3e39632bf | ||
|
|
43ae61e095 | ||
|
|
bd31c88814 | ||
|
|
02c59df1b3 | ||
|
|
f6c05ef7a1 | ||
|
|
505e4b80fd | ||
|
|
cd8d88716f | ||
|
|
d3a93d5273 | ||
|
|
ed9637de50 | ||
|
|
4e744c057b | ||
|
|
4b717fe554 | ||
|
|
236a0fde35 | ||
|
|
306d309209 | ||
|
|
a8f4cd271e | ||
|
|
3cd129544e | ||
|
|
1004e924ff | ||
|
|
c013ede227 | ||
|
|
914b363a37 | ||
|
|
8ad349dcf0 | ||
|
|
7146bfc04a | ||
|
|
e852bc0980 | ||
|
|
8a9b1416ed | ||
|
|
9da7a1ff0a | ||
|
|
219aa57fbd | ||
|
|
5930ac09b7 | ||
|
|
8837c3c10b | ||
|
|
0d0dd65a6a | ||
|
|
0ac9c4276f | ||
|
|
0edf6fd1e4 | ||
|
|
c9d6c25f71 | ||
|
|
73fed6c093 | ||
|
|
75f238fcb3 | ||
|
|
8706f77eb7 | ||
|
|
3cf0689d46 | ||
|
|
623f77e097 | ||
|
|
61d6f8f18f | ||
|
|
6480193c65 | ||
|
|
23a31b3771 | ||
|
|
112b352b03 | ||
|
|
a48c20d470 | ||
|
|
5fd47bc613 | ||
|
|
6966d94d90 | ||
|
|
5311219de7 | ||
|
|
4119f60010 | ||
|
|
0960f3d6d1 | ||
|
|
64b49ddd95 | ||
|
|
2c27526a34 | ||
|
|
fe6b340382 | ||
|
|
8148b8824f | ||
|
|
be0c4b5451 | ||
|
|
168c6138d8 | ||
|
|
b9555a5775 | ||
|
|
b78fff9386 | ||
|
|
963c89cab2 | ||
|
|
d7ef6efd04 | ||
|
|
35fef33868 | ||
|
|
8a9e81d514 | ||
|
|
b17df61043 | ||
|
|
4e0cdf34b6 | ||
|
|
a97dec5c30 | ||
|
|
ad0834bedb | ||
|
|
b53c63910d | ||
|
|
5c0908ee52 | ||
|
|
7f33846005 | ||
|
|
4afda10b82 | ||
|
|
cc0e581bc3 | ||
|
|
df1aa76cc5 | ||
|
|
a3a2fb1e4f | ||
|
|
915dadd9d7 | ||
|
|
d899e0ac6f | ||
|
|
3915e01cfb | ||
|
|
2fc6da125f | ||
|
|
ba4013816d | ||
|
|
b64113b7d9 | ||
|
|
a9483dd8dc | ||
|
|
5f2fb86a76 | ||
|
|
386ba990bf | ||
|
|
7f1b5c09de | ||
|
|
fcfdd71c06 | ||
|
|
73336bbc00 | ||
|
|
e7fa50635f | ||
|
|
fddec80755 | ||
|
|
dac3389b27 | ||
|
|
c952eb7dea | ||
|
|
6a28cfd2ba | ||
|
|
e4693b8aaf | ||
|
|
87e78e2a48 | ||
|
|
6ac9b5fa9d | ||
|
|
71449c8638 | ||
|
|
a17088363f | ||
|
|
83004f0a0a | ||
|
|
c42fbbd10e | ||
|
|
d068d80ab9 | ||
|
|
eb142a6be5 | ||
|
|
9fde2b28e1 | ||
|
|
195723180b | ||
|
|
2e051cebaa | ||
|
|
795d0b056c | ||
|
|
d7c69d3b78 | ||
|
|
059aa425b7 | ||
|
|
aaf5c28c5b | ||
|
|
3083bbadc1 | ||
|
|
150ca5d686 | ||
|
|
58d1e2e1be | ||
|
|
090bae9a94 | ||
|
|
113f0427ab | ||
|
|
36520d6ec1 | ||
|
|
2a7d8df4c5 | ||
|
|
ac6a2bf2e5 | ||
|
|
3c1cb806d4 | ||
|
|
e5587c2b31 | ||
|
|
8128c79e81 | ||
|
|
ae5e2f5af2 | ||
|
|
a8b7b2a6b9 | ||
|
|
111bef7f06 | ||
|
|
6f1c6fbc1a | ||
|
|
31cd05191d | ||
|
|
efb8c4e36b | ||
|
|
ed71379aa1 | ||
|
|
b1c3fce49b | ||
|
|
e75c711c68 | ||
|
|
8d031f13c0 | ||
|
|
42a73f91fe | ||
|
|
c508fd9b13 | ||
|
|
c62949b98e | ||
|
|
0b640c785f | ||
|
|
fe17d51b03 | ||
|
|
565cf23d92 | ||
|
|
020e2e00c3 | ||
|
|
f4870050e0 | ||
|
|
f31e67b13d | ||
|
|
6f4d48cc6c | ||
|
|
fb34ebc674 | ||
|
|
6c867c34d7 | ||
|
|
70a56028b6 | ||
|
|
08327e2748 | ||
|
|
6d6b7f93f3 | ||
|
|
0d95cf8f5b | ||
|
|
471f9aec19 | ||
|
|
b5beb20e08 | ||
|
|
c8b6e2d5cd | ||
|
|
8ccb833421 | ||
|
|
93ec969005 | ||
|
|
5a42b59e91 | ||
|
|
266870fee8 | ||
|
|
ca5245bb9b | ||
|
|
84b3c952d9 | ||
|
|
6ef71a800f | ||
|
|
77778aeadf | ||
|
|
e8275b767e | ||
|
|
efdf7fb93e | ||
|
|
1fd2d26185 | ||
|
|
cac19d9e8b | ||
|
|
1d03df7d33 | ||
|
|
90ae5cd897 | ||
|
|
bb346c8d2b | ||
|
|
317932204e | ||
|
|
a48c7e1d83 | ||
|
|
336a4b6ac2 | ||
|
|
c9c7521f4f | ||
|
|
b455bc467d | ||
|
|
d1df057d08 | ||
|
|
062aebeba0 | ||
|
|
cff83a3fec | ||
|
|
b08d5267cb | ||
|
|
1ce37acde6 | ||
|
|
b7cd0c63e5 | ||
|
|
a866da84d9 | ||
|
|
6908f44aa2 | ||
|
|
181b8a2744 | ||
|
|
5a7bebc8b5 | ||
|
|
536d6ab302 | ||
|
|
99446ffa8a | ||
|
|
143973e031 | ||
|
|
bb5ad6e1ad | ||
|
|
6ae433249d | ||
|
|
5f5b824ade | ||
|
|
34abc8116c | ||
|
|
1efaaefd63 | ||
|
|
cfd635786a | ||
|
|
11e3faa8da | ||
|
|
1002990619 | ||
|
|
a94bbd8f3c | ||
|
|
46354da728 | ||
|
|
2c7c2b3fa6 | ||
|
|
938d866c1b | ||
|
|
42ff5415ba | ||
|
|
0e52be8427 | ||
|
|
5105614b83 | ||
|
|
77fc310a70 | ||
|
|
3541177d50 | ||
|
|
3c3ab57cf6 | ||
|
|
b73e7a9732 | ||
|
|
2047140dda | ||
|
|
b7e2f6c71a | ||
|
|
e1505d88b1 | ||
|
|
d04e5ea0ba | ||
|
|
3ebcd9bbe7 | ||
|
|
ab9ccc985a | ||
|
|
2188be219e | ||
|
|
a34792ce16 | ||
|
|
9160c9ae31 | ||
|
|
8e05fbf43f | ||
|
|
c59ff7416a | ||
|
|
c943072193 | ||
|
|
d39cc1fb5f | ||
|
|
005a0ec931 | ||
|
|
2cfdf29364 | ||
|
|
7254c71683 | ||
|
|
328534605f | ||
|
|
5eb7383613 | ||
|
|
75c2f4f394 | ||
|
|
6cbe506915 | ||
|
|
3d6ac8face | ||
|
|
14fe014fdf | ||
|
|
8f16eeb82a | ||
|
|
84e5072741 | ||
|
|
3f72da9d0b | ||
|
|
873e12c7c6 | ||
|
|
fecfb784ee | ||
|
|
5a7014a418 | ||
|
|
ceae7ef672 | ||
|
|
67b09911c1 | ||
|
|
3043bb70e1 | ||
|
|
ff99155f7d | ||
|
|
dc2dae5d87 | ||
|
|
76eeb5df42 | ||
|
|
185a985a85 | ||
|
|
834e8359ee | ||
|
|
503f70bba4 | ||
|
|
c3e45b662a | ||
|
|
4f16f87b4d | ||
|
|
9db1a5786d | ||
|
|
b9b150f147 | ||
|
|
0eecec75e2 | ||
|
|
581f3540b6 | ||
|
|
68dabd4d25 | ||
|
|
59b171db04 | ||
|
|
e97ebfb036 | ||
|
|
7380d03e00 | ||
|
|
5be558ef15 | ||
|
|
a7116cf353 | ||
|
|
3147f69435 | ||
|
|
0ef9b00dc7 | ||
|
|
bc05793cbe | ||
|
|
fd349f3020 | ||
|
|
df5a3c4da4 | ||
|
|
fe415a30c5 | ||
|
|
382208e59c | ||
|
|
ed02a24e0e | ||
|
|
e712d3458c | ||
|
|
685681dedc | ||
|
|
b412da6b8a | ||
|
|
03dc674c2d | ||
|
|
d0bcd3c6fb | ||
|
|
6649218e17 | ||
|
|
453fae4a5c | ||
|
|
3f32222360 | ||
|
|
479705e8ce | ||
|
|
e8fc9d5c9b | ||
|
|
38d3b200b5 | ||
|
|
a0551a9b8b | ||
|
|
be03fc273a | ||
|
|
ab496e9e34 | ||
|
|
81e7576150 | ||
|
|
2e69f15c20 | ||
|
|
0c7b35d0ab | ||
|
|
e12f10ae0e | ||
|
|
961b704a8e | ||
|
|
4ee8055f17 | ||
|
|
394335fa91 | ||
|
|
8125c05be0 | ||
|
|
059f01d23a | ||
|
|
5f80f9c814 | ||
|
|
0cee57f1d9 | ||
|
|
eab99b3bae | ||
|
|
a1e1509358 | ||
|
|
39d8b0c8c1 | ||
|
|
1356029bea | ||
|
|
b8af8a1af0 | ||
|
|
06c79cffae | ||
|
|
430fe09d6d | ||
|
|
5455c71f93 | ||
|
|
e2d6c692c4 | ||
|
|
501fab0bef | ||
|
|
1f1c337eb6 | ||
|
|
1c9131a5d7 | ||
|
|
fb758a32e1 | ||
|
|
d4c55557c3 | ||
|
|
0e39174c48 | ||
|
|
40401a6ed2 | ||
|
|
74ea36ac1c | ||
|
|
2d48674c5b | ||
|
|
42c7d887d3 | ||
|
|
8dcaaa9e09 | ||
|
|
267e926a38 | ||
|
|
11e6ffaae9 | ||
|
|
6a5dd6ed58 | ||
|
|
bcd68b937d | ||
|
|
9fab7ca06c | ||
|
|
4082a34599 | ||
|
|
f9debc72fa | ||
|
|
b363402f45 | ||
|
|
b0942fbc37 | ||
|
|
e3a12ea1a9 | ||
|
|
5dd6fb8547 | ||
|
|
8586d2e41c | ||
|
|
ba1e95fd43 | ||
|
|
e7538d7b26 | ||
|
|
2b7ebaceb2 | ||
|
|
3de3c3c143 | ||
|
|
0e0e325e5b | ||
|
|
e97106c770 | ||
|
|
50f8e0daa8 | ||
|
|
efcf846b2c | ||
|
|
afe3e13345 | ||
|
|
461c769ac4 | ||
|
|
0dc0d6c31c | ||
|
|
49d8ea4886 | ||
|
|
e66776a79f | ||
|
|
3036cbe552 | ||
|
|
393aa076bc | ||
|
|
0ef6f81843 | ||
|
|
339fe00dfe | ||
|
|
6382bb5333 | ||
|
|
41c60fb848 | ||
|
|
8df49bc823 | ||
|
|
ab4129061e | ||
|
|
021a670d86 | ||
|
|
fa27551428 | ||
|
|
538243dc3b | ||
|
|
d83c610002 | ||
|
|
196f7eb101 | ||
|
|
a1167d80b2 | ||
|
|
a765b13f52 | ||
|
|
45b3e9d03c | ||
|
|
5844166047 | ||
|
|
c6dcc1cd9e | ||
|
|
118ed4d2ab | ||
|
|
9fcd2c53c8 | ||
|
|
da80830bc5 | ||
|
|
b060056480 | ||
|
|
b71df3a3d4 | ||
|
|
128e9861f9 | ||
|
|
5d2cb27562 | ||
|
|
75e340bfbf | ||
|
|
b5744e5da4 | ||
|
|
7bad5770a1 | ||
|
|
809c1e46d8 | ||
|
|
c056093125 | ||
|
|
88ea1de80f | ||
|
|
7c8a835fa0 | ||
|
|
c6317888da | ||
|
|
9f1946ce70 | ||
|
|
32be2866f6 | ||
|
|
448d60ebbf | ||
|
|
c536370ab8 | ||
|
|
dccc6bf1af | ||
|
|
e331ebbf7d | ||
|
|
b1660fa468 | ||
|
|
4af743766d | ||
|
|
8f35f16c7e | ||
|
|
e08710e373 | ||
|
|
48b9af1298 | ||
|
|
cec59bc580 | ||
|
|
dff23f5a7b | ||
|
|
bcdff7f6f0 | ||
|
|
49dbf79e81 | ||
|
|
1610ff33ea | ||
|
|
b5fd121f26 | ||
|
|
c292ea7b94 | ||
|
|
de56f3a562 | ||
|
|
94e469c803 | ||
|
|
0d369baf9d | ||
|
|
0ab00df154 | ||
|
|
6167d95167 | ||
|
|
d582e42d4d | ||
|
|
6da14ae310 | ||
|
|
442f85e89e | ||
|
|
8cf8bf441e | ||
|
|
2cc25f64f2 | ||
|
|
035008cd9d | ||
|
|
e7f1ff14d6 | ||
|
|
c092d2ba70 | ||
|
|
d1783babd9 | ||
|
|
7519f364e5 | ||
|
|
7f9ab7dfce | ||
|
|
f9d87f28f6 | ||
|
|
dd7225fb2a | ||
|
|
134748a238 | ||
|
|
b57d890cbd | ||
|
|
50163e1385 | ||
|
|
fde4cd4d17 | ||
|
|
b6e164355b | ||
|
|
2e9af40bca | ||
|
|
2f45c3d2cd | ||
|
|
d763c0ab4e | ||
|
|
baa93d4b37 | ||
|
|
dd7659c643 | ||
|
|
aed6f89772 | ||
|
|
39af1ccc7c | ||
|
|
52f4ba767a | ||
|
|
0f853a2fee | ||
|
|
ca807afba2 | ||
|
|
e349e54cb8 | ||
|
|
39218ec0f7 | ||
|
|
908d9d86b3 | ||
|
|
92f3efeac2 | ||
|
|
a772b56cb6 | ||
|
|
bd6b9a58d3 | ||
|
|
873cacc15a | ||
|
|
55ff681976 | ||
|
|
618ecdb76c | ||
|
|
8af6ad7278 | ||
|
|
c07de8c4d3 | ||
|
|
e10fe8aaad | ||
|
|
feb45edf6d | ||
|
|
0a916543b3 | ||
|
|
2c53ff2980 | ||
|
|
b2fcc027a9 | ||
|
|
1dc78ba4b7 | ||
|
|
04e0910dea | ||
|
|
d19a6199ea | ||
|
|
e8925e47b0 |
65
.devcontainer/devcontainer.json
Normal file
65
.devcontainer/devcontainer.json
Normal file
@@ -0,0 +1,65 @@
|
||||
// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
|
||||
// https://github.com/microsoft/vscode-dev-containers/tree/v0.209.6/containers/docker-existing-dockerfile
|
||||
{
|
||||
"name": "bun (Ubuntu)",
|
||||
|
||||
// Sets the run context to one level up instead of the .devcontainer folder.
|
||||
"context": "..",
|
||||
|
||||
// Update the 'dockerFile' property if you aren't using the standard 'Dockerfile' filename.
|
||||
"dockerFile": "../Dockerfile",
|
||||
|
||||
// Set *default* container specific settings.json values on container create.
|
||||
"settings": {
|
||||
"terminal.integrated.shell.linux": "/bin/zsh",
|
||||
"zigLanguageClient.path": "/home/ubuntu/zls/zig-out/bin/zls",
|
||||
"zig.zigPath": "/build/zig/zig",
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
},
|
||||
|
||||
// Add the IDs of extensions you want installed when the container is created.
|
||||
"extensions": [
|
||||
"AugusteRame.zls-vscode",
|
||||
"ms-vscode.cpptools",
|
||||
"/home/ubuntu/vscode-zig.vsix",
|
||||
"vadimcn.vscode-lldb",
|
||||
"esbenp.prettier-vscode",
|
||||
"xaver.clang-format"
|
||||
],
|
||||
"postCreateCommand": "cd /build/bun; bash /build/getting-started.sh; zsh",
|
||||
|
||||
"build": {
|
||||
"target": "bun.devcontainer",
|
||||
"cacheFrom": ["bun.devcontainer:latest"],
|
||||
"args": {}
|
||||
},
|
||||
"runArgs": [
|
||||
"--ulimit",
|
||||
"memlock=-1:-1",
|
||||
"--ulimit",
|
||||
"nofile=65536:65536",
|
||||
"--cap-add=SYS_PTRACE",
|
||||
"--security-opt",
|
||||
"seccomp=unconfined"
|
||||
],
|
||||
"workspaceMount": "source=bun,target=/build/bun,type=volume",
|
||||
"workspaceFolder": "/build/bun",
|
||||
"mounts": [
|
||||
"source=bun-install,target=/home/ubuntu/.bun,type=volume",
|
||||
"source=bun-config,target=/home/ubuntu/.config,type=volume"
|
||||
],
|
||||
|
||||
// Use 'forwardPorts' to make a list of ports inside the container available locally.
|
||||
"forwardPorts": [3000, 8081, 8080]
|
||||
|
||||
// Uncomment the next line to run commands after the container is created - for example installing curl.
|
||||
// "postCreateCommand": "apt-get update && apt-get install -y curl",
|
||||
|
||||
// Uncomment when using a ptrace-based debugger like C++, Go, and Rust
|
||||
|
||||
// Uncomment to use the Docker CLI from inside the container. See https://aka.ms/vscode-remote/samples/docker-from-docker.
|
||||
// "mounts": [ "source=/var/run/docker.sock,target=/var/run/docker.sock,type=bind" ],
|
||||
|
||||
// Uncomment to connect as a non-root user if you've added one. See https://aka.ms/vscode-remote/containers/non-root.
|
||||
// "remoteUser": "vscode"
|
||||
}
|
||||
61
.devcontainer/limits.conf
Normal file
61
.devcontainer/limits.conf
Normal file
@@ -0,0 +1,61 @@
|
||||
# /etc/security/limits.conf
|
||||
#
|
||||
#Each line describes a limit for a user in the form:
|
||||
#
|
||||
#<domain> <type> <item> <value>
|
||||
#
|
||||
#Where:
|
||||
#<domain> can be:
|
||||
# - a user name
|
||||
# - a group name, with @group syntax
|
||||
# - the wildcard *, for default entry
|
||||
# - the wildcard %, can be also used with %group syntax,
|
||||
# for maxlogin limit
|
||||
# - NOTE: group and wildcard limits are not applied to root.
|
||||
# To apply a limit to the root user, <domain> must be
|
||||
# the literal username root.
|
||||
#
|
||||
#<type> can have the two values:
|
||||
# - "soft" for enforcing the soft limits
|
||||
# - "hard" for enforcing hard limits
|
||||
#
|
||||
#<item> can be one of the following:
|
||||
# - core - limits the core file size (KB)
|
||||
# - data - max data size (KB)
|
||||
# - fsize - maximum filesize (KB)
|
||||
# - memlock - max locked-in-memory address space (KB)
|
||||
# - nofile - max number of open file descriptors
|
||||
# - rss - max resident set size (KB)
|
||||
# - stack - max stack size (KB)
|
||||
# - cpu - max CPU time (MIN)
|
||||
# - nproc - max number of processes
|
||||
# - as - address space limit (KB)
|
||||
# - maxlogins - max number of logins for this user
|
||||
# - maxsyslogins - max number of logins on the system
|
||||
# - priority - the priority to run user process with
|
||||
# - locks - max number of file locks the user can hold
|
||||
# - sigpending - max number of pending signals
|
||||
# - msgqueue - max memory used by POSIX message queues (bytes)
|
||||
# - nice - max nice priority allowed to raise to values: [-20, 19]
|
||||
# - rtprio - max realtime priority
|
||||
# - chroot - change root to directory (Debian-specific)
|
||||
#
|
||||
#<domain> <type> <item> <value>
|
||||
#
|
||||
|
||||
* soft memlock 33554432
|
||||
* hard memlock 33554432
|
||||
* soft nofile 33554432
|
||||
* hard nofile 33554432
|
||||
|
||||
#* soft core 0
|
||||
#root hard core 100000
|
||||
#* hard rss 10000
|
||||
#@student hard nproc 20
|
||||
#@faculty soft nproc 20
|
||||
#@faculty hard nproc 50
|
||||
#ftp hard nproc 0
|
||||
#ftp - chroot /ftp
|
||||
#@student - maxlogins 4
|
||||
|
||||
# End of file
|
||||
445
.devcontainer/scripts/common-debian.sh
Normal file
445
.devcontainer/scripts/common-debian.sh
Normal file
@@ -0,0 +1,445 @@
|
||||
#!/usr/bin/env bash
|
||||
#-------------------------------------------------------------------------------------------------------------
|
||||
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
|
||||
#-------------------------------------------------------------------------------------------------------------
|
||||
#
|
||||
# Docs: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/common.md
|
||||
# Maintainer: The VS Code and Codespaces Teams
|
||||
#
|
||||
# Syntax: ./common-debian.sh [install zsh flag] [username] [user UID] [user GID] [upgrade packages flag] [install Oh My Zsh! flag] [Add non-free packages]
|
||||
|
||||
set -e
|
||||
|
||||
INSTALL_ZSH=${1:-"true"}
|
||||
USERNAME=${2:-"automatic"}
|
||||
USER_UID=${3:-"automatic"}
|
||||
USER_GID=${4:-"automatic"}
|
||||
UPGRADE_PACKAGES=${5:-"true"}
|
||||
INSTALL_OH_MYS=${6:-"true"}
|
||||
ADD_NON_FREE_PACKAGES=${7:-"false"}
|
||||
SCRIPT_DIR="$(cd $(dirname "${BASH_SOURCE[0]}") && pwd)"
|
||||
MARKER_FILE="/usr/local/etc/vscode-dev-containers/common"
|
||||
|
||||
if [ "$(id -u)" -ne 0 ]; then
|
||||
echo -e 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.'
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Ensure that login shells get the correct path if the user updated the PATH using ENV.
|
||||
rm -f /etc/profile.d/00-restore-env.sh
|
||||
echo "export PATH=${PATH//$(sh -lc 'echo $PATH')/\$PATH}" >/etc/profile.d/00-restore-env.sh
|
||||
chmod +x /etc/profile.d/00-restore-env.sh
|
||||
|
||||
# If in automatic mode, determine if a user already exists, if not use vscode
|
||||
if [ "${USERNAME}" = "auto" ] || [ "${USERNAME}" = "automatic" ]; then
|
||||
USERNAME=""
|
||||
POSSIBLE_USERS=("vscode" "node" "codespace" "$(awk -v val=1000 -F ":" '$3==val{print $1}' /etc/passwd)")
|
||||
for CURRENT_USER in ${POSSIBLE_USERS[@]}; do
|
||||
if id -u ${CURRENT_USER} >/dev/null 2>&1; then
|
||||
USERNAME=${CURRENT_USER}
|
||||
break
|
||||
fi
|
||||
done
|
||||
if [ "${USERNAME}" = "" ]; then
|
||||
USERNAME=vscode
|
||||
fi
|
||||
elif [ "${USERNAME}" = "none" ]; then
|
||||
USERNAME=root
|
||||
USER_UID=0
|
||||
USER_GID=0
|
||||
fi
|
||||
|
||||
# Load markers to see which steps have already run
|
||||
if [ -f "${MARKER_FILE}" ]; then
|
||||
echo "Marker file found:"
|
||||
cat "${MARKER_FILE}"
|
||||
source "${MARKER_FILE}"
|
||||
fi
|
||||
|
||||
# Ensure apt is in non-interactive to avoid prompts
|
||||
export DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
# Function to call apt-get if needed
|
||||
apt_get_update_if_needed() {
|
||||
if [ ! -d "/var/lib/apt/lists" ] || [ "$(ls /var/lib/apt/lists/ | wc -l)" = "0" ]; then
|
||||
echo "Running apt-get update..."
|
||||
apt-get update
|
||||
else
|
||||
echo "Skipping apt-get update."
|
||||
fi
|
||||
}
|
||||
|
||||
# Run install apt-utils to avoid debconf warning then verify presence of other common developer tools and dependencies
|
||||
if [ "${PACKAGES_ALREADY_INSTALLED}" != "true" ]; then
|
||||
|
||||
package_list="apt-utils \
|
||||
openssh-client \
|
||||
gnupg2 \
|
||||
dirmngr \
|
||||
iproute2 \
|
||||
procps \
|
||||
lsof \
|
||||
htop \
|
||||
net-tools \
|
||||
psmisc \
|
||||
curl \
|
||||
wget \
|
||||
rsync \
|
||||
ca-certificates \
|
||||
unzip \
|
||||
zip \
|
||||
nano \
|
||||
vim-tiny \
|
||||
less \
|
||||
jq \
|
||||
lsb-release \
|
||||
apt-transport-https \
|
||||
dialog \
|
||||
libc6 \
|
||||
libgcc1 \
|
||||
libkrb5-3 \
|
||||
libgssapi-krb5-2 \
|
||||
libicu[0-9][0-9] \
|
||||
liblttng-ust0 \
|
||||
libstdc++6 \
|
||||
zlib1g \
|
||||
locales \
|
||||
sudo \
|
||||
ncdu \
|
||||
man-db \
|
||||
strace \
|
||||
manpages \
|
||||
manpages-dev \
|
||||
init-system-helpers"
|
||||
|
||||
# Needed for adding manpages-posix and manpages-posix-dev which are non-free packages in Debian
|
||||
if [ "${ADD_NON_FREE_PACKAGES}" = "true" ]; then
|
||||
# Bring in variables from /etc/os-release like VERSION_CODENAME
|
||||
. /etc/os-release
|
||||
sed -i -E "s/deb http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME} main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME} main contrib non-free/" /etc/apt/sources.list
|
||||
sed -i -E "s/deb-src http:\/\/(deb|httredir)\.debian\.org\/debian ${VERSION_CODENAME} main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME} main contrib non-free/" /etc/apt/sources.list
|
||||
sed -i -E "s/deb http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME}-updates main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME}-updates main contrib non-free/" /etc/apt/sources.list
|
||||
sed -i -E "s/deb-src http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME}-updates main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME}-updates main contrib non-free/" /etc/apt/sources.list
|
||||
sed -i "s/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main contrib non-free/" /etc/apt/sources.list
|
||||
sed -i "s/deb-src http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main contrib non-free/" /etc/apt/sources.list
|
||||
sed -i "s/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main contrib non-free/" /etc/apt/sources.list
|
||||
sed -i "s/deb-src http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main contrib non-free/" /etc/apt/sources.list
|
||||
# Handle bullseye location for security https://www.debian.org/releases/bullseye/amd64/release-notes/ch-information.en.html
|
||||
sed -i "s/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main contrib non-free/" /etc/apt/sources.list
|
||||
sed -i "s/deb-src http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main contrib non-free/" /etc/apt/sources.list
|
||||
echo "Running apt-get update..."
|
||||
apt-get update
|
||||
package_list="${package_list} manpages-posix manpages-posix-dev"
|
||||
else
|
||||
apt_get_update_if_needed
|
||||
fi
|
||||
|
||||
# Install libssl1.1 if available
|
||||
if [[ ! -z $(apt-cache --names-only search ^libssl1.1$) ]]; then
|
||||
package_list="${package_list} libssl1.1"
|
||||
fi
|
||||
|
||||
# Install appropriate version of libssl1.0.x if available
|
||||
libssl_package=$(dpkg-query -f '${db:Status-Abbrev}\t${binary:Package}\n' -W 'libssl1\.0\.?' 2>&1 || echo '')
|
||||
if [ "$(echo "$LIlibssl_packageBSSL" | grep -o 'libssl1\.0\.[0-9]:' | uniq | sort | wc -l)" -eq 0 ]; then
|
||||
if [[ ! -z $(apt-cache --names-only search ^libssl1.0.2$) ]]; then
|
||||
# Debian 9
|
||||
package_list="${package_list} libssl1.0.2"
|
||||
elif [[ ! -z $(apt-cache --names-only search ^libssl1.0.0$) ]]; then
|
||||
# Ubuntu 18.04, 16.04, earlier
|
||||
package_list="${package_list} libssl1.0.0"
|
||||
fi
|
||||
fi
|
||||
|
||||
echo "Packages to verify are installed: ${package_list}"
|
||||
apt-get -y install --no-install-recommends ${package_list} 2> >(grep -v 'debconf: delaying package configuration, since apt-utils is not installed' >&2)
|
||||
|
||||
# Install git if not already installed (may be more recent than distro version)
|
||||
if ! type git >/dev/null 2>&1; then
|
||||
apt-get -y install --no-install-recommends git
|
||||
fi
|
||||
|
||||
PACKAGES_ALREADY_INSTALLED="true"
|
||||
fi
|
||||
|
||||
# Get to latest versions of all packages
|
||||
if [ "${UPGRADE_PACKAGES}" = "true" ]; then
|
||||
apt_get_update_if_needed
|
||||
apt-get -y upgrade --no-install-recommends
|
||||
apt-get autoremove -y
|
||||
fi
|
||||
|
||||
# Ensure at least the en_US.UTF-8 UTF-8 locale is available.
|
||||
# Common need for both applications and things like the agnoster ZSH theme.
|
||||
if [ "${LOCALE_ALREADY_SET}" != "true" ] && ! grep -o -E '^\s*en_US.UTF-8\s+UTF-8' /etc/locale.gen >/dev/null; then
|
||||
echo "en_US.UTF-8 UTF-8" >>/etc/locale.gen
|
||||
locale-gen
|
||||
LOCALE_ALREADY_SET="true"
|
||||
fi
|
||||
|
||||
# Create or update a non-root user to match UID/GID.
|
||||
group_name="${USERNAME}"
|
||||
if id -u ${USERNAME} >/dev/null 2>&1; then
|
||||
# User exists, update if needed
|
||||
if [ "${USER_GID}" != "automatic" ] && [ "$USER_GID" != "$(id -g $USERNAME)" ]; then
|
||||
group_name="$(id -gn $USERNAME)"
|
||||
groupmod --gid $USER_GID ${group_name}
|
||||
usermod --gid $USER_GID $USERNAME
|
||||
fi
|
||||
if [ "${USER_UID}" != "automatic" ] && [ "$USER_UID" != "$(id -u $USERNAME)" ]; then
|
||||
usermod --uid $USER_UID $USERNAME
|
||||
fi
|
||||
else
|
||||
# Create user
|
||||
if [ "${USER_GID}" = "automatic" ]; then
|
||||
groupadd $USERNAME
|
||||
else
|
||||
groupadd --gid $USER_GID $USERNAME
|
||||
fi
|
||||
if [ "${USER_UID}" = "automatic" ]; then
|
||||
useradd -s /bin/bash --gid $USERNAME -m $USERNAME
|
||||
else
|
||||
useradd -s /bin/bash --uid $USER_UID --gid $USERNAME -m $USERNAME
|
||||
fi
|
||||
fi
|
||||
|
||||
# Add add sudo support for non-root user
|
||||
if [ "${USERNAME}" != "root" ] && [ "${EXISTING_NON_ROOT_USER}" != "${USERNAME}" ]; then
|
||||
echo $USERNAME ALL=\(root\) NOPASSWD:ALL >/etc/sudoers.d/$USERNAME
|
||||
chmod 0440 /etc/sudoers.d/$USERNAME
|
||||
EXISTING_NON_ROOT_USER="${USERNAME}"
|
||||
fi
|
||||
|
||||
# ** Shell customization section **
|
||||
if [ "${USERNAME}" = "root" ]; then
|
||||
user_rc_path="/root"
|
||||
else
|
||||
user_rc_path="/home/${USERNAME}"
|
||||
fi
|
||||
|
||||
# Restore user .bashrc defaults from skeleton file if it doesn't exist or is empty
|
||||
if [ ! -f "${user_rc_path}/.bashrc" ] || [ ! -s "${user_rc_path}/.bashrc" ]; then
|
||||
cp /etc/skel/.bashrc "${user_rc_path}/.bashrc"
|
||||
fi
|
||||
|
||||
# Restore user .profile defaults from skeleton file if it doesn't exist or is empty
|
||||
if [ ! -f "${user_rc_path}/.profile" ] || [ ! -s "${user_rc_path}/.profile" ]; then
|
||||
cp /etc/skel/.profile "${user_rc_path}/.profile"
|
||||
fi
|
||||
|
||||
# .bashrc/.zshrc snippet
|
||||
rc_snippet="$(
|
||||
cat <<'EOF'
|
||||
if [ -z "${USER}" ]; then export USER=$(whoami); fi
|
||||
if [[ "${PATH}" != *"$HOME/.local/bin"* ]]; then export PATH="${PATH}:$HOME/.local/bin"; fi
|
||||
# Display optional first run image specific notice if configured and terminal is interactive
|
||||
if [ -t 1 ] && [[ "${TERM_PROGRAM}" = "vscode" || "${TERM_PROGRAM}" = "codespaces" ]] && [ ! -f "$HOME/.config/vscode-dev-containers/first-run-notice-already-displayed" ]; then
|
||||
if [ -f "/usr/local/etc/vscode-dev-containers/first-run-notice.txt" ]; then
|
||||
cat "/usr/local/etc/vscode-dev-containers/first-run-notice.txt"
|
||||
elif [ -f "/workspaces/.codespaces/shared/first-run-notice.txt" ]; then
|
||||
cat "/workspaces/.codespaces/shared/first-run-notice.txt"
|
||||
fi
|
||||
mkdir -p "$HOME/.config/vscode-dev-containers"
|
||||
# Mark first run notice as displayed after 10s to avoid problems with fast terminal refreshes hiding it
|
||||
((sleep 10s; touch "$HOME/.config/vscode-dev-containers/first-run-notice-already-displayed") &)
|
||||
fi
|
||||
# Set the default git editor if not already set
|
||||
if [ -z "$(git config --get core.editor)" ] && [ -z "${GIT_EDITOR}" ]; then
|
||||
if [ "${TERM_PROGRAM}" = "vscode" ]; then
|
||||
if [[ -n $(command -v code-insiders) && -z $(command -v code) ]]; then
|
||||
export GIT_EDITOR="code-insiders --wait"
|
||||
else
|
||||
export GIT_EDITOR="code --wait"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
EOF
|
||||
)"
|
||||
|
||||
# code shim, it fallbacks to code-insiders if code is not available
|
||||
cat <<'EOF' >/usr/local/bin/code
|
||||
#!/bin/sh
|
||||
get_in_path_except_current() {
|
||||
which -a "$1" | grep -A1 "$0" | grep -v "$0"
|
||||
}
|
||||
code="$(get_in_path_except_current code)"
|
||||
if [ -n "$code" ]; then
|
||||
exec "$code" "$@"
|
||||
elif [ "$(command -v code-insiders)" ]; then
|
||||
exec code-insiders "$@"
|
||||
else
|
||||
echo "code or code-insiders is not installed" >&2
|
||||
exit 127
|
||||
fi
|
||||
EOF
|
||||
chmod +x /usr/local/bin/code
|
||||
|
||||
# systemctl shim - tells people to use 'service' if systemd is not running
|
||||
cat <<'EOF' >/usr/local/bin/systemctl
|
||||
#!/bin/sh
|
||||
set -e
|
||||
if [ -d "/run/systemd/system" ]; then
|
||||
exec /bin/systemctl/systemctl "$@"
|
||||
else
|
||||
echo '\n"systemd" is not running in this container due to its overhead.\nUse the "service" command to start services intead. e.g.: \n\nservice --status-all'
|
||||
fi
|
||||
EOF
|
||||
chmod +x /usr/local/bin/systemctl
|
||||
|
||||
# Codespaces bash and OMZ themes - partly inspired by https://github.com/ohmyzsh/ohmyzsh/blob/master/themes/robbyrussell.zsh-theme
|
||||
codespaces_bash="$(
|
||||
cat \
|
||||
<<'EOF'
|
||||
# Codespaces bash prompt theme
|
||||
__bash_prompt() {
|
||||
local userpart='`export XIT=$? \
|
||||
&& [ ! -z "${GITHUB_USER}" ] && echo -n "\[\033[0;32m\]@${GITHUB_USER} " || echo -n "\[\033[0;32m\]\u " \
|
||||
&& [ "$XIT" -ne "0" ] && echo -n "\[\033[1;31m\]➜" || echo -n "\[\033[0m\]➜"`'
|
||||
local gitbranch='`\
|
||||
if [ "$(git config --get codespaces-theme.hide-status 2>/dev/null)" != 1 ]; then \
|
||||
export BRANCH=$(git symbolic-ref --short HEAD 2>/dev/null || git rev-parse --short HEAD 2>/dev/null); \
|
||||
if [ "${BRANCH}" != "" ]; then \
|
||||
echo -n "\[\033[0;36m\](\[\033[1;31m\]${BRANCH}" \
|
||||
&& if git ls-files --error-unmatch -m --directory --no-empty-directory -o --exclude-standard ":/*" > /dev/null 2>&1; then \
|
||||
echo -n " \[\033[1;33m\]✗"; \
|
||||
fi \
|
||||
&& echo -n "\[\033[0;36m\]) "; \
|
||||
fi; \
|
||||
fi`'
|
||||
local lightblue='\[\033[1;34m\]'
|
||||
local removecolor='\[\033[0m\]'
|
||||
PS1="${userpart} ${lightblue}\w ${gitbranch}${removecolor}\$ "
|
||||
unset -f __bash_prompt
|
||||
}
|
||||
__bash_prompt
|
||||
EOF
|
||||
)"
|
||||
|
||||
codespaces_zsh="$(
|
||||
cat \
|
||||
<<'EOF'
|
||||
# Codespaces zsh prompt theme
|
||||
__zsh_prompt() {
|
||||
local prompt_username
|
||||
if [ ! -z "${GITHUB_USER}" ]; then
|
||||
prompt_username="@${GITHUB_USER}"
|
||||
else
|
||||
prompt_username="%n"
|
||||
fi
|
||||
PROMPT="%{$fg[green]%}${prompt_username} %(?:%{$reset_color%}➜ :%{$fg_bold[red]%}➜ )" # User/exit code arrow
|
||||
PROMPT+='%{$fg_bold[blue]%}%(5~|%-1~/…/%3~|%4~)%{$reset_color%} ' # cwd
|
||||
PROMPT+='$([ "$(git config --get codespaces-theme.hide-status 2>/dev/null)" != 1 ] && git_prompt_info)' # Git status
|
||||
PROMPT+='%{$fg[white]%}$ %{$reset_color%}'
|
||||
unset -f __zsh_prompt
|
||||
}
|
||||
ZSH_THEME_GIT_PROMPT_PREFIX="%{$fg_bold[cyan]%}(%{$fg_bold[red]%}"
|
||||
ZSH_THEME_GIT_PROMPT_SUFFIX="%{$reset_color%} "
|
||||
ZSH_THEME_GIT_PROMPT_DIRTY=" %{$fg_bold[yellow]%}✗%{$fg_bold[cyan]%})"
|
||||
ZSH_THEME_GIT_PROMPT_CLEAN="%{$fg_bold[cyan]%})"
|
||||
__zsh_prompt
|
||||
EOF
|
||||
)"
|
||||
|
||||
# Add RC snippet and custom bash prompt
|
||||
if [ "${RC_SNIPPET_ALREADY_ADDED}" != "true" ]; then
|
||||
echo "${rc_snippet}" >>/etc/bash.bashrc
|
||||
echo "${codespaces_bash}" >>"${user_rc_path}/.bashrc"
|
||||
echo 'export PROMPT_DIRTRIM=4' >>"${user_rc_path}/.bashrc"
|
||||
if [ "${USERNAME}" != "root" ]; then
|
||||
echo "${codespaces_bash}" >>"/root/.bashrc"
|
||||
echo 'export PROMPT_DIRTRIM=4' >>"/root/.bashrc"
|
||||
fi
|
||||
chown ${USERNAME}:${group_name} "${user_rc_path}/.bashrc"
|
||||
RC_SNIPPET_ALREADY_ADDED="true"
|
||||
fi
|
||||
|
||||
# Optionally install and configure zsh and Oh My Zsh!
|
||||
if [ "${INSTALL_ZSH}" = "true" ]; then
|
||||
if ! type zsh >/dev/null 2>&1; then
|
||||
apt_get_update_if_needed
|
||||
apt-get install -y zsh
|
||||
fi
|
||||
if [ "${ZSH_ALREADY_INSTALLED}" != "true" ]; then
|
||||
echo "${rc_snippet}" >>/etc/zsh/zshrc
|
||||
ZSH_ALREADY_INSTALLED="true"
|
||||
fi
|
||||
|
||||
# Adapted, simplified inline Oh My Zsh! install steps that adds, defaults to a codespaces theme.
|
||||
# See https://github.com/ohmyzsh/ohmyzsh/blob/master/tools/install.sh for official script.
|
||||
oh_my_install_dir="${user_rc_path}/.oh-my-zsh"
|
||||
if [ ! -d "${oh_my_install_dir}" ] && [ "${INSTALL_OH_MYS}" = "true" ]; then
|
||||
template_path="${oh_my_install_dir}/templates/zshrc.zsh-template"
|
||||
user_rc_file="${user_rc_path}/.zshrc"
|
||||
umask g-w,o-w
|
||||
mkdir -p ${oh_my_install_dir}
|
||||
git clone --depth=1 \
|
||||
-c core.eol=lf \
|
||||
-c core.autocrlf=false \
|
||||
-c fsck.zeroPaddedFilemode=ignore \
|
||||
-c fetch.fsck.zeroPaddedFilemode=ignore \
|
||||
-c receive.fsck.zeroPaddedFilemode=ignore \
|
||||
"https://github.com/ohmyzsh/ohmyzsh" "${oh_my_install_dir}" 2>&1
|
||||
echo -e "$(cat "${template_path}")\nDISABLE_AUTO_UPDATE=true\nDISABLE_UPDATE_PROMPT=true" >${user_rc_file}
|
||||
sed -i -e 's/ZSH_THEME=.*/ZSH_THEME="codespaces"/g' ${user_rc_file}
|
||||
|
||||
mkdir -p ${oh_my_install_dir}/custom/themes
|
||||
echo "${codespaces_zsh}" >"${oh_my_install_dir}/custom/themes/codespaces.zsh-theme"
|
||||
# Shrink git while still enabling updates
|
||||
cd "${oh_my_install_dir}"
|
||||
git repack -a -d -f --depth=1 --window=1
|
||||
# Copy to non-root user if one is specified
|
||||
if [ "${USERNAME}" != "root" ]; then
|
||||
cp -rf "${user_rc_file}" "${oh_my_install_dir}" /root
|
||||
chown -R ${USERNAME}:${group_name} "${user_rc_path}"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
# Persist image metadata info, script if meta.env found in same directory
|
||||
meta_info_script="$(
|
||||
cat <<'EOF'
|
||||
#!/bin/sh
|
||||
. /usr/local/etc/vscode-dev-containers/meta.env
|
||||
# Minimal output
|
||||
if [ "$1" = "version" ] || [ "$1" = "image-version" ]; then
|
||||
echo "${VERSION}"
|
||||
exit 0
|
||||
elif [ "$1" = "release" ]; then
|
||||
echo "${GIT_REPOSITORY_RELEASE}"
|
||||
exit 0
|
||||
elif [ "$1" = "content" ] || [ "$1" = "content-url" ] || [ "$1" = "contents" ] || [ "$1" = "contents-url" ]; then
|
||||
echo "${CONTENTS_URL}"
|
||||
exit 0
|
||||
fi
|
||||
#Full output
|
||||
echo
|
||||
echo "Development container image information"
|
||||
echo
|
||||
if [ ! -z "${VERSION}" ]; then echo "- Image version: ${VERSION}"; fi
|
||||
if [ ! -z "${DEFINITION_ID}" ]; then echo "- Definition ID: ${DEFINITION_ID}"; fi
|
||||
if [ ! -z "${VARIANT}" ]; then echo "- Variant: ${VARIANT}"; fi
|
||||
if [ ! -z "${GIT_REPOSITORY}" ]; then echo "- Source code repository: ${GIT_REPOSITORY}"; fi
|
||||
if [ ! -z "${GIT_REPOSITORY_RELEASE}" ]; then echo "- Source code release/branch: ${GIT_REPOSITORY_RELEASE}"; fi
|
||||
if [ ! -z "${BUILD_TIMESTAMP}" ]; then echo "- Timestamp: ${BUILD_TIMESTAMP}"; fi
|
||||
if [ ! -z "${CONTENTS_URL}" ]; then echo && echo "More info: ${CONTENTS_URL}"; fi
|
||||
echo
|
||||
EOF
|
||||
)"
|
||||
if [ -f "${SCRIPT_DIR}/meta.env" ]; then
|
||||
mkdir -p /usr/local/etc/vscode-dev-containers/
|
||||
cp -f "${SCRIPT_DIR}/meta.env" /usr/local/etc/vscode-dev-containers/meta.env
|
||||
echo "${meta_info_script}" >/usr/local/bin/devcontainer-info
|
||||
chmod +x /usr/local/bin/devcontainer-info
|
||||
fi
|
||||
|
||||
# Write marker file
|
||||
mkdir -p "$(dirname "${MARKER_FILE}")"
|
||||
echo -e "\
|
||||
PACKAGES_ALREADY_INSTALLED=${PACKAGES_ALREADY_INSTALLED}\n\
|
||||
LOCALE_ALREADY_SET=${LOCALE_ALREADY_SET}\n\
|
||||
EXISTING_NON_ROOT_USER=${EXISTING_NON_ROOT_USER}\n\
|
||||
RC_SNIPPET_ALREADY_ADDED=${RC_SNIPPET_ALREADY_ADDED}\n\
|
||||
ZSH_ALREADY_INSTALLED=${ZSH_ALREADY_INSTALLED}" >"${MARKER_FILE}"
|
||||
|
||||
echo "Done!"
|
||||
16
.devcontainer/scripts/getting-started.sh
Normal file
16
.devcontainer/scripts/getting-started.sh
Normal file
@@ -0,0 +1,16 @@
|
||||
#!/bin/bash
|
||||
|
||||
echo "To get started, login to GitHub and clone bun's GitHub repo into /workspaces/bun"
|
||||
echo "Make sure to login with a Personal Access Token"
|
||||
echo "# First time setup"
|
||||
echo "gh auth login"
|
||||
echo "gh repo clone Jarred-Sumner/bun . -- --depth=1 --progress -j8"
|
||||
echo ""
|
||||
echo "# Compile bun dependencies (zig is already compiled)"
|
||||
echo "make devcontainer"
|
||||
echo ""
|
||||
echo "# Build bun for development"
|
||||
echo "make dev"
|
||||
echo ""
|
||||
echo "# Run bun"
|
||||
echo "bun-debug"
|
||||
185
.devcontainer/scripts/github.sh
Normal file
185
.devcontainer/scripts/github.sh
Normal file
@@ -0,0 +1,185 @@
|
||||
#!/usr/bin/env bash
|
||||
#-------------------------------------------------------------------------------------------------------------
|
||||
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
|
||||
#-------------------------------------------------------------------------------------------------------------
|
||||
#
|
||||
# Docs: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/github.md
|
||||
# Maintainer: The VS Code and Codespaces Teams
|
||||
#
|
||||
# Syntax: ./github-debian.sh [version]
|
||||
|
||||
CLI_VERSION=${1:-"latest"}
|
||||
|
||||
GITHUB_CLI_ARCHIVE_GPG_KEY=C99B11DEB97541F0
|
||||
GPG_KEY_SERVERS="keyserver hkp://keyserver.ubuntu.com:80
|
||||
keyserver hkps://keys.openpgp.org
|
||||
keyserver hkp://keyserver.pgp.com"
|
||||
|
||||
set -e
|
||||
|
||||
if [ "$(id -u)" -ne 0 ]; then
|
||||
echo -e 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.'
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Get central common setting
|
||||
get_common_setting() {
|
||||
if [ "${common_settings_file_loaded}" != "true" ]; then
|
||||
curl -sfL "https://aka.ms/vscode-dev-containers/script-library/settings.env" -o /tmp/vsdc-settings.env 2>/dev/null || echo "Could not download settings file. Skipping."
|
||||
common_settings_file_loaded=true
|
||||
fi
|
||||
if [ -f "/tmp/vsdc-settings.env" ]; then
|
||||
local multi_line=""
|
||||
if [ "$2" = "true" ]; then multi_line="-z"; fi
|
||||
local result="$(grep ${multi_line} -oP "$1=\"?\K[^\"]+" /tmp/vsdc-settings.env | tr -d '\0')"
|
||||
if [ ! -z "${result}" ]; then declare -g $1="${result}"; fi
|
||||
fi
|
||||
echo "$1=${!1}"
|
||||
}
|
||||
|
||||
# Import the specified key in a variable name passed in as
|
||||
receive_gpg_keys() {
|
||||
get_common_setting $1
|
||||
local keys=${!1}
|
||||
get_common_setting GPG_KEY_SERVERS true
|
||||
|
||||
# Use a temporary locaiton for gpg keys to avoid polluting image
|
||||
export GNUPGHOME="/tmp/tmp-gnupg"
|
||||
mkdir -p ${GNUPGHOME}
|
||||
chmod 700 ${GNUPGHOME}
|
||||
echo -e "disable-ipv6\n${GPG_KEY_SERVERS}" >${GNUPGHOME}/dirmngr.conf
|
||||
# GPG key download sometimes fails for some reason and retrying fixes it.
|
||||
local retry_count=0
|
||||
local gpg_ok="false"
|
||||
set +e
|
||||
until [ "${gpg_ok}" = "true" ] || [ "${retry_count}" -eq "5" ]; do
|
||||
echo "(*) Downloading GPG key..."
|
||||
(echo "${keys}" | xargs -n 1 gpg --recv-keys) 2>&1 && gpg_ok="true"
|
||||
if [ "${gpg_ok}" != "true" ]; then
|
||||
echo "(*) Failed getting key, retring in 10s..."
|
||||
((retry_count++))
|
||||
sleep 10s
|
||||
fi
|
||||
done
|
||||
set -e
|
||||
if [ "${gpg_ok}" = "false" ]; then
|
||||
echo "(!) Failed to get gpg key."
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
# Figure out correct version of a three part version number is not passed
|
||||
find_version_from_git_tags() {
|
||||
local variable_name=$1
|
||||
local requested_version=${!variable_name}
|
||||
if [ "${requested_version}" = "none" ]; then return; fi
|
||||
local repository=$2
|
||||
local prefix=${3:-"tags/v"}
|
||||
local separator=${4:-"."}
|
||||
local last_part_optional=${5:-"false"}
|
||||
if [ "$(echo "${requested_version}" | grep -o "." | wc -l)" != "2" ]; then
|
||||
local escaped_separator=${separator//./\\.}
|
||||
local last_part
|
||||
if [ "${last_part_optional}" = "true" ]; then
|
||||
last_part="(${escaped_separator}[0-9]+)?"
|
||||
else
|
||||
last_part="${escaped_separator}[0-9]+"
|
||||
fi
|
||||
local regex="${prefix}\\K[0-9]+${escaped_separator}[0-9]+${last_part}$"
|
||||
local version_list="$(git ls-remote --tags ${repository} | grep -oP "${regex}" | tr -d ' ' | tr "${separator}" "." | sort -rV)"
|
||||
if [ "${requested_version}" = "latest" ] || [ "${requested_version}" = "current" ] || [ "${requested_version}" = "lts" ]; then
|
||||
declare -g ${variable_name}="$(echo "${version_list}" | head -n 1)"
|
||||
else
|
||||
set +e
|
||||
declare -g ${variable_name}="$(echo "${version_list}" | grep -E -m 1 "^${requested_version//./\\.}([\\.\\s]|$)")"
|
||||
set -e
|
||||
fi
|
||||
fi
|
||||
if [ -z "${!variable_name}" ] || ! echo "${version_list}" | grep "^${!variable_name//./\\.}$" >/dev/null 2>&1; then
|
||||
echo -e "Invalid ${variable_name} value: ${requested_version}\nValid values:\n${version_list}" >&2
|
||||
exit 1
|
||||
fi
|
||||
echo "${variable_name}=${!variable_name}"
|
||||
}
|
||||
|
||||
# Import the specified key in a variable name passed in as
|
||||
receive_gpg_keys() {
|
||||
get_common_setting $1
|
||||
local keys=${!1}
|
||||
get_common_setting GPG_KEY_SERVERS true
|
||||
local keyring_args=""
|
||||
if [ ! -z "$2" ]; then
|
||||
keyring_args="--no-default-keyring --keyring $2"
|
||||
fi
|
||||
|
||||
# Use a temporary locaiton for gpg keys to avoid polluting image
|
||||
export GNUPGHOME="/tmp/tmp-gnupg"
|
||||
mkdir -p ${GNUPGHOME}
|
||||
chmod 700 ${GNUPGHOME}
|
||||
echo -e "disable-ipv6\n${GPG_KEY_SERVERS}" >${GNUPGHOME}/dirmngr.conf
|
||||
# GPG key download sometimes fails for some reason and retrying fixes it.
|
||||
local retry_count=0
|
||||
local gpg_ok="false"
|
||||
set +e
|
||||
until [ "${gpg_ok}" = "true" ] || [ "${retry_count}" -eq "5" ]; do
|
||||
echo "(*) Downloading GPG key..."
|
||||
(echo "${keys}" | xargs -n 1 gpg -q ${keyring_args} --recv-keys) 2>&1 && gpg_ok="true"
|
||||
if [ "${gpg_ok}" != "true" ]; then
|
||||
echo "(*) Failed getting key, retring in 10s..."
|
||||
((retry_count++))
|
||||
sleep 10s
|
||||
fi
|
||||
done
|
||||
set -e
|
||||
if [ "${gpg_ok}" = "false" ]; then
|
||||
echo "(!) Failed to get gpg key."
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
# Function to run apt-get if needed
|
||||
apt_get_update_if_needed() {
|
||||
if [ ! -d "/var/lib/apt/lists" ] || [ "$(ls /var/lib/apt/lists/ | wc -l)" = "0" ]; then
|
||||
echo "Running apt-get update..."
|
||||
apt-get update
|
||||
else
|
||||
echo "Skipping apt-get update."
|
||||
fi
|
||||
}
|
||||
|
||||
# Checks if packages are installed and installs them if not
|
||||
check_packages() {
|
||||
if ! dpkg -s "$@" >/dev/null 2>&1; then
|
||||
apt_get_update_if_needed
|
||||
apt-get -y install --no-install-recommends "$@"
|
||||
fi
|
||||
}
|
||||
|
||||
export DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
# Install curl, apt-transport-https, curl, gpg, or dirmngr, git if missing
|
||||
check_packages curl ca-certificates apt-transport-https dirmngr gnupg2
|
||||
if ! type git >/dev/null 2>&1; then
|
||||
apt_get_update_if_needed
|
||||
apt-get -y install --no-install-recommends git
|
||||
fi
|
||||
|
||||
# Soft version matching
|
||||
if [ "${CLI_VERSION}" != "latest" ] && [ "${CLI_VERSION}" != "lts" ] && [ "${CLI_VERSION}" != "stable" ]; then
|
||||
find_version_from_git_tags CLI_VERSION "https://github.com/cli/cli"
|
||||
version_suffix="=${CLI_VERSION}"
|
||||
else
|
||||
version_suffix=""
|
||||
fi
|
||||
|
||||
# Install the GitHub CLI
|
||||
echo "Downloading github CLI..."
|
||||
# Import key safely (new method rather than deprecated apt-key approach) and install
|
||||
. /etc/os-release
|
||||
receive_gpg_keys GITHUB_CLI_ARCHIVE_GPG_KEY /usr/share/keyrings/githubcli-archive-keyring.gpg
|
||||
echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages ${VERSION_CODENAME} main" >/etc/apt/sources.list.d/github-cli.list
|
||||
apt-get update
|
||||
apt-get -y install "gh${version_suffix}"
|
||||
rm -rf "/tmp/gh/gnupg"
|
||||
echo "Done!"
|
||||
7
.devcontainer/scripts/nice.sh
Normal file
7
.devcontainer/scripts/nice.sh
Normal file
@@ -0,0 +1,7 @@
|
||||
#!/bin/bash
|
||||
|
||||
chsh -s $(which zsh)
|
||||
sh -c "$(curl -fsSL https://starship.rs/install.sh) -- --platform linux_musl"
|
||||
echo "eval \"$(starship init zsh)\"" >>~/.zshrc
|
||||
|
||||
curl https://github.com/Jarred-Sumner/vscode-zig/releases/download/fork-v1/zig-0.2.5.vsix >/home/ubuntu/vscode-zig.vsix
|
||||
7
.devcontainer/scripts/zig-env.sh
Normal file
7
.devcontainer/scripts/zig-env.sh
Normal file
@@ -0,0 +1,7 @@
|
||||
#!/bin/bash
|
||||
|
||||
curl -L https://github.com/Jarred-Sumner/vscode-zig/releases/download/fork-v1/zig-0.2.5.vsix >/home/ubuntu/vscode-zig.vsix
|
||||
git clone https://github.com/zigtools/zls /home/ubuntu/zls
|
||||
cd /home/ubuntu/zls
|
||||
git submodule update --init --recursive --progress --depth=1
|
||||
zig build -Drelease-fast
|
||||
9
.devcontainer/workspace.code-workspace
Normal file
9
.devcontainer/workspace.code-workspace
Normal file
@@ -0,0 +1,9 @@
|
||||
{
|
||||
"folders": [
|
||||
{
|
||||
// Source code
|
||||
"name": "bun",
|
||||
"path": "bun"
|
||||
},
|
||||
]
|
||||
}
|
||||
9
.devcontainer/zls.json
Normal file
9
.devcontainer/zls.json
Normal file
@@ -0,0 +1,9 @@
|
||||
{
|
||||
"zig_exe_path": "/build/zig/zig",
|
||||
"enable_snippets": true,
|
||||
"warn_style": false,
|
||||
"enable_semantic_tokens": true,
|
||||
"operator_completions": true,
|
||||
"include_at_in_builtins": false,
|
||||
"max_detail_length": 1048576
|
||||
}
|
||||
15
.docker/build-base-images.sh
Normal file
15
.docker/build-base-images.sh
Normal file
@@ -0,0 +1,15 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -euxo pipefail
|
||||
|
||||
export DOCKER_BUILDKIT=1
|
||||
|
||||
docker login --username bunbunbunbun
|
||||
|
||||
docker build -f Dockerfile.base -t bunbunbunbun/bun-test-base --target bun-test-base . --platform=linux/$BUILDARCH --build-arg BUILDARCH=$BUILDARCH
|
||||
docker build -f Dockerfile.base -t bunbunbunbun/bun-base-with-zig-and-webkit --target bun-base-with-zig-and-webkit . --platform=linux/$BUILDARCH --build-arg BUILDARCH=$BUILDARCH
|
||||
docker build -f Dockerfile.base -t bunbunbunbun/bun-base --target bun-base --platform=linux/$BUILDARCH . --build-arg BUILDARCH=$BUILDARCH
|
||||
|
||||
docker push bunbunbunbun/bun-test-base:latest
|
||||
docker push bunbunbunbun/bun-base-with-zig-and-webkit:latest
|
||||
docker push bunbunbunbun/bun-base:latest
|
||||
24
.docker/build-base.sh
Normal file
24
.docker/build-base.sh
Normal file
@@ -0,0 +1,24 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -euxo pipefail
|
||||
|
||||
export DOCKER_BUILDKIT=1
|
||||
|
||||
docker buildx build \
|
||||
-t bunbunbunbun/bun-test-base:latest -f Dockerfile.base \
|
||||
--target bun-test-base \
|
||||
--platform=linux/$BUILDARCH --build-arg BUILDARCH=$BUILDARCH .
|
||||
docker buildx build \
|
||||
--target bun-base \
|
||||
-f Dockerfile.base \
|
||||
-t bunbunbunbun/bun-base:latest --platform=linux/$BUILDARCH \
|
||||
--build-arg BUILDARCH=$BUILDARCH .
|
||||
docker buildx build \
|
||||
-t bunbunbunbun/bun-base-with-zig-and-webkit:latest \
|
||||
-f Dockerfile.base \
|
||||
--target bun-base-with-zig-and-webkit \
|
||||
--platform=linux/$BUILDARCH --build-arg BUILDARCH=$BUILDARCH .
|
||||
|
||||
docker push bunbunbunbun/bun-test-base:latest
|
||||
docker push bunbunbunbun/bun-base:latest
|
||||
docker push bunbunbunbun/bun-base-with-zig-and-webkit:latest
|
||||
1539
.docker/chrome.json
Normal file
1539
.docker/chrome.json
Normal file
File diff suppressed because it is too large
Load Diff
14
.docker/chromium.pref
Normal file
14
.docker/chromium.pref
Normal file
@@ -0,0 +1,14 @@
|
||||
# Note: 2 blank lines are required between entries
|
||||
Package: *
|
||||
Pin: release a=eoan
|
||||
Pin-Priority: 500
|
||||
|
||||
Package: *
|
||||
Pin: origin "ftp.debian.org"
|
||||
Pin-Priority: 300
|
||||
|
||||
# Pattern includes 'chromium', 'chromium-browser' and similarly
|
||||
# named dependencies:
|
||||
Package: chromium*
|
||||
Pin: origin "ftp.debian.org"
|
||||
Pin-Priority: 700
|
||||
8
.docker/copy-bun-binary.sh
Normal file
8
.docker/copy-bun-binary.sh
Normal file
@@ -0,0 +1,8 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -euxo pipefail
|
||||
|
||||
name=$(openssl rand -hex 12)
|
||||
id=$(docker create --name=bun-binary-$name $CONTAINER_TAG)
|
||||
docker container cp bun-binary-$name:$BUN_RELEASE_DIR bun-binary
|
||||
echo -e "bun-binary-$name"
|
||||
3
.docker/debian.list
Normal file
3
.docker/debian.list
Normal file
@@ -0,0 +1,3 @@
|
||||
deb http://deb.debian.org/debian buster main
|
||||
deb http://deb.debian.org/debian buster-updates main
|
||||
deb http://deb.debian.org/debian-security buster/updates main
|
||||
34
.docker/dockerfile-common.sh
Normal file
34
.docker/dockerfile-common.sh
Normal file
@@ -0,0 +1,34 @@
|
||||
export DOCKER_BUILDKIT=1
|
||||
|
||||
export BUILDKIT_ARCH=$(uname -m)
|
||||
export ARCH=${BUILDKIT_ARCH}
|
||||
|
||||
if [ "$BUILDKIT_ARCH" == "amd64" ]; then
|
||||
export BUILDKIT_ARCH="amd64"
|
||||
export ARCH=x64
|
||||
fi
|
||||
|
||||
if [ "$BUILDKIT_ARCH" == "x86_64" ]; then
|
||||
export BUILDKIT_ARCH="amd64"
|
||||
export ARCH=x64
|
||||
fi
|
||||
|
||||
if [ "$BUILDKIT_ARCH" == "arm64" ]; then
|
||||
export BUILDKIT_ARCH="arm64"
|
||||
export ARCH=aarch64
|
||||
fi
|
||||
|
||||
if [ "$BUILDKIT_ARCH" == "aarch64" ]; then
|
||||
export BUILDKIT_ARCH="arm64"
|
||||
export ARCH=aarch64
|
||||
fi
|
||||
|
||||
if [ "$BUILDKIT_ARCH" == "armv7l" ]; then
|
||||
echo "Unsupported platform: $BUILDKIT_ARCH"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
export BUILD_ID=$(cat build-id)
|
||||
export CONTAINER_NAME=bun-linux-$ARCH
|
||||
export DEBUG_CONTAINER_NAME=debug-bun-linux-$ARCH
|
||||
export TEMP=/tmp/bun-0.0.$BUILD_ID
|
||||
11
.docker/pull.sh
Normal file
11
.docker/pull.sh
Normal file
@@ -0,0 +1,11 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -euxo pipefail
|
||||
|
||||
docker pull bunbunbunbun/bun-test-base:latest --platform=linux/amd64
|
||||
docker pull bunbunbunbun/bun-base:latest --platform=linux/amd64
|
||||
docker pull bunbunbunbun/bun-base-with-zig-and-webkit:latest --platform=linux/amd64
|
||||
|
||||
docker tag bunbunbunbun/bun-test-base:latest bun-base:latest
|
||||
docker tag bunbunbunbun/bun-base:latest bun-base:latest
|
||||
docker tag bunbunbunbun/bun-base-with-zig-and-webkit:latest bun-base-with-zig-and-webkit:latest
|
||||
47
.docker/run-dockerfile.sh
Normal file
47
.docker/run-dockerfile.sh
Normal file
@@ -0,0 +1,47 @@
|
||||
#!/bin/bash
|
||||
|
||||
source "dockerfile-common.sh"
|
||||
|
||||
export $CONTAINER_NAME=$CONTAINER_NAME-local
|
||||
|
||||
rm -rf $TEMP
|
||||
mkdir -p $TEMP
|
||||
|
||||
docker build . --target release --progress=plain -t $CONTAINER_NAME:latest --build-arg BUILDKIT_INLINE_CACHE=1 --platform=linux/$BUILDKIT_ARCH --cache-from $CONTAINER_NAME:latest
|
||||
|
||||
if (($?)); then
|
||||
echo "Failed to build container"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
id=$(docker create $CONTAINER_NAME:latest)
|
||||
docker cp $id:/home/ubuntu/bun-release $TEMP/$CONTAINER_NAME
|
||||
if (($?)); then
|
||||
echo "Failed to cp container"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
cd $TEMP
|
||||
mkdir -p $TEMP/$CONTAINER_NAME $TEMP/$DEBUG_CONTAINER_NAME
|
||||
mv $CONTAINER_NAME/bun-profile $DEBUG_CONTAINER_NAME/bun
|
||||
zip -r $CONTAINER_NAME.zip $CONTAINER_NAME
|
||||
zip -r $DEBUG_CONTAINER_NAME.zip $DEBUG_CONTAINER_NAME
|
||||
docker rm -v $id
|
||||
abs=$(realpath $TEMP/$CONTAINER_NAME.zip)
|
||||
debug_abs=$(realpath $TEMP/$DEBUG_CONTAINER_NAME.zip)
|
||||
|
||||
case $(uname -s) in
|
||||
"Linux") target="linux" ;;
|
||||
*) target="other" ;;
|
||||
esac
|
||||
|
||||
if [ "$target" = "linux" ]; then
|
||||
if command -v bun --version >/dev/null; then
|
||||
cp $TEMP/$CONTAINER_NAME/bun $(which bun)
|
||||
cp $TEMP/$DEBUG_CONTAINER_NAME/bun $(which bun-profile)
|
||||
fi
|
||||
fi
|
||||
|
||||
echo "Saved to:"
|
||||
echo $debug_abs
|
||||
echo $abs
|
||||
9
.docker/run-test.sh
Executable file
9
.docker/run-test.sh
Executable file
@@ -0,0 +1,9 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -euxo pipefail
|
||||
|
||||
bun install
|
||||
bun install --cwd ./integration/snippets
|
||||
bun install --cwd ./integration/scripts
|
||||
|
||||
make $BUN_TEST_NAME
|
||||
5
.docker/runner.sh
Normal file
5
.docker/runner.sh
Normal file
@@ -0,0 +1,5 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -euxo pipefail
|
||||
|
||||
docker container run --security-opt seccomp=.docker/chrome.json --env GITHUB_WORKSPACE=$GITHUB_WORKSPACE --env BUN_TEST_NAME=$BUN_TEST_NAME --ulimit memlock=-1:-1 --init --rm bun-test:latest
|
||||
5
.docker/unit-tests.sh
Normal file
5
.docker/unit-tests.sh
Normal file
@@ -0,0 +1,5 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -euxo pipefail
|
||||
|
||||
docker container run --security-opt seccomp=.docker/chrome.json --env GITHUB_WORKSPACE=$GITHUB_WORKSPACE --ulimit memlock=-1:-1 --init --rm bun-unit-tests:latest
|
||||
@@ -10,5 +10,8 @@ examples
|
||||
|
||||
**/.next
|
||||
.git
|
||||
src/javascript/jsc/WebKit/WebKitBuild
|
||||
**/CMakeCache.txt
|
||||
src/javascript/jsc/WebKit
|
||||
**/CMakeCache.txt
|
||||
packages/**/bun
|
||||
packages/**/bun-profile
|
||||
zig-cache
|
||||
|
||||
3
.gitattributes
vendored
Normal file
3
.gitattributes
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
.vscode/launch.json linguist-generated
|
||||
src/api/schema.d.ts linguist-generated
|
||||
src/api/schema.js linguist-generated
|
||||
6
.github/workflows/bun-framework-next.yml
vendored
6
.github/workflows/bun-framework-next.yml
vendored
@@ -1,8 +1,12 @@
|
||||
name: CI workflow for bun-framework-next
|
||||
name: bun-framework-next
|
||||
on:
|
||||
push:
|
||||
paths:
|
||||
- packages/bun-framework-next/**/*
|
||||
branches: [main, bun-framework-next-actions]
|
||||
pull_request:
|
||||
paths:
|
||||
- packages/bun-framework-next/**/*
|
||||
branches: [main]
|
||||
|
||||
jobs:
|
||||
|
||||
167
.github/workflows/bun.yml
vendored
Normal file
167
.github/workflows/bun.yml
vendored
Normal file
@@ -0,0 +1,167 @@
|
||||
name: bun
|
||||
on:
|
||||
push:
|
||||
branches: [main, bun-actions]
|
||||
paths-ignore:
|
||||
- "examples/**"
|
||||
- "bench/**"
|
||||
- "README.*"
|
||||
- "LICENSE"
|
||||
- ".vscode"
|
||||
- ".devcontainer"
|
||||
pull_request:
|
||||
branches: [main]
|
||||
paths-ignore:
|
||||
- "examples/**"
|
||||
- "bench/**"
|
||||
- README.*
|
||||
|
||||
env:
|
||||
REGISTRY: ghcr.io
|
||||
IMAGE_NAME: ${{ github.repository }}
|
||||
TEST_TAG: bun-test'
|
||||
|
||||
jobs:
|
||||
e2e:
|
||||
runs-on: self-hosted
|
||||
name: "Integration tests"
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
- name: Checkout submodules
|
||||
run: git -c submodule."src/javascript/jsc/WebKit".update=none submodule update --init --recursive --depth=1 --progress -j 8
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v1
|
||||
- name: Login to Dockerhub
|
||||
uses: docker/login-action@v1
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_PASSWORD }}
|
||||
- name: Pull Base Image
|
||||
run: bash .docker/pull.sh
|
||||
- name: Build tests
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
context: .
|
||||
target: test_base
|
||||
tags: bun-test:latest
|
||||
load: true
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
builder: ${{ steps.buildx.outputs.name }}
|
||||
- name: Run test-with-hmr
|
||||
env:
|
||||
BUN_TEST_NAME: test-with-hmr
|
||||
GITHUB_WORKSPACE: $GITHUB_WORKSPACE
|
||||
RUNNER_TEMP: ${RUNNER_TEMP}
|
||||
run: bash .docker/runner.sh
|
||||
- name: Run test-no-hmr
|
||||
env:
|
||||
BUN_TEST_NAME: test-no-hmr
|
||||
GITHUB_WORKSPACE: $GITHUB_WORKSPACE
|
||||
RUNNER_TEMP: ${RUNNER_TEMP}
|
||||
run: bash .docker/runner.sh
|
||||
- name: Run test-bun-create-next
|
||||
env:
|
||||
RUNNER_TEMP: ${RUNNER_TEMP}
|
||||
BUN_TEST_NAME: test-create-next
|
||||
GITHUB_WORKSPACE: $GITHUB_WORKSPACE
|
||||
run: bash .docker/runner.sh
|
||||
- name: Run test-bun-create-react
|
||||
env:
|
||||
RUNNER_TEMP: ${RUNNER_TEMP}
|
||||
BUN_TEST_NAME: test-create-react
|
||||
GITHUB_WORKSPACE: $GITHUB_WORKSPACE
|
||||
run: bash .docker/runner.sh
|
||||
- name: Run test-bun-run
|
||||
env:
|
||||
RUNNER_TEMP: ${RUNNER_TEMP}
|
||||
BUN_TEST_NAME: test-bun-run
|
||||
GITHUB_WORKSPACE: $GITHUB_WORKSPACE
|
||||
run: bash .docker/runner.sh
|
||||
- name: Run test-bun-install
|
||||
env:
|
||||
RUNNER_TEMP: ${RUNNER_TEMP}
|
||||
BUN_TEST_NAME: test-bun-install
|
||||
GITHUB_WORKSPACE: $GITHUB_WORKSPACE
|
||||
run: bash .docker/runner.sh
|
||||
# This is commented out because zig test does not work on the CI
|
||||
# Which sucks
|
||||
# zig-unit-tests:
|
||||
# runs-on: self-hosted
|
||||
# name: "Unit tests (Zig)"
|
||||
# steps:
|
||||
# - name: Checkout
|
||||
# uses: actions/checkout@v2
|
||||
# - name: Checkout submodules
|
||||
# run: git -c submodule."src/javascript/jsc/WebKit".update=none submodule update --init --recursive --depth=1 --progress -j 8
|
||||
# - name: Set up Docker Buildx
|
||||
# uses: docker/setup-buildx-action@v1
|
||||
# - name: Login to Dockerhub
|
||||
# uses: docker/login-action@v1
|
||||
# with:
|
||||
# username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
# password: ${{ secrets.DOCKERHUB_PASSWORD }}
|
||||
# - name: Pull Base Image
|
||||
# run: bash .docker/pull.sh
|
||||
# - name: Build tests
|
||||
# uses: docker/build-push-action@v2
|
||||
# with:
|
||||
# context: .
|
||||
# target: build_unit
|
||||
# tags: bun-unit-tests:latest
|
||||
# load: true
|
||||
# cache-from: type=gha
|
||||
# cache-to: type=gha,mode=max
|
||||
# builder: ${{ steps.buildx.outputs.name }}
|
||||
# - name: Run tests
|
||||
# env:
|
||||
# GITHUB_WORKSPACE: $GITHUB_WORKSPACE
|
||||
# RUNNER_TEMP: ${RUNNER_TEMP}
|
||||
# run: bash .docker/unit-tests.sh
|
||||
release:
|
||||
runs-on: self-hosted
|
||||
needs: ["e2e"]
|
||||
if: github.ref == 'refs/heads/main'
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
- name: Checkout submodules
|
||||
run: git -c submodule."src/javascript/jsc/WebKit".update=none submodule update --init --recursive --depth=1 --progress -j 8
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v1
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v1
|
||||
with:
|
||||
username: jarredsumner
|
||||
password: ${{ secrets.DOCKERHUB_ALT }}
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v1
|
||||
with:
|
||||
install: true
|
||||
- name: Pull Base Image
|
||||
run: bash .docker/pull.sh
|
||||
- name: Build release image
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
context: .
|
||||
target: release
|
||||
tags: |
|
||||
ghcr.io/jarred-sumner/bun:${{github.sha}}
|
||||
ghcr.io/jarred-sumner/bun:edge
|
||||
jarredsumner/bun:${{github.sha}}
|
||||
jarredsumner/bun:edge
|
||||
platforms: |
|
||||
linux/amd64
|
||||
labels: |
|
||||
org.opencontainers.image.title=bun
|
||||
org.opencontainers.image.description=bun is a fast bundler, transpiler, JavaScript Runtime environment and package manager for web software. The image is an Ubuntu 20.04 image with bun preinstalled into /opt/bun.
|
||||
org.opencontainers.image.vendor=bun
|
||||
org.opencontainers.image.source=https://github.com/Jarred-Sumner/bun
|
||||
org.opencontainers.image.url=https://bun.sh
|
||||
builder: ${{ steps.buildx.outputs.name }}
|
||||
push: true
|
||||
13
.gitignore
vendored
13
.gitignore
vendored
@@ -12,6 +12,7 @@ yarn.lock
|
||||
dist
|
||||
*.log
|
||||
*.out.js
|
||||
*.out.refresh.js
|
||||
/package-lock.json
|
||||
build
|
||||
*.wat
|
||||
@@ -77,4 +78,14 @@ misctools/fetch
|
||||
src/deps/libiconv
|
||||
src/deps/openssl
|
||||
src/tests.zig
|
||||
*.blob
|
||||
*.blob
|
||||
src/deps/s2n-tls
|
||||
.npm
|
||||
.npm.gz
|
||||
|
||||
bun-binary
|
||||
|
||||
src/deps/PLCrashReporter/
|
||||
|
||||
*.dSYM
|
||||
*.crash
|
||||
|
||||
12
.gitmodules
vendored
12
.gitmodules
vendored
@@ -11,7 +11,7 @@
|
||||
ignore = dirty
|
||||
[submodule "src/deps/mimalloc"]
|
||||
path = src/deps/mimalloc
|
||||
url = https://github.com/microsoft/mimalloc.git
|
||||
url = https://github.com/Jarred-Sumner/mimalloc.git
|
||||
ignore = dirty
|
||||
[submodule "src/deps/zlib"]
|
||||
path = src/deps/zlib
|
||||
@@ -21,7 +21,11 @@
|
||||
path = src/deps/libarchive
|
||||
url = https://github.com/libarchive/libarchive.git
|
||||
ignore = dirty
|
||||
[submodule "src/deps/s2n-tls"]
|
||||
path = src/deps/s2n-tls
|
||||
url = https://github.com/Jarred-Sumner/s2n-tls
|
||||
[submodule "src/deps/boringssl"]
|
||||
path = src/deps/boringssl
|
||||
url = https://github.com/google/boringssl.git
|
||||
ignore = dirty
|
||||
[submodule "src/deps/libbacktrace"]
|
||||
path = src/deps/libbacktrace
|
||||
url = https://github.com/ianlancetaylor/libbacktrace
|
||||
ignore = dirty
|
||||
|
||||
23
.scripts/write-versions.sh
Normal file
23
.scripts/write-versions.sh
Normal file
@@ -0,0 +1,23 @@
|
||||
#!/bin/bash
|
||||
set -euxo pipefail
|
||||
|
||||
WEBKIT_VERSION=$(git rev-parse HEAD:./src/javascript/jsc/WebKit)
|
||||
MIMALLOC_VERSION=$(git rev-parse HEAD:./src/deps/mimalloc)
|
||||
LIBARCHIVE_VERSION=$(git rev-parse HEAD:./src/deps/libarchive)
|
||||
PICOHTTPPARSER_VERSION=$(git rev-parse HEAD:./src/deps/picohttpparser)
|
||||
BORINGSSL_VERSION=$(git rev-parse HEAD:./src/deps/boringssl)
|
||||
ZLIB_VERSION=$(git rev-parse HEAD:./src/deps/zlib)
|
||||
|
||||
rm -rf src/generated_versions_list.zig
|
||||
echo "// AUTO-GENERATED FILE. Created via .scripts/write-versions.sh" >src/generated_versions_list.zig
|
||||
echo "" >>src/generated_versions_list.zig
|
||||
echo "pub const webkit = \"$WEBKIT_VERSION\";" >>src/generated_versions_list.zig
|
||||
echo "pub const mimalloc = \"$MIMALLOC_VERSION\";" >>src/generated_versions_list.zig
|
||||
echo "pub const libarchive = \"$LIBARCHIVE_VERSION\";" >>src/generated_versions_list.zig
|
||||
echo "pub const picohttpparser = \"$PICOHTTPPARSER_VERSION\";" >>src/generated_versions_list.zig
|
||||
echo "pub const boringssl = \"$BORINGSSL_VERSION\";" >>src/generated_versions_list.zig
|
||||
echo "pub const zlib = \"$ZLIB_VERSION\";" >>src/generated_versions_list.zig
|
||||
echo "pub const zig = @import(\"std\").fmt.comptimePrint(\"{}\", .{@import(\"builtin\").zig_version});" >>src/generated_versions_list.zig
|
||||
echo "" >>src/generated_versions_list.zig
|
||||
|
||||
zig fmt src/generated_versions_list.zig
|
||||
204
.vscode/launch.json
generated
vendored
204
.vscode/launch.json
generated
vendored
@@ -1,12 +1,84 @@
|
||||
{
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "HTTP bench",
|
||||
"program": "${workspaceFolder}/misctools/http_bench",
|
||||
"args": ["https://twitter.com", "--count=100"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "fetch debug",
|
||||
"program": "${workspaceFolder}/misctools/fetch",
|
||||
"args": ["https://api.github.com/repos/hanford/trends/tarball"],
|
||||
"args": ["https://example.com", "--verbose"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "fetch debug #2",
|
||||
"program": "${workspaceFolder}/misctools/fetch",
|
||||
"args": ["https://twitter.com", "--verbose"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "fetch debug #13w",
|
||||
"program": "${workspaceFolder}/misctools/fetch",
|
||||
"args": ["http://127.0.0.1:8080/next.json", "--quiet", "--verbose"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "fetch debug #12w",
|
||||
"program": "${workspaceFolder}/misctools/fetch",
|
||||
"args": ["https://registry.npmjs.org/next", "--quiet", "--verbose"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "hop",
|
||||
"program": "hop",
|
||||
"args": ["swc-linux-arm64-musl-12.0.3.tgz"],
|
||||
"cwd": "/Users/jarred/Downloads/hop-test",
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "hop extract",
|
||||
"program": "hop",
|
||||
"args": ["swc-linux-arm64-musl-12.0.3.hop"],
|
||||
"cwd": "/Users/jarred/Downloads/hop-test",
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "fetch debug #2",
|
||||
"program": "${workspaceFolder}/misctools/fetch",
|
||||
"args": ["https://registry.npmjs.org/react", "--verbose"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "fetch debug #3",
|
||||
"program": "${workspaceFolder}/misctools/fetch",
|
||||
"args": ["http://example.com/", "--verbose"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"console": "internalConsole"
|
||||
},
|
||||
@@ -15,8 +87,17 @@
|
||||
"request": "launch",
|
||||
"name": "bun create debug",
|
||||
"program": "bun-debug",
|
||||
"args": ["create", "hanford/trends", "foo"],
|
||||
"cwd": "/tmp/",
|
||||
"args": ["create", "next", "foo", "--open", "--force"],
|
||||
"cwd": "/tmp",
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun dev debug",
|
||||
"program": "bun-debug",
|
||||
"args": ["dev"],
|
||||
"cwd": "/tmp/foo",
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
@@ -24,8 +105,9 @@
|
||||
"request": "launch",
|
||||
"name": "bun run debug",
|
||||
"program": "bun-debug",
|
||||
"args": ["paoskdpoasdk"],
|
||||
"cwd": "/tmp/",
|
||||
"args": ["run", "/tmp/bar.js"],
|
||||
// "args": ["--version"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
@@ -185,17 +267,49 @@
|
||||
"request": "launch",
|
||||
"name": "Dazzle serve",
|
||||
"program": "bun-debug",
|
||||
"args": ["--origin=http://localhost:5000", "--disable-hmr"],
|
||||
"args": [
|
||||
"--origin=http://localhost:5001",
|
||||
"--disable-bun.js",
|
||||
"--disable-hmr"
|
||||
],
|
||||
"cwd": "/Users/jarred/Build/lattice/apps/dazzle",
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "Bun",
|
||||
"name": "Dazzle bun",
|
||||
"program": "bun-debug",
|
||||
"args": ["bun", "--use=next"],
|
||||
"cwd": "/Users/jarred/Build/lattice/apps/dazzle",
|
||||
"console": "internalConsole",
|
||||
"env": { "GOMAXPROCS": "1" }
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun run",
|
||||
"program": "bun-debug",
|
||||
"args": ["cat.js", "./node_modules/@babel/standalone/babel.js"],
|
||||
"cwd": "/Users/jarred/Build/foobar",
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun run callback bench",
|
||||
"program": "bun-debug",
|
||||
"args": ["/Users/jarred/Code/bun/bench/snippets/callbacks-overhead.mjs"],
|
||||
"cwd": "/Users/jarred/Build/foobar",
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun test",
|
||||
"program": "bun-debug",
|
||||
"args": ["wiptest", "import-meta"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
@@ -281,7 +395,7 @@
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "Context Bun Bug",
|
||||
"name": "Context bun Bug",
|
||||
"program": "bun-debug",
|
||||
"args": ["bun", "./code.js"],
|
||||
"cwd": "/Users/jarred/Build/context/www",
|
||||
@@ -290,7 +404,7 @@
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "Context Bun",
|
||||
"name": "Context bun",
|
||||
"program": "bun-debug",
|
||||
"args": ["bun", "--use=next"],
|
||||
"cwd": "/Users/jarred/Build/context/www",
|
||||
@@ -299,7 +413,7 @@
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "Bun-hello",
|
||||
"name": "bun-hello",
|
||||
"program": "bun-debug",
|
||||
"args": [],
|
||||
"cwd": "${workspaceFolder}/packages/bun-hello",
|
||||
@@ -314,6 +428,58 @@
|
||||
"cwd": "${workspaceFolder}/integration/snippets",
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "Install",
|
||||
"program": "bun-debug",
|
||||
"args": ["install", "--backend=clonefile", "--force"],
|
||||
"cwd": "/Users/jarred/Build/octokit-test",
|
||||
"env": {},
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "Install #2",
|
||||
"program": "bun-debug",
|
||||
"args": ["add", "typescript"],
|
||||
"cwd": "/tmp/wow-such-npm",
|
||||
"env": {},
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "Add",
|
||||
"program": "bun-debug",
|
||||
"args": ["add", "react"],
|
||||
"cwd": "/tmp/wow-such-npm",
|
||||
"env": {},
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "Remove",
|
||||
"program": "bun-debug",
|
||||
"args": ["remove", "foo"],
|
||||
"cwd": "/Users/jarred/Build/athena.yarn",
|
||||
"env": {},
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "Print Lockfile",
|
||||
"program": "bun-debug",
|
||||
"args": ["./bun.lockb"],
|
||||
"cwd": "/tmp/wow-such-npm",
|
||||
"env": {
|
||||
"BUN_CONFIG_SKIP_SAVE_LOCKFILE": "1"
|
||||
},
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
@@ -332,15 +498,6 @@
|
||||
"cwd": "${workspaceFolder}/src/test/fixtures",
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "cppvsdbg",
|
||||
"request": "launch",
|
||||
"name": "Fixtures Rel",
|
||||
"program": "${workspaceFolder}/build/macos-x86_64/bun",
|
||||
"args": ["dev"],
|
||||
"cwd": "${workspaceFolder}/src/test/fixtures",
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
|
||||
@@ -435,15 +592,6 @@
|
||||
"cwd": "${workspaceFolder}/src/test/fixtures",
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"name": "esbuild",
|
||||
"type": "go",
|
||||
"request": "launch",
|
||||
"mode": "debug",
|
||||
"program": "/Users/jarred/Code/esbuild/cmd/esbuild",
|
||||
"cwd": "/Users/jarred/Code/bun/src/test/fixtures",
|
||||
"args": ["--bundle", "--outfile=out.esbuild.js", "await.ts"]
|
||||
},
|
||||
|
||||
// {
|
||||
// "type": "lldb",
|
||||
|
||||
12
.vscode/settings.json
vendored
12
.vscode/settings.json
vendored
@@ -8,11 +8,18 @@
|
||||
"search.useIgnoreFiles": true,
|
||||
"zig.buildOnSave": false,
|
||||
"[zig]": {
|
||||
"editor.defaultFormatter": "tiehuis.zig"
|
||||
"editor.defaultFormatter": "AugusteRame.zls-vscode"
|
||||
},
|
||||
"[ts]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
},
|
||||
"[tsx]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
},
|
||||
"lldb.verboseLogging": true,
|
||||
"zig.beforeDebugCmd": "make build-unit ${file} ${filter} ${bin}",
|
||||
"zig.testCmd": "make test ${file} ${filter} ${bin}",
|
||||
|
||||
"lldb.verboseLogging": false,
|
||||
"files.exclude": {
|
||||
"**/.git": true,
|
||||
"**/.svn": true,
|
||||
@@ -31,6 +38,7 @@
|
||||
"src/deps/libarchive": true,
|
||||
"src/deps/mimalloc": true,
|
||||
"src/deps/s2n-tls": true,
|
||||
"src/deps/boringssl": true,
|
||||
"src/deps/openssl": true,
|
||||
"src/deps/zlib": true,
|
||||
"integration/snippets/package-json-exports/_node_modules_copy": true
|
||||
|
||||
362
Dockerfile
Normal file
362
Dockerfile
Normal file
@@ -0,0 +1,362 @@
|
||||
FROM bunbunbunbun/bun-base:latest as mimalloc
|
||||
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG GITHUB_WORKSPACE=/build
|
||||
ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
|
||||
# Directory extracts to "bun-webkit"
|
||||
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
|
||||
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
|
||||
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
|
||||
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY src/deps/mimalloc ${BUN_DIR}/src/deps/mimalloc
|
||||
|
||||
RUN cd ${BUN_DIR} && \
|
||||
make mimalloc && rm -rf src/deps/mimalloc Makefile
|
||||
|
||||
FROM bunbunbunbun/bun-base:latest as zlib
|
||||
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG GITHUB_WORKSPACE=/build
|
||||
ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
|
||||
# Directory extracts to "bun-webkit"
|
||||
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
|
||||
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
|
||||
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
|
||||
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY src/deps/zlib ${BUN_DIR}/src/deps/zlib
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
RUN cd $BUN_DIR && \
|
||||
make zlib && rm -rf src/deps/zlib Makefile
|
||||
|
||||
FROM bunbunbunbun/bun-base:latest as libarchive
|
||||
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG GITHUB_WORKSPACE=/build
|
||||
ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
|
||||
# Directory extracts to "bun-webkit"
|
||||
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
|
||||
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
|
||||
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
|
||||
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY src/deps/libarchive ${BUN_DIR}/src/deps/libarchive
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
RUN cd $BUN_DIR && \
|
||||
make libarchive && rm -rf src/deps/libarchive Makefile
|
||||
|
||||
FROM bunbunbunbun/bun-base:latest as libbacktrace
|
||||
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG GITHUB_WORKSPACE=/build
|
||||
ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
|
||||
# Directory extracts to "bun-webkit"
|
||||
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
|
||||
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
|
||||
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
|
||||
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY src/deps/libbacktrace ${BUN_DIR}/src/deps/libbacktrace
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
RUN cd $BUN_DIR && \
|
||||
make libbacktrace && rm -rf src/deps/libbacktrace Makefile
|
||||
|
||||
FROM bunbunbunbun/bun-base:latest as boringssl
|
||||
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG GITHUB_WORKSPACE=/build
|
||||
ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
|
||||
# Directory extracts to "bun-webkit"
|
||||
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
|
||||
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
|
||||
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
|
||||
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY src/deps/boringssl ${BUN_DIR}/src/deps/boringssl
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
RUN cd $BUN_DIR && \
|
||||
make boringssl && rm -rf src/deps/boringssl Makefile
|
||||
|
||||
FROM bunbunbunbun/bun-base:latest as picohttp
|
||||
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG GITHUB_WORKSPACE=/build
|
||||
ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
|
||||
# Directory extracts to "bun-webkit"
|
||||
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
|
||||
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
|
||||
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
|
||||
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY src/deps/picohttpparser ${BUN_DIR}/src/deps/picohttpparser
|
||||
COPY src/deps/*.c ${BUN_DIR}/src/deps
|
||||
COPY src/deps/*.h ${BUN_DIR}/src/deps
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
RUN cd $BUN_DIR && \
|
||||
make picohttp
|
||||
|
||||
FROM bunbunbunbun/bun-base-with-zig-and-webkit:latest as identifier_cache
|
||||
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG GITHUB_WORKSPACE=/build
|
||||
ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
|
||||
# Directory extracts to "bun-webkit"
|
||||
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
|
||||
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
|
||||
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
|
||||
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY src/js_lexer/identifier_data.zig ${BUN_DIR}/src/js_lexer/identifier_data.zig
|
||||
COPY src/js_lexer/identifier_cache.zig ${BUN_DIR}/src/js_lexer/identifier_cache.zig
|
||||
|
||||
RUN cd $BUN_DIR && \
|
||||
make identifier-cache && rm -rf zig-cache Makefile
|
||||
|
||||
FROM bunbunbunbun/bun-base-with-zig-and-webkit:latest as node_fallbacks
|
||||
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG GITHUB_WORKSPACE=/build
|
||||
ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
|
||||
# Directory extracts to "bun-webkit"
|
||||
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
|
||||
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
|
||||
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
|
||||
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY src/node-fallbacks ${BUN_DIR}/src/node-fallbacks
|
||||
RUN cd $BUN_DIR && \
|
||||
make node-fallbacks && rm -rf src/node-fallbacks/node_modules Makefile
|
||||
|
||||
FROM bunbunbunbun/bun-base-with-zig-and-webkit:latest as prepare_release
|
||||
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG GITHUB_WORKSPACE=/build
|
||||
ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
|
||||
# Directory extracts to "bun-webkit"
|
||||
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
|
||||
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
|
||||
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
|
||||
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
|
||||
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
COPY ./src ${BUN_DIR}/src
|
||||
COPY ./build.zig ${BUN_DIR}/build.zig
|
||||
COPY ./completions ${BUN_DIR}/completions
|
||||
COPY ./packages ${BUN_DIR}/packages
|
||||
COPY ./build-id ${BUN_DIR}/build-id
|
||||
COPY ./package.json ${BUN_DIR}/package.json
|
||||
COPY ./misctools ${BUN_DIR}/misctools
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
|
||||
COPY --from=mimalloc ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=libarchive ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=picohttp ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=boringssl ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=libbacktrace ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=zlib ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=identifier_cache ${BUN_DIR}/src/js_lexer/*.blob ${BUN_DIR}/src/js_lexer
|
||||
COPY --from=node_fallbacks ${BUN_DIR}/src/node-fallbacks/out ${BUN_DIR}/src/node-fallbacks/out
|
||||
|
||||
WORKDIR ${BUN_DIR}
|
||||
|
||||
|
||||
FROM prepare_release as build_release
|
||||
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG GITHUB_WORKSPACE=/build
|
||||
ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
|
||||
# Directory extracts to "bun-webkit"
|
||||
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
|
||||
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
|
||||
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
|
||||
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
RUN cd $BUN_DIR && rm -rf $HOME/.cache zig-cache && make \
|
||||
jsc-bindings-headers \
|
||||
api \
|
||||
analytics \
|
||||
bun_error \
|
||||
fallback_decoder && rm -rf $HOME/.cache zig-cache && \
|
||||
mkdir -p $BUN_RELEASE_DIR && \
|
||||
make release copy-to-bun-release-dir && \
|
||||
rm -rf $HOME/.cache zig-cache misctools package.json build-id completions build.zig $(BUN_DIR)/packages
|
||||
|
||||
FROM prepare_release as build_unit
|
||||
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG GITHUB_WORKSPACE=/build
|
||||
ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
|
||||
# Directory extracts to "bun-webkit"
|
||||
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
|
||||
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
|
||||
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
|
||||
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
ENV PATH "$ZIG_PATH:$PATH"
|
||||
|
||||
CMD make jsc-bindings-headers \
|
||||
api \
|
||||
analytics \
|
||||
bun_error \
|
||||
fallback_decoder \
|
||||
jsc-bindings-mac && \
|
||||
make \
|
||||
run-all-unit-tests
|
||||
|
||||
FROM bunbunbunbun/bun-base-with-zig-and-webkit:latest as bun.devcontainer
|
||||
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG GITHUB_WORKSPACE=/build
|
||||
ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
|
||||
# Directory extracts to "bun-webkit"
|
||||
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
|
||||
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
|
||||
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
|
||||
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
|
||||
|
||||
ENV WEBKIT_OUT_DIR ${WEBKIT_DIR}
|
||||
ENV PATH "$ZIG_PATH:$PATH"
|
||||
ENV JSC_BASE_DIR $WEBKIT_OUT_DIR
|
||||
ENV LIB_ICU_PATH ${GITHUB_WORKSPACE}/icu/source/lib
|
||||
ENV BUN_RELEASE_DIR ${BUN_RELEASE_DIR}
|
||||
ENV PATH "${GITHUB_WORKSPACE}/packages/bun-linux-x64:${GITHUB_WORKSPACE}/packages/bun-linux-aarch64:${GITHUB_WORKSPACE}/packages/debug-bun-linux-x64:${GITHUB_WORKSPACE}/packages/debug-bun-linux-aarch64:$PATH"
|
||||
ENV PATH "/home/ubuntu/zls/zig-out/bin:$PATH"
|
||||
|
||||
ENV BUN_INSTALL /home/ubuntu/.bun
|
||||
ENV XDG_CONFIG_HOME /home/ubuntu/.config
|
||||
|
||||
RUN apt-get -y update && update-alternatives --install /usr/bin/lldb lldb /usr/bin/lldb-13 90
|
||||
|
||||
COPY .devcontainer/workspace.code-workspace $GITHUB_WORKSPACE/workspace.code-workspace
|
||||
COPY .devcontainer/zls.json $GITHUB_WORKSPACE/workspace.code-workspace
|
||||
COPY .devcontainer/limits.conf /etc/security/limits.conf
|
||||
COPY ".devcontainer/scripts/" /scripts/
|
||||
COPY ".devcontainer/scripts/getting-started.sh" $GITHUB_WORKSPACE/getting-started.sh
|
||||
RUN mkdir -p /home/ubuntu/.bun /home/ubuntu/.config $GITHUB_WORKSPACE/bun && \
|
||||
bash /scripts/common-debian.sh && \
|
||||
bash /scripts/github.sh && \
|
||||
bash /scripts/nice.sh && \
|
||||
bash /scripts/zig-env.sh
|
||||
COPY .devcontainer/zls.json /home/ubuntu/.config/zls.json
|
||||
|
||||
FROM ubuntu:20.04 as release_with_debug_info
|
||||
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG GITHUB_WORKSPACE=/build
|
||||
ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
|
||||
# Directory extracts to "bun-webkit"
|
||||
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
|
||||
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
|
||||
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
|
||||
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
|
||||
|
||||
COPY .devcontainer/limits.conf /etc/security/limits.conf
|
||||
|
||||
ENV BUN_INSTALL /opt/bun
|
||||
ENV PATH "/opt/bun/bin:$PATH"
|
||||
ARG BUILDARCH=amd64
|
||||
LABEL org.opencontainers.image.title="bun ${BUILDARCH} (glibc)"
|
||||
LABEL org.opencontainers.image.source=https://github.com/jarred-sumner/bun
|
||||
COPY --from=build_release ${BUN_RELEASE_DIR}/bun /opt/bun/bin/bun
|
||||
COPY --from=build_release ${BUN_RELEASE_DIR}/bun-profile /opt/bun/bin/bun-profile
|
||||
|
||||
WORKDIR /opt/bun
|
||||
|
||||
ENTRYPOINT [ "/opt/bun/bin/bun" ]
|
||||
|
||||
FROM ubuntu:20.04 as release
|
||||
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG GITHUB_WORKSPACE=/build
|
||||
ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
|
||||
# Directory extracts to "bun-webkit"
|
||||
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
|
||||
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
|
||||
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
|
||||
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
|
||||
|
||||
COPY .devcontainer/limits.conf /etc/security/limits.conf
|
||||
|
||||
ENV BUN_INSTALL /opt/bun
|
||||
ENV PATH "/opt/bun/bin:$PATH"
|
||||
ARG BUILDARCH=amd64
|
||||
LABEL org.opencontainers.image.title="bun ${BUILDARCH} (glibc)"
|
||||
LABEL org.opencontainers.image.source=https://github.com/jarred-sumner/bun
|
||||
COPY --from=build_release ${BUN_RELEASE_DIR}/bun /opt/bun/bin/bun
|
||||
WORKDIR /opt/bun
|
||||
|
||||
ENTRYPOINT [ "/opt/bun/bin/bun" ]
|
||||
|
||||
|
||||
FROM bunbunbunbun/bun-test-base as test_base
|
||||
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG GITHUB_WORKSPACE=/build
|
||||
ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
|
||||
# Directory extracts to "bun-webkit"
|
||||
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
|
||||
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
|
||||
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
|
||||
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
|
||||
|
||||
ARG BUILDARCH=amd64
|
||||
RUN groupadd -r chromium && useradd -d ${BUN_DIR} -M -r -g chromium -G audio,video chromium \
|
||||
&& mkdir -p /home/chromium/Downloads && chown -R chromium:chromium /home/chromium
|
||||
|
||||
USER chromium
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
ENV NPM_CLIENT bun
|
||||
ENV PATH "${BUN_DIR}/packages/bun-linux-x64:${BUN_DIR}/packages/bun-linux-aarch64:$PATH"
|
||||
ENV CI 1
|
||||
ENV BROWSER_EXECUTABLE /usr/bin/chromium
|
||||
|
||||
COPY ./integration ${BUN_DIR}/integration
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY package.json ${BUN_DIR}/package.json
|
||||
COPY .docker/run-test.sh ${BUN_DIR}/run-test.sh
|
||||
COPY ./bun.lockb ${BUN_DIR}/bun.lockb
|
||||
|
||||
# # We don't want to worry about architecture differences in this image
|
||||
COPY --from=release /opt/bun/bin/bun ${BUN_DIR}/packages/bun-linux-aarch64/bun
|
||||
COPY --from=release /opt/bun/bin/bun ${BUN_DIR}/packages/bun-linux-x64/bun
|
||||
|
||||
USER root
|
||||
RUN chgrp -R chromium ${BUN_DIR} && chmod g+rwx ${BUN_DIR} && chown -R chromium:chromium ${BUN_DIR}
|
||||
USER chromium
|
||||
|
||||
CMD [ "bash", "run-test.sh" ]
|
||||
|
||||
FROM release
|
||||
@@ -1,52 +0,0 @@
|
||||
FROM ubuntu:latest
|
||||
|
||||
RUN apt-get update && apt-get install --no-install-recommends -y wget gnupg2 curl lsb-release wget software-properties-common
|
||||
RUN curl -s https://deb.nodesource.com/gpgkey/nodesource.gpg.key | apt-key add -
|
||||
|
||||
RUN wget https://apt.llvm.org/llvm.sh --no-check-certificate
|
||||
RUN chmod +x llvm.sh
|
||||
RUN ./llvm.sh 12
|
||||
|
||||
RUN apt-get update && apt-get install --no-install-recommends -y \
|
||||
ca-certificates \
|
||||
curl \
|
||||
gnupg2 \
|
||||
software-properties-common \
|
||||
cmake \
|
||||
build-essential \
|
||||
git \
|
||||
libssl-dev \
|
||||
ruby \
|
||||
liblld-12-dev \
|
||||
libclang-12-dev \
|
||||
nodejs \
|
||||
gcc \
|
||||
g++ \
|
||||
npm \
|
||||
clang-12 \
|
||||
clang-format-12 \
|
||||
libc++-12-dev \
|
||||
libc++abi-12-dev \
|
||||
lld-12 \
|
||||
libicu-dev
|
||||
|
||||
RUN update-alternatives --install /usr/bin/ld ld /usr/bin/lld-12 90 && \
|
||||
update-alternatives --install /usr/bin/cc cc /usr/bin/clang-12 90 && \
|
||||
update-alternatives --install /usr/bin/cpp cpp /usr/bin/clang++-12 90 && \
|
||||
update-alternatives --install /usr/bin/c++ c++ /usr/bin/clang++-12 90
|
||||
|
||||
|
||||
ENV CC=clang-12
|
||||
ENV CXX=clang++-12
|
||||
|
||||
# Compile zig
|
||||
RUN mkdir -p /home/ubuntu/zig; cd /home/ubuntu; git clone https://github.com/jarred-sumner/zig.git; cd /home/ubuntu/zig && git checkout jarred/zig-sloppy-with-small-structs && cmake . -DCMAKE_BUILD_TYPE=Release && make -j$(nproc)
|
||||
|
||||
ENV PATH="/home/ubuntu/zig:$PATH"
|
||||
|
||||
RUN npm install -g esbuild
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
145
Dockerfile.base
Normal file
145
Dockerfile.base
Normal file
@@ -0,0 +1,145 @@
|
||||
FROM ubuntu:20.04 as bun-base-with-args
|
||||
|
||||
FROM bun-base-with-args as bun-base
|
||||
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG GITHUB_WORKSPACE=/build
|
||||
ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
|
||||
# Directory extracts to "bun-webkit"
|
||||
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
|
||||
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
|
||||
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
|
||||
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
|
||||
|
||||
WORKDIR ${GITHUB_WORKSPACE}
|
||||
|
||||
RUN apt-get update && \
|
||||
apt-get install --no-install-recommends -y wget gnupg2 curl lsb-release wget software-properties-common && \
|
||||
add-apt-repository ppa:longsleep/golang-backports && \
|
||||
wget https://apt.llvm.org/llvm.sh --no-check-certificate && \
|
||||
chmod +x llvm.sh && \
|
||||
./llvm.sh 13 && \
|
||||
apt-get update && \
|
||||
apt-get install --no-install-recommends -y \
|
||||
ca-certificates \
|
||||
curl \
|
||||
gnupg2 \
|
||||
software-properties-common \
|
||||
cmake \
|
||||
build-essential \
|
||||
git \
|
||||
libssl-dev \
|
||||
ruby \
|
||||
liblld-13-dev \
|
||||
libclang-13-dev \
|
||||
nodejs \
|
||||
gcc \
|
||||
g++ \
|
||||
npm \
|
||||
clang-13 \
|
||||
clang-format-13 \
|
||||
libc++-13-dev \
|
||||
libc++abi-13-dev \
|
||||
lld-13 \
|
||||
libicu-dev \
|
||||
wget \
|
||||
unzip \
|
||||
tar \
|
||||
golang-go ninja-build pkg-config automake autoconf libtool curl && \
|
||||
update-alternatives --install /usr/bin/cc cc /usr/bin/clang-13 90 && \
|
||||
update-alternatives --install /usr/bin/cpp cpp /usr/bin/clang++-13 90 && \
|
||||
update-alternatives --install /usr/bin/c++ c++ /usr/bin/clang++-13 90 && \
|
||||
npm install -g esbuild
|
||||
|
||||
ENV CC=clang-13
|
||||
ENV CXX=clang++-13
|
||||
|
||||
|
||||
ARG BUILDARCH=amd64
|
||||
|
||||
|
||||
WORKDIR $GITHUB_WORKSPACE
|
||||
|
||||
ENV WEBKIT_OUT_DIR ${WEBKIT_DIR}
|
||||
|
||||
ENV JSC_BASE_DIR $WEBKIT_OUT_DIR
|
||||
ENV LIB_ICU_PATH ${GITHUB_WORKSPACE}/icu/source/lib
|
||||
ENV BUN_RELEASE_DIR ${BUN_RELEASE_DIR}
|
||||
ENV BUN_DEPS_OUT_DIR ${BUN_DEPS_OUT_DIR}
|
||||
|
||||
RUN cd / && mkdir -p $BUN_RELEASE_DIR $BUN_DEPS_OUT_DIR ${BUN_DIR} ${BUN_DEPS_OUT_DIR}
|
||||
|
||||
LABEL org.opencontainers.image.title="bun base image ${BUILDARCH} (glibc)"
|
||||
LABEL org.opencontainers.image.source=https://github.com/jarred-sumner/bun
|
||||
|
||||
|
||||
FROM bun-base as bun-base-with-zig-and-webkit
|
||||
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG GITHUB_WORKSPACE=/build
|
||||
ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
|
||||
# Directory extracts to "bun-webkit"
|
||||
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
|
||||
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
|
||||
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
|
||||
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
|
||||
ARG BUILDARCH=amd64
|
||||
|
||||
WORKDIR $GITHUB_WORKSPACE
|
||||
|
||||
RUN cd $GITHUB_WORKSPACE && \
|
||||
curl -o zig-linux-$BUILDARCH.zip -L https://github.com/Jarred-Sumner/zig/releases/download/jan17/zig-linux-$BUILDARCH.zip && \
|
||||
unzip -q zig-linux-$BUILDARCH.zip && \
|
||||
rm zig-linux-$BUILDARCH.zip;
|
||||
|
||||
RUN cd $GITHUB_WORKSPACE && \
|
||||
curl -o bun-webkit-linux-$BUILDARCH.tar.gz -L https://github.com/Jarred-Sumner/WebKit/releases/download/Bun-v0-llvm13/bun-webkit-linux-$BUILDARCH.tar.gz && \
|
||||
tar -xzf bun-webkit-linux-$BUILDARCH.tar.gz && \
|
||||
rm bun-webkit-linux-$BUILDARCH.tar.gz && \
|
||||
cat $WEBKIT_OUT_DIR/include/cmakeconfig.h > /dev/null
|
||||
|
||||
RUN cd $GITHUB_WORKSPACE && \
|
||||
curl -o icu4c-66_1-src.tgz -L https://github.com/unicode-org/icu/releases/download/release-66-1/icu4c-66_1-src.tgz && \
|
||||
tar -xzf icu4c-66_1-src.tgz && \
|
||||
rm icu4c-66_1-src.tgz && \
|
||||
cd icu/source && \
|
||||
./configure --enable-static --disable-shared && \
|
||||
make -j$(nproc)
|
||||
|
||||
ENV ZIG "${ZIG_PATH}/zig"
|
||||
|
||||
LABEL org.opencontainers.image.title="bun base image with zig & webkit ${BUILDARCH} (glibc)"
|
||||
LABEL org.opencontainers.image.source=https://github.com/jarred-sumner/bun
|
||||
|
||||
|
||||
FROM debian:bullseye-slim as bun-test-base
|
||||
# Original creator:
|
||||
# LABEL maintainer "Jessie Frazelle <jess@linux.com>"
|
||||
|
||||
# Install Chromium
|
||||
# Yes, including the Google API Keys sucks but even debian does the same: https://packages.debian.org/stretch/amd64/chromium/filelist
|
||||
RUN apt-get update && apt-get install -y \
|
||||
chromium \
|
||||
chromium-l10n \
|
||||
fonts-liberation \
|
||||
fonts-roboto \
|
||||
hicolor-icon-theme \
|
||||
libcanberra-gtk-module \
|
||||
libexif-dev \
|
||||
libgl1-mesa-dri \
|
||||
libgl1-mesa-glx \
|
||||
libpangox-1.0-0 \
|
||||
libv4l-0 \
|
||||
fonts-symbola \
|
||||
bash \
|
||||
make \
|
||||
psmisc \
|
||||
curl \
|
||||
--no-install-recommends \
|
||||
&& rm -rf /var/lib/apt/lists/* \
|
||||
&& mkdir -p /etc/chromium.d/ \
|
||||
&& /bin/echo -e 'export GOOGLE_API_KEY="AIzaSyCkfPOPZXDKNn8hhgu3JrA62wIgC93d44k"\nexport GOOGLE_DEFAULT_CLIENT_ID="811574891467.apps.googleusercontent.com"\nexport GOOGLE_DEFAULT_CLIENT_SECRET="kdloedMFGdGla2P1zacGjAQh"' > /etc/chromium.d/googleapikeys && \
|
||||
curl -L https://deb.nodesource.com/setup_16.x | bash - && \
|
||||
apt-get update && \
|
||||
apt-get install -y nodejs npm
|
||||
|
||||
139
Dockerfile.musl
Normal file
139
Dockerfile.musl
Normal file
@@ -0,0 +1,139 @@
|
||||
|
||||
# This doesn't work
|
||||
# Specifically: there are a number of crashes and segfaults when using musl
|
||||
# The cause is likely related to differences in pthreads implementations
|
||||
# It is not just the stack size thing. It's something more complicated and importantly
|
||||
# There was no meaningful file size difference between musl and glibc
|
||||
|
||||
|
||||
# ARG BUILDARCH=aarch64
|
||||
# ARG zig_base_image=ghcr.io/jarred-sumner/zig-linux-musl-${BUILDARCH}
|
||||
# ARG webkit_base_image=ghcr.io/jarred-sumner/bun-webkit-musl-${BUILDARCH}
|
||||
# FROM ${zig_base_image}:latest AS zig
|
||||
# FROM ${webkit_base_image}:latest AS webkit
|
||||
|
||||
# FROM zig as bun_base
|
||||
|
||||
# COPY --from=webkit /webkit /webkit
|
||||
|
||||
# ENV PATH "/zig/bin:$PATH"
|
||||
# ENV JSC_BASE_DIR=/webkit
|
||||
# ENV LIB_ICU_PATH=/webkit/lib
|
||||
# ENV BUN_DEPS_OUT_DIR /bun-deps
|
||||
# ENV STATIC_MUSL_FLAG=-static
|
||||
# ENV MIMALLOC_OVERRIDE_FLAG="-DMI_OVERRIDE=OFF"
|
||||
|
||||
# RUN apk add --no-cache nodejs npm go libtool autoconf pkgconfig automake ninja
|
||||
# RUN mkdir -p $BUN_DEPS_OUT_DIR;
|
||||
|
||||
# WORKDIR /bun
|
||||
# COPY Makefile /bun/Makefile
|
||||
|
||||
# FROM bun_base as mimalloc
|
||||
|
||||
# COPY src/deps/mimalloc /bun/src/deps/mimalloc
|
||||
|
||||
# RUN make mimalloc;
|
||||
|
||||
# FROM bun_base as zlib
|
||||
|
||||
# COPY src/deps/zlib /bun/src/deps/zlib
|
||||
|
||||
# RUN make zlib;
|
||||
|
||||
# FROM bun_base as libarchive
|
||||
|
||||
# COPY src/deps/libarchive /bun/src/deps/libarchive
|
||||
|
||||
# RUN make libarchive;
|
||||
|
||||
# FROM bun_base as boringssl
|
||||
|
||||
# COPY src/deps/boringssl /bun/src/deps/boringssl
|
||||
|
||||
# RUN make boringssl;
|
||||
|
||||
# FROM bun_base as picohttp
|
||||
|
||||
# COPY src/deps/picohttpparser /bun/src/deps/picohttpparser
|
||||
# COPY src/deps/*.c /bun/src/deps
|
||||
# COPY src/deps/*.h /bun/src/deps
|
||||
|
||||
# RUN make picohttp
|
||||
|
||||
# FROM bun_base as identifier_cache
|
||||
|
||||
# COPY src/js_lexer/identifier_data.zig /bun/src/js_lexer/identifier_data.zig
|
||||
# COPY src/js_lexer/identifier_cache.zig /bun/src/js_lexer/identifier_cache.zig
|
||||
|
||||
# RUN make identifier-cache
|
||||
|
||||
# FROM bun_base as node_fallbacks
|
||||
|
||||
# COPY src/node-fallbacks /bun/src/node-fallbacks
|
||||
# RUN make node-fallbacks
|
||||
|
||||
# FROM bun_base as prebuild
|
||||
|
||||
# WORKDIR /bun
|
||||
|
||||
# COPY ./src /bun/src
|
||||
# COPY ./build.zig /bun/build.zig
|
||||
# COPY ./completions /bun/completions
|
||||
# COPY ./packages /bun/packages
|
||||
# COPY ./build-id /bun/build-id
|
||||
# COPY ./package.json /bun/package.json
|
||||
# COPY ./misctools /bun/misctools
|
||||
|
||||
# COPY --from=mimalloc /bun-deps/*.o /bun-deps
|
||||
# COPY --from=libarchive /bun-deps/*.a /bun-deps
|
||||
# COPY --from=picohttp /bun-deps/*.o /bun-deps
|
||||
# COPY --from=boringssl /bun-deps/*.a /bun-deps
|
||||
# COPY --from=zlib /bun-deps/*.a /bun-deps
|
||||
# COPY --from=node_fallbacks /bun/src/node-fallbacks /bun/src/node-fallbacks
|
||||
# COPY --from=identifier_cache /bun/src/js_lexer/*.blob /bun/src/js_lexer/
|
||||
|
||||
# ENV ICU_FLAGS="-I/webkit/include/wtf $ICU_FLAGS"
|
||||
|
||||
# RUN apk add --no-cache chromium && npm install -g esbuild && make \
|
||||
# jsc-bindings-headers \
|
||||
# api \
|
||||
# analytics \
|
||||
# bun_error \
|
||||
# fallback_decoder
|
||||
|
||||
|
||||
|
||||
# FROM prebuild as release
|
||||
|
||||
# ENV BUN_RELEASE_DIR /opt/bun
|
||||
|
||||
# ENV LIB_ICU_PATH /usr/lib
|
||||
|
||||
# RUN apk add icu-static icu-dev && mkdir -p $BUN_RELEASE_DIR; make release \
|
||||
# copy-to-bun-release-dir
|
||||
|
||||
# FROM alpine:3.15 as bun
|
||||
|
||||
# COPY --from=release /opt/bun/bun /opt/bun/bin/bun
|
||||
# ENV BUN_INSTALL /opt/bun
|
||||
# ENV PATH /opt/bun/bin:$PATH
|
||||
|
||||
# LABEL org.opencontainers.image.title="bun - Linux ${BUILDARCH} (musl)"
|
||||
# LABEL org.opencontainers.image.source=https://github.com/jarred-sumner/bun
|
||||
|
||||
# FROM release as test
|
||||
|
||||
# ENV PATH /opt/bun/bin:$PATH
|
||||
# ENV PATH /bun/packages/bun-linux-aarch64:/bun/packages/bun-linux-x64:$PATH
|
||||
# ENV BUN_INSTALL /opt/bun
|
||||
|
||||
# WORKDIR /bun
|
||||
|
||||
# COPY ./integration /bun/integration
|
||||
# COPY ./integration/snippets/package-json-exports/_node_modules_copy /bun/integration/snippets/package-json-exports/_node_modules_copy
|
||||
# CMD [ "bash", "-c", "npm install && cd /bun/integration/snippets && npm install && cd /bun && make copy-test-node-modules test-all"]
|
||||
|
||||
|
||||
# # FROM bun
|
||||
|
||||
@@ -4,10 +4,10 @@ This benchmarks bundler performance for CSS hot reloading.
|
||||
|
||||
## Results
|
||||
|
||||
Bun is 14x faster than Next.js at hot reloading CSS.
|
||||
bun is 14x faster than Next.js at hot reloading CSS.
|
||||
|
||||
```
|
||||
Bun v0.0.34
|
||||
bun v0.0.34
|
||||
Saving every 16ms
|
||||
|
||||
Frame time:
|
||||
@@ -40,7 +40,7 @@ The intent is to be as accurate as possible. Measuring times reported client-sid
|
||||
|
||||
It works like this:
|
||||
|
||||
1. `browser.js` loads either Bun or Next.js and a Chromium instance opened to the correct webpage
|
||||
1. `browser.js` loads either bun or Next.js and opens a Chromium instance at the correct webpage
|
||||
2. `color-looper.zig` updates [`./src/colors.css`](./src/colors.css) in a loop up to `1024` times (1024 is arbitrary), sleeping every `16`ms or `32`ms (a CLI arg you can pass it). The `var(--timestamp)` CSS variable contains the UTC timestamp with precision of milliseconds and one extra decimal point
|
||||
3. `color-looper.zig` automatically records the screen via `screencapture` (builtin on macOS) and saves it, along with a `BigUint64Array` containing all the expected timestamps. When it's done, it writes to a designated file on disk which `browser.js` picks up as the signal to close the browser.
|
||||
4. `ffmpeg` converts each frame into a black and white `.tif` file, which `tesseract` then OCRs
|
||||
|
||||
78
bench/snippets/callbacks-overhead.mjs
Normal file
78
bench/snippets/callbacks-overhead.mjs
Normal file
@@ -0,0 +1,78 @@
|
||||
// Micro-benchmark: how much overhead do four continuation styles add over
// 1,000 nested calls? Styles compared: async/await, plain callbacks,
// synchronous calls, and Promise#then chains. Each timing (a string of
// milliseconds, via toFixed) is stored in the exported `report` object.
const iterations = 1_000;

export var report = {
  async: 0,
  callback: 0,
  sync: 0,
  then: 0,
};

const runners = {
  // Chain of `iterations` nested callbacks.
  callback(depth, done) {
    if (depth === iterations) return done();
    runners.callback(depth + 1, () => done());
  },

  // Plain synchronous recursion to the same depth.
  sync(depth) {
    if (depth === iterations) return;

    runners.sync(depth + 1);
  },

  // async/await recursion to the same depth.
  async async(depth) {
    if (depth === iterations) return;

    await runners.async(depth + 1);
  },

  // Promise#then chain; each link resolves to the next depth.
  then(depth) {
    if (depth === iterations) return;
    return Promise.resolve(depth + 1).then(runners.then);
  },
};

// Run all four styles once; when `log` is truthy, record each timing in
// `report` and print it.
async function test(log) {
  {
    const startedAt = performance.now();
    await runners.async(0);
    if (log)
      console.log(
        `async/await: ${(report.async = (performance.now() - startedAt).toFixed(
          4
        ))}ms`
      );
  }

  {
    const startedAt = performance.now();
    runners.callback(0, function () {
      if (log)
        console.log(
          `callback: ${(report.callback = (
            performance.now() - startedAt
          ).toFixed(4))}ms`
        );
    });
  }

  {
    const startedAt = performance.now();
    await runners.then(0);
    if (log)
      console.log(
        `then: ${(report.then = (performance.now() - startedAt).toFixed(4))}ms`
      );
  }

  {
    const startedAt = performance.now();
    runners.sync(0);
    if (log)
      console.log(
        `sync: ${(report.sync = (performance.now() - startedAt).toFixed(4))}ms`
      );
  }
}

// Ten unlogged warmup passes, then one recorded pass.
for (let warmup = 0; warmup < 10; warmup++) await test();

await test(true);
|
||||
5
bench/snippets/cat.mjs
Normal file
5
bench/snippets/cat.mjs
Normal file
@@ -0,0 +1,5 @@
|
||||
// cat: print the concatenated contents of every file named on the command
// line. ITERATIONS (default 1) repeats the read, for benchmarking.
import { readFileSync } from "node:fs";
const count = parseInt(process.env.ITERATIONS || "1", 10) || 1;
// argv[0] is the runtime and argv[1] is this script, so the file list
// starts at index 2. (Slicing from 1, as before, made the script print
// its own source ahead of the requested files.)
const arg = process.argv.slice(2);
for (let i = 0; i < count; i++)
  console.log(arg.map((file) => readFileSync(file, "utf8")).join(""));
|
||||
3
bench/snippets/copyfile.mjs
Normal file
3
bench/snippets/copyfile.mjs
Normal file
@@ -0,0 +1,3 @@
|
||||
// Copy one file: the first CLI argument is the source path, the second is
// the destination path.
import { copyFileSync } from "node:fs";
const [source, destination] = process.argv.slice(2);
copyFileSync(source, destination);
|
||||
6
bench/snippets/exists.js
Normal file
6
bench/snippets/exists.js
Normal file
@@ -0,0 +1,6 @@
|
||||
// Benchmark fs.existsSync against the current working directory.
// ITERATIONS (default 1) controls how many times the check runs.
const { existsSync } = require("fs");

const cwd = process.cwd();

// Fall back to a single iteration when ITERATIONS is unset or not a
// positive decimal number.
const count = parseInt(process.env.ITERATIONS || "1", 10) || 1;

let remaining = count;
while (remaining--) existsSync(cwd);
|
||||
12
bench/snippets/package.json
Normal file
12
bench/snippets/package.json
Normal file
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"name": "snippets",
|
||||
"version": "1.0.0",
|
||||
"main": "index.js",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@babel/core": "^7.16.10",
|
||||
"@babel/preset-react": "^7.16.7",
|
||||
"@swc/core": "^1.2.133",
|
||||
"esbuild": "^0.14.12"
|
||||
}
|
||||
}
|
||||
4
bench/snippets/realpath.mjs
Normal file
4
bench/snippets/realpath.mjs
Normal file
@@ -0,0 +1,4 @@
|
||||
// Benchmark fs.realpathSync on the last CLI argument (which is the script
// path itself when no extra argument is given). ITERATIONS (default 1)
// sets the repeat count.
import { realpathSync } from "node:fs";

const count = parseInt(process.env.ITERATIONS || "1", 10) || 1;
const targetPath = process.argv[process.argv.length - 1];

let remaining = count;
while (remaining--) realpathSync(targetPath);
|
||||
BIN
bench/snippets/scanner/bun.lockb
Executable file
BIN
bench/snippets/scanner/bun.lockb
Executable file
Binary file not shown.
6
bench/snippets/scanner/package.json
Normal file
6
bench/snippets/scanner/package.json
Normal file
@@ -0,0 +1,6 @@
|
||||
{
|
||||
"name": "scan",
|
||||
"dependencies": {
|
||||
"esbuild": "^0.14.11"
|
||||
}
|
||||
}
|
||||
15
bench/snippets/scanner/remix-route.ts
Normal file
15
bench/snippets/scanner/remix-route.ts
Normal file
@@ -0,0 +1,15 @@
|
||||
// Benchmark fixture: a typical Remix route module. The sibling scanner
// benchmarks expect exactly the named exports "action", "default", and
// "loader" (in that order) when this file is scanned — keep them intact.
import { useParams } from "remix";
import type { LoaderFunction, ActionFunction } from "remix";

// Server-side data loader for this route.
export const loader: LoaderFunction = async ({ params }) => {
  console.log(params.postId);
};

// Server-side form/action handler for this route.
export const action: ActionFunction = async ({ params }) => {
  console.log(params.postId);
};

// Client-side route component (the module's default export).
export default function PostRoute() {
  const params = useParams();
  console.log(params.postId);
}
|
||||
15
bench/snippets/scanner/scan-imports-only.js
Normal file
15
bench/snippets/scanner/scan-imports-only.js
Normal file
@@ -0,0 +1,15 @@
|
||||
import { readFileSync } from "fs";
|
||||
const fixture = ["action", "default", "loader"];
|
||||
|
||||
const transpiler = new Bun.Transpiler({
|
||||
loader: "ts",
|
||||
});
|
||||
|
||||
console.time("Get exports");
|
||||
const ITERATIONS = parseInt(process.env.ITERATIONS || "1") || 1;
|
||||
for (let i = 0; i < ITERATIONS; i++) {
|
||||
const imports = transpiler.scanImports(
|
||||
readFileSync("remix-route.ts", "utf8")
|
||||
);
|
||||
}
|
||||
console.timeEnd("Get exports");
|
||||
21
bench/snippets/scanner/scan.bun.js
Normal file
21
bench/snippets/scanner/scan.bun.js
Normal file
@@ -0,0 +1,21 @@
|
||||
import { readFileSync } from "fs";
|
||||
const fixture = ["action", "default", "loader"];
|
||||
const ITERATIONS = parseInt(process.env.ITERATIONS || "1") || 1;
|
||||
|
||||
const transpiler = new Bun.Transpiler({
|
||||
loader: "ts",
|
||||
});
|
||||
|
||||
console.time("Get exports");
|
||||
const file = readFileSync("remix-route.ts", "utf8");
|
||||
for (let i = 0; i < ITERATIONS; i++) {
|
||||
const { imports, exports } = transpiler.scan(file);
|
||||
|
||||
for (let j = 0; j < fixture.length; j++) {
|
||||
if (fixture[j] !== exports[j]) {
|
||||
throw new Error("Mismatch");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
console.timeEnd("Get exports");
|
||||
42
bench/snippets/scanner/scan.node-esbuild.mjs
Normal file
42
bench/snippets/scanner/scan.node-esbuild.mjs
Normal file
@@ -0,0 +1,42 @@
|
||||
import { build, buildSync } from "esbuild";
|
||||
import { readFileSync } from "fs";
|
||||
const fixture = ["action", "default", "loader"];
|
||||
const ITERATIONS = parseInt(process.env.ITERATIONS || "1") || 1;
|
||||
|
||||
const opts = {
|
||||
metafile: true,
|
||||
format: "esm",
|
||||
platform: "neutral",
|
||||
write: false,
|
||||
logLevel: "silent",
|
||||
stdin: {
|
||||
contents: readFileSync("remix-route.ts", "utf8"),
|
||||
loader: "ts",
|
||||
sourcefile: "remix-route.js",
|
||||
},
|
||||
};
|
||||
|
||||
const getExports = ({ metafile }) => {
|
||||
for (let i = 0; i < fixture.length; i++) {
|
||||
if (fixture[i] !== metafile.outputs["stdin.js"].exports[i]) {
|
||||
throw new Error("Mismatch");
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
console.time("Get exports");
|
||||
|
||||
if (!process.env.SYNC) {
|
||||
var promises = new Array(ITERATIONS);
|
||||
for (let i = 0; i < ITERATIONS; i++) {
|
||||
promises[i] = build(opts).then(getExports);
|
||||
}
|
||||
|
||||
await Promise.all(promises);
|
||||
} else {
|
||||
for (let i = 0; i < ITERATIONS; i++) {
|
||||
getExports(buildSync(opts));
|
||||
}
|
||||
}
|
||||
|
||||
console.timeEnd("Get exports");
|
||||
58
bench/snippets/transpiler.mjs
Normal file
58
bench/snippets/transpiler.mjs
Normal file
@@ -0,0 +1,58 @@
|
||||
import { readFileSync } from "fs";
|
||||
|
||||
var transformSync;
|
||||
var transform;
|
||||
var opts;
|
||||
if (process.isBun) {
|
||||
const transpiler = new Bun.Transpiler({ loader: "jsx" });
|
||||
transformSync = transpiler.transformSync.bind(transpiler);
|
||||
transform = transpiler.transform.bind(transpiler);
|
||||
opts = "jsx";
|
||||
} else if (process.env["esbuild"]) {
|
||||
try {
|
||||
const esbuild = await import("esbuild");
|
||||
transformSync = esbuild.transformSync;
|
||||
transform = esbuild.transform;
|
||||
opts = { loader: "jsx" };
|
||||
} catch (exception) {
|
||||
throw exception;
|
||||
}
|
||||
} else if (process.env["swc"]) {
|
||||
try {
|
||||
const swc = await import("@swc/core");
|
||||
transformSync = swc.transformSync;
|
||||
transform = swc.transform;
|
||||
opts = {
|
||||
sourceMaps: false,
|
||||
inlineSourcesContent: false,
|
||||
jsc: {
|
||||
target: "es2022",
|
||||
parser: {
|
||||
jsx: true,
|
||||
},
|
||||
},
|
||||
};
|
||||
} catch (exception) {
|
||||
throw exception;
|
||||
}
|
||||
} else if (process.env["babel"]) {
|
||||
try {
|
||||
const swc = await import("@babel/core");
|
||||
transformSync = swc.transformSync;
|
||||
transform = swc.transform;
|
||||
opts = {
|
||||
sourceMaps: false,
|
||||
presets: [(await import("@babel/preset-react")).default],
|
||||
};
|
||||
} catch (exception) {
|
||||
throw exception;
|
||||
}
|
||||
}
|
||||
|
||||
const code = readFileSync("src/test/fixtures/simple.jsx", "utf8");
|
||||
|
||||
if (process.env.ASYNC) {
|
||||
console.log(await transform(code, opts));
|
||||
} else {
|
||||
console.log(transformSync(code, opts));
|
||||
}
|
||||
697
build.zig
697
build.zig
@@ -1,380 +1,373 @@
|
||||
const std = @import("std");
|
||||
const resolve_path = @import("./src/resolver/resolve_path.zig");
|
||||
|
||||
pub fn addPicoHTTP(step: *std.build.LibExeObjStep) void {
|
||||
step.addPackagePath("picohttp", "src/deps/picohttp.zig");
|
||||
fn pkgPath(comptime out: []const u8) std.build.FileSource {
|
||||
const outpath = comptime std.fs.path.dirname(@src().file).? ++ std.fs.path.sep_str ++ out;
|
||||
return .{ .path = outpath };
|
||||
}
|
||||
pub fn addPicoHTTP(step: *std.build.LibExeObjStep, comptime with_obj: bool) void {
|
||||
step.addIncludeDir("src/deps");
|
||||
step.addCSourceFile("src/deps/picohttpparser.c", &.{});
|
||||
}
|
||||
|
||||
pub fn addMimalloc(step: *std.build.LibExeObjStep) void {
|
||||
var source_files = std.ArrayList([]const u8).init(step.builder.allocator);
|
||||
defer source_files.deinit();
|
||||
|
||||
inline for (.{
|
||||
"src/deps/mimalloc/src/stats.c",
|
||||
"src/deps/mimalloc/src/random.c",
|
||||
"src/deps/mimalloc/src/os.c",
|
||||
"src/deps/mimalloc/src/bitmap.c",
|
||||
"src/deps/mimalloc/src/arena.c",
|
||||
"src/deps/mimalloc/src/region.c",
|
||||
"src/deps/mimalloc/src/segment.c",
|
||||
"src/deps/mimalloc/src/page.c",
|
||||
"src/deps/mimalloc/src/alloc.c",
|
||||
"src/deps/mimalloc/src/alloc-aligned.c",
|
||||
"src/deps/mimalloc/src/alloc-posix.c",
|
||||
"src/deps/mimalloc/src/heap.c",
|
||||
"src/deps/mimalloc/src/options.c",
|
||||
"src/deps/mimalloc/src/init.c",
|
||||
}) |source_file| {
|
||||
source_files.append(source_file) catch unreachable;
|
||||
if (with_obj) {
|
||||
step.addObjectFile("src/deps/picohttpparser.o");
|
||||
}
|
||||
|
||||
var source_flags = std.ArrayList([]const u8).init(step.builder.allocator);
|
||||
defer source_flags.deinit();
|
||||
step.addIncludeDir("src/deps");
|
||||
|
||||
source_flags.append("-DMI_ALLOC_OVERRIDE") catch unreachable;
|
||||
|
||||
if (step.target.getOsTag().isDarwin()) {
|
||||
source_files.append("src/deps/mimalloc/src/alloc-override-osx.c") catch unreachable;
|
||||
source_flags.append("-DMI_OSX_ZONE=1") catch unreachable;
|
||||
if (with_obj) {
|
||||
step.addObjectFile(panicIfNotFound("src/deps/picohttpparser.o"));
|
||||
step.addObjectFile(panicIfNotFound("src/deps/libssl.a"));
|
||||
step.addObjectFile(panicIfNotFound("src/deps/libcrypto.a"));
|
||||
}
|
||||
|
||||
step.addIncludeDir("src/deps/mimalloc/include");
|
||||
step.addCSourceFiles(source_files.items, source_flags.items);
|
||||
// step.add("/Users/jarred/Code/WebKit/WebKitBuild/Release/lib/libWTF.a");
|
||||
|
||||
// ./Tools/Scripts/build-jsc --jsc-only --cmakeargs="-DENABLE_STATIC_JSC=ON"
|
||||
// set -gx ICU_INCLUDE_DIRS "/usr/local/opt/icu4c/include"
|
||||
// homebrew-provided icu4c
|
||||
}
|
||||
|
||||
const color_map = std.ComptimeStringMap([]const u8, .{
|
||||
&.{ "black", "30m" },
|
||||
&.{ "blue", "34m" },
|
||||
&.{ "b", "1m" },
|
||||
&.{ "d", "2m" },
|
||||
&.{ "cyan", "36m" },
|
||||
&.{ "green", "32m" },
|
||||
&.{ "magenta", "35m" },
|
||||
&.{ "red", "31m" },
|
||||
&.{ "white", "37m" },
|
||||
&.{ "yellow", "33m" },
|
||||
});
|
||||
|
||||
fn addInternalPackages(step: *std.build.LibExeObjStep, _: std.mem.Allocator, target: anytype) !void {
|
||||
var boringssl: std.build.Pkg = .{
|
||||
.name = "boringssl",
|
||||
.path = pkgPath("src/deps/boringssl.zig"),
|
||||
};
|
||||
|
||||
var thread_pool: std.build.Pkg = .{
|
||||
.name = "thread_pool",
|
||||
.path = pkgPath("src/thread_pool.zig"),
|
||||
};
|
||||
|
||||
var crash_reporter: std.build.Pkg = .{
|
||||
.name = "crash_reporter",
|
||||
.path = pkgPath("src/deps/backtrace.zig"),
|
||||
};
|
||||
|
||||
var picohttp: std.build.Pkg = .{
|
||||
.name = "picohttp",
|
||||
.path = pkgPath("src/deps/picohttp.zig"),
|
||||
};
|
||||
|
||||
var io_darwin: std.build.Pkg = .{
|
||||
.name = "io",
|
||||
.path = pkgPath("src/io/io_darwin.zig"),
|
||||
};
|
||||
var io_linux: std.build.Pkg = .{
|
||||
.name = "io",
|
||||
.path = pkgPath("src/io/io_linux.zig"),
|
||||
};
|
||||
|
||||
var io = if (target.isDarwin())
|
||||
io_darwin
|
||||
else
|
||||
io_linux;
|
||||
|
||||
var strings: std.build.Pkg = .{
|
||||
.name = "strings",
|
||||
.path = pkgPath("src/string_immutable.zig"),
|
||||
};
|
||||
|
||||
var clap: std.build.Pkg = .{
|
||||
.name = "clap",
|
||||
.path = pkgPath("src/deps/zig-clap/clap.zig"),
|
||||
};
|
||||
|
||||
var http: std.build.Pkg = .{
|
||||
.name = "http",
|
||||
.path = pkgPath("src/http_client_async.zig"),
|
||||
};
|
||||
|
||||
var javascript_core: std.build.Pkg = .{
|
||||
.name = "javascript_core",
|
||||
.path = pkgPath("src/jsc.zig"),
|
||||
};
|
||||
|
||||
var analytics: std.build.Pkg = .{
|
||||
.name = "analytics",
|
||||
.path = pkgPath("src/analytics.zig"),
|
||||
};
|
||||
|
||||
io.dependencies = &.{analytics};
|
||||
|
||||
javascript_core.dependencies = &.{ http, strings, picohttp, io };
|
||||
http.dependencies = &.{
|
||||
strings,
|
||||
picohttp,
|
||||
io,
|
||||
boringssl,
|
||||
thread_pool,
|
||||
};
|
||||
thread_pool.dependencies = &.{ io, http };
|
||||
http.dependencies = &.{
|
||||
strings,
|
||||
picohttp,
|
||||
io,
|
||||
boringssl,
|
||||
thread_pool,
|
||||
};
|
||||
thread_pool.dependencies = &.{ io, http };
|
||||
|
||||
thread_pool.dependencies = &.{
|
||||
io,
|
||||
http,
|
||||
};
|
||||
|
||||
step.addPackage(thread_pool);
|
||||
step.addPackage(picohttp);
|
||||
step.addPackage(io);
|
||||
step.addPackage(strings);
|
||||
step.addPackage(clap);
|
||||
step.addPackage(http);
|
||||
step.addPackage(boringssl);
|
||||
step.addPackage(javascript_core);
|
||||
step.addPackage(crash_reporter);
|
||||
}
|
||||
var output_dir: []const u8 = "";
|
||||
fn panicIfNotFound(comptime filepath: []const u8) []const u8 {
|
||||
var file = std.fs.cwd().openFile(filepath, .{ .read = true }) catch |err| {
|
||||
const linux_only = "\nOn Linux, you'll need to compile libiconv manually and copy the .a file into src/deps.";
|
||||
|
||||
std.debug.panic("error: {s} opening {s}. Please ensure you've downloaded git submodules, and ran `make vendor`, `make jsc`." ++ linux_only, .{ filepath, @errorName(err) });
|
||||
std.debug.panic("error: {s} opening {s}. Please ensure you've downloaded git submodules, and ran `make vendor`, `make jsc`.", .{ filepath, @errorName(err) });
|
||||
};
|
||||
file.close();
|
||||
|
||||
return filepath;
|
||||
}
|
||||
|
||||
const x64 = "x64";
|
||||
fn updateRuntime() anyerror!void {
|
||||
var runtime_out_file = try std.fs.cwd().openFile("src/runtime.out.js", .{ .read = true });
|
||||
const runtime_hash = std.hash.Wyhash.hash(
|
||||
0,
|
||||
try runtime_out_file.readToEndAlloc(std.heap.page_allocator, try runtime_out_file.getEndPos()),
|
||||
);
|
||||
const runtime_version_file = std.fs.cwd().createFile("src/runtime.version", .{ .truncate = true }) catch std.debug.panic("Failed to create src/runtime.version", .{});
|
||||
defer runtime_version_file.close();
|
||||
runtime_version_file.writer().print("{x}", .{runtime_hash}) catch unreachable;
|
||||
var fallback_out_file = try std.fs.cwd().openFile("src/fallback.out.js", .{ .read = true });
|
||||
const fallback_hash = std.hash.Wyhash.hash(
|
||||
0,
|
||||
try fallback_out_file.readToEndAlloc(std.heap.page_allocator, try fallback_out_file.getEndPos()),
|
||||
);
|
||||
|
||||
const fallback_version_file = std.fs.cwd().createFile("src/fallback.version", .{ .truncate = true }) catch std.debug.panic("Failed to create src/fallback.version", .{});
|
||||
|
||||
fallback_version_file.writer().print("{x}", .{fallback_hash}) catch unreachable;
|
||||
|
||||
fallback_version_file.close();
|
||||
}
|
||||
|
||||
var x64 = "x64";
|
||||
var mode: std.builtin.Mode = undefined;
|
||||
pub fn build(b: *std.build.Builder) !void {
|
||||
// Standard target options allows the person running `zig build` to choose
|
||||
// what target to build for. Here we do not override the defaults, which
|
||||
// means any target is allowed, and the default is native. Other options
|
||||
// for restricting supported target set are available.
|
||||
var target = b.standardTargetOptions(.{});
|
||||
const mode = b.standardReleaseOptions();
|
||||
// Standard release options allow the person running `zig build` to select
|
||||
// between Debug, ReleaseSafe, ReleaseFast, and ReleaseSmall.
|
||||
mode = b.standardReleaseOptions();
|
||||
|
||||
const cwd: []const u8 = b.pathFromRoot(".");
|
||||
var exe: *std.build.LibExeObjStep = undefined;
|
||||
var output_dir_buf = std.mem.zeroes([4096]u8);
|
||||
const bin_label = if (mode == std.builtin.Mode.Debug) "packages/debug-bun-" else "packages/bun-";
|
||||
|
||||
const cpu_arch: std.Target.Cpu.Arch = target.getCpuArch();
|
||||
|
||||
var os_tag_name = @tagName(target.getOs().tag);
|
||||
if (std.mem.eql(u8, os_tag_name, "macos")) {
|
||||
os_tag_name = "darwin";
|
||||
if (cpu_arch.isAARCH64()) {
|
||||
target.os_version_min = std.build.Target.OsVersion{ .semver = .{ .major = 11, .minor = 0, .patch = 0 } };
|
||||
} else if (cpu_arch.isX86()) {
|
||||
target.os_version_min = std.build.Target.OsVersion{ .semver = .{ .major = 10, .minor = 14, .patch = 0 } };
|
||||
}
|
||||
}
|
||||
var bin_label = if (mode == std.builtin.Mode.Debug) "packages/debug-bun-" else "packages/bun-";
|
||||
|
||||
var triplet_buf: [64]u8 = undefined;
|
||||
std.mem.copy(u8, &triplet_buf, os_tag_name);
|
||||
const os_name = triplet_buf[0..os_tag_name.len];
|
||||
triplet_buf[os_name.len] = '-';
|
||||
var os_tagname = @tagName(target.getOs().tag);
|
||||
|
||||
std.mem.copy(u8, triplet_buf[os_name.len + 1 ..], @tagName(target.getCpuArch()));
|
||||
var cpu_arch_name = triplet_buf[os_name.len + 1 ..][0..@tagName(target.getCpuArch()).len];
|
||||
std.mem.replaceScalar(u8, cpu_arch_name, '_', '-');
|
||||
if (std.mem.eql(u8, cpu_arch_name, "x86-64")) {
|
||||
std.mem.copy(u8, cpu_arch_name, "x64");
|
||||
cpu_arch_name = cpu_arch_name[0..3];
|
||||
const arch: std.Target.Cpu.Arch = target.getCpuArch();
|
||||
|
||||
if (std.mem.eql(u8, os_tagname, "macos")) {
|
||||
os_tagname = "darwin";
|
||||
if (arch.isAARCH64()) {
|
||||
target.os_version_min = std.zig.CrossTarget.OsVersion{ .semver = .{ .major = 11, .minor = 0, .patch = 0 } };
|
||||
} else if (arch.isX86()) {
|
||||
target.os_version_min = std.zig.CrossTarget.OsVersion{ .semver = .{ .major = 10, .minor = 14, .patch = 0 } };
|
||||
}
|
||||
} else if (target.isLinux()) {
|
||||
target.setGnuLibCVersion(2, 27, 0);
|
||||
}
|
||||
|
||||
const triplet = triplet_buf[0 .. os_name.len + cpu_arch_name.len + 1];
|
||||
std.mem.copy(
|
||||
u8,
|
||||
&triplet_buf,
|
||||
os_tagname,
|
||||
);
|
||||
var osname = triplet_buf[0..os_tagname.len];
|
||||
triplet_buf[osname.len] = '-';
|
||||
|
||||
std.mem.copy(u8, triplet_buf[osname.len + 1 ..], @tagName(target.getCpuArch()));
|
||||
var cpuArchName = triplet_buf[osname.len + 1 ..][0..@tagName(target.getCpuArch()).len];
|
||||
std.mem.replaceScalar(u8, cpuArchName, '_', '-');
|
||||
if (std.mem.eql(u8, cpuArchName, "x86-64")) {
|
||||
std.mem.copy(u8, cpuArchName, "x64");
|
||||
cpuArchName = cpuArchName[0..3];
|
||||
}
|
||||
|
||||
var triplet = triplet_buf[0 .. osname.len + cpuArchName.len + 1];
|
||||
|
||||
const output_dir_base = try std.fmt.bufPrint(&output_dir_buf, "{s}{s}", .{ bin_label, triplet });
|
||||
const output_dir = b.pathFromRoot(output_dir_base);
|
||||
output_dir = b.pathFromRoot(output_dir_base);
|
||||
const bun_executable_name = if (mode == std.builtin.Mode.Debug) "bun-debug" else "bun";
|
||||
|
||||
if (target.getOsTag() == .wasi) {
|
||||
exe.enable_wasmtime = true;
|
||||
exe = b.addExecutable(bun_executable_name, "src/main_wasi.zig");
|
||||
exe.linkage = .dynamic;
|
||||
exe.setOutputDir(output_dir);
|
||||
} else if (target.getCpuArch().isWasm()) {
|
||||
const lib = b.addExecutable(bun_executable_name, "src/main_wasm.zig");
|
||||
lib.single_threaded = true;
|
||||
// exe.want_lto = true;
|
||||
// exe.linkLibrary(lib);
|
||||
|
||||
if (mode == std.builtin.Mode.Debug) {
|
||||
// exception_handling
|
||||
target.cpu_features_add.addFeature(2);
|
||||
} else {
|
||||
// lib.strip = true;
|
||||
}
|
||||
|
||||
lib.setOutputDir(output_dir);
|
||||
lib.want_lto = true;
|
||||
b.install_path = lib.getOutputSource().getPath(b);
|
||||
|
||||
std.debug.print("Build: ./{s}\n", .{b.install_path});
|
||||
b.default_step.dependOn(&lib.step);
|
||||
b.verbose_link = true;
|
||||
lib.setTarget(target);
|
||||
lib.setBuildMode(mode);
|
||||
|
||||
std.fs.deleteTreeAbsolute(std.fs.path.join(b.allocator, &.{ cwd, lib.getOutputSource().getPath(b) }) catch unreachable) catch {};
|
||||
|
||||
lib.strip = false;
|
||||
lib.install();
|
||||
|
||||
const run_cmd = lib.run();
|
||||
run_cmd.step.dependOn(b.getInstallStep());
|
||||
if (b.args) |args| {
|
||||
run_cmd.addArgs(args);
|
||||
}
|
||||
|
||||
const run_step = b.step("run", "Run the app");
|
||||
run_step.dependOn(&run_cmd.step);
|
||||
|
||||
return;
|
||||
} else {
|
||||
exe = b.addExecutable(bun_executable_name, "src/main.zig");
|
||||
}
|
||||
|
||||
exe = b.addExecutable(bun_executable_name, "src/main.zig");
|
||||
// exe.setLibCFile("libc.txt");
|
||||
exe.linkLibC();
|
||||
exe.addPackagePath("clap", "src/deps/zig-clap/clap.zig");
|
||||
// exe.linkLibCpp();
|
||||
|
||||
exe.setOutputDir(output_dir);
|
||||
|
||||
const cwd_dir = std.fs.cwd();
|
||||
|
||||
const runtime_hash = read: {
|
||||
const runtime_out_file = try cwd_dir.openFile("src/runtime.out.js", .{ .read = true });
|
||||
defer runtime_out_file.close();
|
||||
break :read std.hash.Wyhash.hash(0, try runtime_out_file.readToEndAlloc(b.allocator, try runtime_out_file.getEndPos()));
|
||||
};
|
||||
|
||||
const runtime_version_file = cwd_dir.createFile("src/runtime.version", .{ .truncate = true }) catch std.debug.panic("Failed to create src/runtime.version", .{});
|
||||
defer runtime_version_file.close();
|
||||
runtime_version_file.writer().print("{x}", .{runtime_hash}) catch unreachable;
|
||||
|
||||
const fallback_hash = read: {
|
||||
const fallback_out_file = try cwd_dir.openFile("src/fallback.out.js", .{ .read = true });
|
||||
defer fallback_out_file.close();
|
||||
break :read std.hash.Wyhash.hash(0, try fallback_out_file.readToEndAlloc(b.allocator, try fallback_out_file.getEndPos()));
|
||||
};
|
||||
|
||||
const fallback_version_file = cwd_dir.createFile("src/fallback.version", .{ .truncate = true }) catch std.debug.panic("Failed to create src/fallback.version", .{});
|
||||
defer fallback_version_file.close();
|
||||
|
||||
fallback_version_file.writer().print("{x}", .{fallback_hash}) catch unreachable;
|
||||
updateRuntime() catch {};
|
||||
|
||||
exe.setTarget(target);
|
||||
exe.setBuildMode(mode);
|
||||
b.install_path = output_dir;
|
||||
|
||||
const javascript = b.addExecutable("spjs", "src/main_javascript.zig");
|
||||
const typings_exe = b.addExecutable("typescript-decls", "src/javascript/jsc/typescript.zig");
|
||||
|
||||
exe.setMainPkgPath(b.pathFromRoot("."));
|
||||
javascript.setMainPkgPath(b.pathFromRoot("."));
|
||||
var typings_exe = b.addExecutable("typescript-decls", "src/javascript/jsc/typescript.zig");
|
||||
typings_exe.setMainPkgPath(b.pathFromRoot("."));
|
||||
|
||||
// exe.want_lto = true;
|
||||
if (!target.getCpuArch().isWasm()) {
|
||||
b.default_step.dependOn(&exe.step);
|
||||
defer b.default_step.dependOn(&b.addLog("Output: {s}/{s}\n", .{ output_dir, bun_executable_name }).step);
|
||||
defer b.default_step.dependOn(&b.addLog(
|
||||
"Build {s} v{} - v{}\n",
|
||||
.{
|
||||
triplet,
|
||||
target.getOsVersionMin().semver,
|
||||
target.getOsVersionMax().semver,
|
||||
},
|
||||
).step);
|
||||
|
||||
const bindings_dir = std.fs.path.join(
|
||||
var obj_step = b.step("obj", "Build bun as a .o file");
|
||||
var obj = b.addObject(bun_executable_name, exe.root_src.?.path);
|
||||
|
||||
{
|
||||
obj.setTarget(target);
|
||||
addPicoHTTP(obj, false);
|
||||
obj.setMainPkgPath(b.pathFromRoot("."));
|
||||
|
||||
try addInternalPackages(
|
||||
obj,
|
||||
b.allocator,
|
||||
&.{
|
||||
cwd,
|
||||
"src",
|
||||
"javascript",
|
||||
"jsc",
|
||||
"bindings-obj",
|
||||
},
|
||||
) catch unreachable;
|
||||
|
||||
var bindings_dir_ = cwd_dir.openDir(bindings_dir, .{ .iterate = true }) catch std.debug.panic("Error opening bindings directory. Please make sure you ran `make jsc`. {s} should exist", .{bindings_dir});
|
||||
var bindings_walker = bindings_dir_.walk(b.allocator) catch std.debug.panic("Error reading bindings directory {s}", .{bindings_dir});
|
||||
|
||||
var bindings_files = std.ArrayList([]const u8).init(b.allocator);
|
||||
|
||||
while (bindings_walker.next() catch unreachable) |entry| {
|
||||
if (std.mem.eql(u8, std.fs.path.extension(entry.basename), ".o")) {
|
||||
bindings_files.append(bindings_dir_.realpathAlloc(b.allocator, entry.path) catch unreachable) catch unreachable;
|
||||
}
|
||||
}
|
||||
|
||||
// // References:
|
||||
// // - https://github.com/mceSystems/node-jsc/blob/master/deps/jscshim/webkit.gyp
|
||||
// // - https://github.com/mceSystems/node-jsc/blob/master/deps/jscshim/docs/webkit_fork_and_compilation.md#webkit-port-and-compilation
|
||||
// const flags = [_][]const u8{
|
||||
// "-Isrc/JavaScript/jsc/WebKit/WebKitBuild/Release/JavaScriptCore/PrivateHeaders",
|
||||
// "-Isrc/JavaScript/jsc/WebKit/WebKitBuild/Release/WTF/Headers",
|
||||
// "-Isrc/javascript/jsc/WebKit/WebKitBuild/Release/ICU/Headers",
|
||||
// "-DSTATICALLY_LINKED_WITH_JavaScriptCore=1",
|
||||
// "-DSTATICALLY_LINKED_WITH_WTF=1",
|
||||
// "-DBUILDING_WITH_CMAKE=1",
|
||||
// "-DNOMINMAX",
|
||||
// "-DENABLE_INSPECTOR_ALTERNATE_DISPATCHERS=0",
|
||||
// "-DBUILDING_JSCONLY__",
|
||||
// "-DASSERT_ENABLED=0", // missing symbol errors like this will happen "JSC::DFG::DoesGCCheck::verifyCanGC(JSC::VM&)"
|
||||
// "-Isrc/JavaScript/jsc/WebKit/WebKitBuild/Release/", // config.h,
|
||||
// "-Isrc/JavaScript/jsc/bindings/",
|
||||
// "-Isrc/javascript/jsc/WebKit/Source/bmalloc",
|
||||
// "-std=gnu++17",
|
||||
// if (target.getOsTag() == .macos) "-DUSE_FOUNDATION=1" else "",
|
||||
// if (target.getOsTag() == .macos) "-DUSE_CF_RETAIN_PTR=1" else "",
|
||||
// };
|
||||
const headers_step = b.step("headers-obj", "JSC headers Step #1");
|
||||
const headers_obj: *std.build.LibExeObjStep = b.addObject("headers", "src/javascript/jsc/bindings/bindings-generator.zig");
|
||||
headers_obj.setMainPkgPath(javascript.main_pkg_path.?);
|
||||
headers_step.dependOn(&headers_obj.step);
|
||||
target,
|
||||
);
|
||||
|
||||
{
|
||||
b.default_step.dependOn(&b.addLog(
|
||||
"Build {s} v{} - v{}",
|
||||
obj_step.dependOn(&b.addLog(
|
||||
"Build {s} v{} - v{}\n",
|
||||
.{
|
||||
triplet,
|
||||
target.getOsVersionMin().semver,
|
||||
target.getOsVersionMax().semver,
|
||||
obj.target.getOsVersionMin().semver,
|
||||
obj.target.getOsVersionMax().semver,
|
||||
},
|
||||
).step);
|
||||
}
|
||||
b.default_step.dependOn(&exe.step);
|
||||
|
||||
{
|
||||
const steps = [_]*std.build.LibExeObjStep{ exe, javascript, typings_exe };
|
||||
obj_step.dependOn(&obj.step);
|
||||
|
||||
// const single_threaded = b.option(bool, "single-threaded", "Build single-threaded") orelse false;
|
||||
obj.setOutputDir(output_dir);
|
||||
obj.setBuildMode(mode);
|
||||
obj.linkLibC();
|
||||
obj.linkLibCpp();
|
||||
|
||||
for (steps) |step| {
|
||||
step.linkLibC();
|
||||
step.linkLibCpp();
|
||||
addPicoHTTP(step);
|
||||
addMimalloc(step);
|
||||
obj.strip = false;
|
||||
obj.bundle_compiler_rt = true;
|
||||
obj.omit_frame_pointer = false;
|
||||
|
||||
step.addObjectFile(panicIfNotFound("src/deps/libJavaScriptCore.a"));
|
||||
step.addObjectFile(panicIfNotFound("src/deps/libWTF.a"));
|
||||
step.addObjectFile(panicIfNotFound("src/deps/libcrypto.a"));
|
||||
step.addObjectFile(panicIfNotFound("src/deps/libbmalloc.a"));
|
||||
step.addObjectFile(panicIfNotFound("src/deps/libarchive.a"));
|
||||
step.addObjectFile(panicIfNotFound("src/deps/libs2n.a"));
|
||||
step.addObjectFile(panicIfNotFound("src/deps/zlib/libz.a"));
|
||||
b.default_step.dependOn(&obj.step);
|
||||
|
||||
// step.single_threaded = single_threaded;
|
||||
|
||||
if (target.getOsTag() == .macos) {
|
||||
const homebrew_prefix = comptime if (std.Target.current.cpu.arch == .aarch64)
|
||||
"/opt/homebrew/"
|
||||
else
|
||||
"/usr/local/";
|
||||
|
||||
// We must link ICU statically
|
||||
step.addObjectFile(panicIfNotFound(homebrew_prefix ++ "opt/icu4c/lib/libicudata.a"));
|
||||
step.addObjectFile(panicIfNotFound(homebrew_prefix ++ "opt/icu4c/lib/libicui18n.a"));
|
||||
step.addObjectFile(panicIfNotFound(homebrew_prefix ++ "opt/icu4c/lib/libicuuc.a"));
|
||||
step.addObjectFile(panicIfNotFound(homebrew_prefix ++ "opt/libiconv/lib/libiconv.a"));
|
||||
|
||||
// icucore is a weird macOS only library
|
||||
step.linkSystemLibrary("icucore");
|
||||
step.addLibPath(panicIfNotFound(homebrew_prefix ++ "opt/icu4c/lib"));
|
||||
step.addIncludeDir(panicIfNotFound(homebrew_prefix ++ "opt/icu4c/include"));
|
||||
} else {
|
||||
step.linkSystemLibrary("icuuc");
|
||||
step.linkSystemLibrary("icudata");
|
||||
step.linkSystemLibrary("icui18n");
|
||||
step.addObjectFile(panicIfNotFound("src/deps/libiconv.a"));
|
||||
}
|
||||
|
||||
for (bindings_files.items) |binding| {
|
||||
step.addObjectFile(
|
||||
binding,
|
||||
);
|
||||
}
|
||||
}
|
||||
if (target.getOsTag() == .linux) {
|
||||
// obj.want_lto = tar;
|
||||
obj.link_emit_relocs = true;
|
||||
obj.link_eh_frame_hdr = true;
|
||||
obj.link_function_sections = true;
|
||||
}
|
||||
|
||||
{
|
||||
var obj_step = b.step("obj", "Build Bun as a .o file");
|
||||
var obj = b.addObject(bun_executable_name, exe.root_src.?.path);
|
||||
|
||||
obj.setTarget(target);
|
||||
obj.setBuildMode(mode);
|
||||
|
||||
addPicoHTTP(obj);
|
||||
addMimalloc(obj);
|
||||
obj.addPackagePath("clap", "src/deps/zig-clap/clap.zig");
|
||||
|
||||
{
|
||||
obj_step.dependOn(&b.addLog(
|
||||
"Build {s} v{} - v{}\n",
|
||||
.{
|
||||
triplet,
|
||||
obj.target.getOsVersionMin().semver,
|
||||
obj.target.getOsVersionMax().semver,
|
||||
},
|
||||
).step);
|
||||
}
|
||||
|
||||
obj_step.dependOn(&obj.step);
|
||||
|
||||
obj.setOutputDir(output_dir);
|
||||
|
||||
obj.linkLibC();
|
||||
obj.linkLibCpp();
|
||||
|
||||
obj.strip = false;
|
||||
obj.bundle_compiler_rt = true;
|
||||
|
||||
if (target.getOsTag() == .linux) {
|
||||
// obj.want_lto = tar;
|
||||
obj.link_emit_relocs = true;
|
||||
obj.link_function_sections = true;
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
headers_obj.setTarget(target);
|
||||
headers_obj.setBuildMode(mode);
|
||||
headers_obj.setOutputDir(output_dir);
|
||||
|
||||
headers_obj.addPackagePath("clap", "src/deps/zig-clap/clap.zig");
|
||||
headers_obj.linkLibC();
|
||||
headers_obj.linkLibCpp();
|
||||
headers_obj.bundle_compiler_rt = true;
|
||||
|
||||
if (target.getOsTag() == .linux) {
|
||||
// obj.want_lto = tar;
|
||||
headers_obj.link_emit_relocs = true;
|
||||
headers_obj.link_function_sections = true;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
b.default_step.dependOn(&exe.step);
|
||||
var log_step = b.addLog("Destination: {s}/{s}\n", .{ output_dir, bun_executable_name });
|
||||
log_step.step.dependOn(&obj.step);
|
||||
}
|
||||
|
||||
javascript.strip = false;
|
||||
javascript.packages = std.ArrayList(std.build.Pkg).fromOwnedSlice(b.allocator, b.allocator.dupe(std.build.Pkg, exe.packages.items) catch unreachable);
|
||||
|
||||
javascript.setOutputDir(output_dir);
|
||||
javascript.setBuildMode(mode);
|
||||
|
||||
const run_cmd = exe.run();
|
||||
run_cmd.step.dependOn(b.getInstallStep());
|
||||
if (b.args) |args| {
|
||||
run_cmd.addArgs(args);
|
||||
{
|
||||
const headers_step = b.step("headers-obj", "Build JavaScriptCore headers");
|
||||
var headers_obj: *std.build.LibExeObjStep = b.addObject("headers", "src/bindgen.zig");
|
||||
defer headers_step.dependOn(&headers_obj.step);
|
||||
try configureObjectStep(headers_obj, target, obj.main_pkg_path.?);
|
||||
}
|
||||
|
||||
const run_step = b.step("run", "Run the app");
|
||||
run_step.dependOn(&run_cmd.step);
|
||||
{
|
||||
const headers_step = b.step("httpbench-obj", "Build HTTPBench tool (object files)");
|
||||
var headers_obj: *std.build.LibExeObjStep = b.addObject("httpbench", "misctools/http_bench.zig");
|
||||
defer headers_step.dependOn(&headers_obj.step);
|
||||
try configureObjectStep(headers_obj, target, obj.main_pkg_path.?);
|
||||
}
|
||||
|
||||
var log_step = b.addLog("Destination: {s}/{s}\n", .{ output_dir, bun_executable_name });
|
||||
log_step.step.dependOn(&exe.step);
|
||||
{
|
||||
const headers_step = b.step("fetch-obj", "Build fetch (object files)");
|
||||
var headers_obj: *std.build.LibExeObjStep = b.addObject("fetch", "misctools/fetch.zig");
|
||||
defer headers_step.dependOn(&headers_obj.step);
|
||||
try configureObjectStep(headers_obj, target, obj.main_pkg_path.?);
|
||||
}
|
||||
|
||||
{
|
||||
const headers_step = b.step("tgz-obj", "Build tgz (object files)");
|
||||
var headers_obj: *std.build.LibExeObjStep = b.addObject("tgz", "misctools/tgz.zig");
|
||||
defer headers_step.dependOn(&headers_obj.step);
|
||||
try configureObjectStep(headers_obj, target, obj.main_pkg_path.?);
|
||||
}
|
||||
|
||||
{
|
||||
const headers_step = b.step("test", "Build test");
|
||||
|
||||
var test_file = b.option([]const u8, "test-file", "Input file for test");
|
||||
var test_bin_ = b.option([]const u8, "test-bin", "Emit bin to");
|
||||
var test_filter = b.option([]const u8, "test-filter", "Filter for test");
|
||||
|
||||
var headers_obj: *std.build.LibExeObjStep = b.addTest(test_file orelse "src/main.zig");
|
||||
headers_obj.setFilter(test_filter);
|
||||
if (test_bin_) |test_bin| {
|
||||
headers_obj.name = std.fs.path.basename(test_bin);
|
||||
if (std.fs.path.dirname(test_bin)) |dir| headers_obj.setOutputDir(dir);
|
||||
}
|
||||
|
||||
try configureObjectStep(headers_obj, target, obj.main_pkg_path.?);
|
||||
try linkObjectFiles(b, headers_obj, target);
|
||||
{
|
||||
var before = b.addLog("\x1b[" ++ color_map.get("magenta").? ++ "\x1b[" ++ color_map.get("b").? ++ "[{s} tests]" ++ "\x1b[" ++ color_map.get("d").? ++ " ----\n\n" ++ "\x1b[0m", .{"bun"});
|
||||
var after = b.addLog("\x1b[" ++ color_map.get("d").? ++ "–––---\n\n" ++ "\x1b[0m", .{});
|
||||
headers_step.dependOn(&before.step);
|
||||
headers_step.dependOn(&headers_obj.step);
|
||||
headers_step.dependOn(&after.step);
|
||||
}
|
||||
|
||||
for (headers_obj.packages.items) |pkg_| {
|
||||
const pkg: std.build.Pkg = pkg_;
|
||||
if (std.mem.eql(u8, pkg.name, "clap")) continue;
|
||||
var test_ = b.addTestSource(pkg.path);
|
||||
|
||||
test_.setMainPkgPath(obj.main_pkg_path.?);
|
||||
test_.setTarget(target);
|
||||
try linkObjectFiles(b, test_, target);
|
||||
if (pkg.dependencies) |children| {
|
||||
test_.packages = std.ArrayList(std.build.Pkg).init(b.allocator);
|
||||
try test_.packages.appendSlice(children);
|
||||
}
|
||||
|
||||
var before = b.addLog("\x1b[" ++ color_map.get("magenta").? ++ "\x1b[" ++ color_map.get("b").? ++ "[{s} tests]" ++ "\x1b[" ++ color_map.get("d").? ++ " ----\n\n" ++ "\x1b[0m", .{pkg.name});
|
||||
var after = b.addLog("\x1b[" ++ color_map.get("d").? ++ "–––---\n\n" ++ "\x1b[0m", .{});
|
||||
headers_step.dependOn(&before.step);
|
||||
headers_step.dependOn(&test_.step);
|
||||
headers_step.dependOn(&after.step);
|
||||
}
|
||||
}
|
||||
|
||||
var typings_cmd: *std.build.RunStep = typings_exe.run();
|
||||
typings_cmd.cwd = cwd;
|
||||
@@ -383,12 +376,90 @@ pub fn build(b: *std.build.Builder) !void {
|
||||
typings_cmd.step.dependOn(&typings_exe.step);
|
||||
|
||||
typings_exe.linkLibC();
|
||||
|
||||
typings_exe.linkLibCpp();
|
||||
typings_exe.setMainPkgPath(cwd);
|
||||
|
||||
var typings_step = b.step("types", "Build TypeScript types");
|
||||
typings_step.dependOn(&typings_cmd.step);
|
||||
|
||||
var javascript_cmd = b.step("spjs", "Build standalone JavaScript runtime. Must run \"make jsc\" first.");
|
||||
javascript_cmd.dependOn(&javascript.step);
|
||||
}
|
||||
|
||||
pub var original_make_fn: ?fn (step: *std.build.Step) anyerror!void = null;
|
||||
|
||||
// Due to limitations in std.build.Builder
|
||||
// we cannot use this with debugging
|
||||
// so I am leaving this here for now, with the eventual intent to switch to std.build.Builder
|
||||
// but it is dead code
|
||||
pub fn linkObjectFiles(b: *std.build.Builder, obj: *std.build.LibExeObjStep, target: anytype) !void {
|
||||
var dirs_to_search = std.BoundedArray([]const u8, 32).init(0) catch unreachable;
|
||||
const arm_brew_prefix: []const u8 = "/opt/homebrew";
|
||||
const x86_brew_prefix: []const u8 = "/usr/local";
|
||||
try dirs_to_search.append(b.env_map.get("BUN_DEPS_OUT_DIR") orelse b.env_map.get("BUN_DEPS_DIR") orelse @as([]const u8, b.pathFromRoot("src/deps")));
|
||||
if (target.getOsTag() == .macos) {
|
||||
if (target.getCpuArch().isAARCH64()) {
|
||||
try dirs_to_search.append(comptime arm_brew_prefix ++ "/opt/icu4c/lib/");
|
||||
} else {
|
||||
try dirs_to_search.append(comptime x86_brew_prefix ++ "/opt/icu4c/lib/");
|
||||
}
|
||||
}
|
||||
|
||||
if (b.env_map.get("JSC_LIB")) |jsc| {
|
||||
try dirs_to_search.append(jsc);
|
||||
}
|
||||
|
||||
var added = std.AutoHashMap(u64, void).init(b.allocator);
|
||||
|
||||
const files_we_care_about = std.ComptimeStringMap([]const u8, .{
|
||||
.{ "libmimalloc.o", "libmimalloc.o" },
|
||||
.{ "libz.a", "libz.a" },
|
||||
.{ "libarchive.a", "libarchive.a" },
|
||||
.{ "libssl.a", "libssl.a" },
|
||||
.{ "picohttpparser.o", "picohttpparser.o" },
|
||||
.{ "libcrypto.boring.a", "libcrypto.boring.a" },
|
||||
.{ "libicuuc.a", "libicuuc.a" },
|
||||
.{ "libicudata.a", "libicudata.a" },
|
||||
.{ "libicui18n.a", "libicui18n.a" },
|
||||
.{ "libJavaScriptCore.a", "libJavaScriptCore.a" },
|
||||
.{ "libWTF.a", "libWTF.a" },
|
||||
.{ "libbmalloc.a", "libbmalloc.a" },
|
||||
.{ "libbacktrace.a", "libbacktrace.a" },
|
||||
});
|
||||
|
||||
for (dirs_to_search.slice()) |deps_path| {
|
||||
var deps_dir = std.fs.cwd().openDir(deps_path, .{ .iterate = true }) catch @panic("Failed to open dependencies directory");
|
||||
var iterator = deps_dir.iterate();
|
||||
|
||||
while (iterator.next() catch null) |entr| {
|
||||
const entry: std.fs.Dir.Entry = entr;
|
||||
if (files_we_care_about.get(entry.name)) |obj_name| {
|
||||
var has_added = try added.getOrPut(std.hash.Wyhash.hash(0, obj_name));
|
||||
if (!has_added.found_existing) {
|
||||
var paths = [_][]const u8{ deps_path, obj_name };
|
||||
obj.addObjectFile(try std.fs.path.join(b.allocator, &paths));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn configureObjectStep(obj: *std.build.LibExeObjStep, target: anytype, main_pkg_path: []const u8) !void {
|
||||
obj.setMainPkgPath(main_pkg_path);
|
||||
obj.setTarget(target);
|
||||
|
||||
try addInternalPackages(obj, std.heap.page_allocator, target);
|
||||
addPicoHTTP(obj, false);
|
||||
|
||||
obj.strip = false;
|
||||
obj.setOutputDir(output_dir);
|
||||
obj.setBuildMode(mode);
|
||||
obj.linkLibC();
|
||||
obj.linkLibCpp();
|
||||
obj.bundle_compiler_rt = true;
|
||||
|
||||
if (target.getOsTag() == .linux) {
|
||||
// obj.want_lto = tar;
|
||||
obj.link_emit_relocs = true;
|
||||
obj.link_eh_frame_hdr = true;
|
||||
obj.link_function_sections = true;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,90 +1,149 @@
|
||||
# This is terribly complicated
|
||||
# It's because:
|
||||
# 1. bun run has to have dynamic completions
|
||||
# 2. there are global options
|
||||
# 3. bun {install add remove} gets special options
|
||||
# 4. I don't know how to write fish completions well
|
||||
# Contributions very welcome!!
|
||||
|
||||
function __fish__get_bun_bins
|
||||
string split ' ' (bun getcompletes b)
|
||||
string split ' ' (bun getcompletes b)
|
||||
end
|
||||
|
||||
function __fish__get_bun_scripts
|
||||
set -lx SHELL bash
|
||||
set -lx MAX_DESCRIPTION_LEN 40
|
||||
string trim (string split '\n' (string split '\t' (bun getcompletes z)))
|
||||
set -lx SHELL bash
|
||||
set -lx MAX_DESCRIPTION_LEN 40
|
||||
string trim (string split '\n' (string split '\t' (bun getcompletes z)))
|
||||
end
|
||||
|
||||
function __fish__get_bun_packages
|
||||
if test (commandline -ct) != ""
|
||||
set -lx SHELL fish
|
||||
string split ' ' (bun getcompletes a (commandline -ct))
|
||||
end
|
||||
end
|
||||
|
||||
function __history_completions
|
||||
set -l tokens (commandline --current-process --tokenize)
|
||||
history --prefix (commandline) | string replace -r \^$tokens[1]\\s\* "" | string replace -r \^$tokens[2]\\s\* "" | string split ' '
|
||||
end
|
||||
|
||||
function __fish__get_bun_bun_js_files
|
||||
string split ' ' (bun getcompletes j)
|
||||
string split ' ' (bun getcompletes j)
|
||||
end
|
||||
|
||||
function bun_fish_is_nth_token --description 'Test if current token is on Nth place' --argument-names n
|
||||
set -l tokens (commandline -poc)
|
||||
set -l tokens (string replace -r --filter '^([^-].*)' '$1' -- $tokens)
|
||||
test (count $tokens) -eq "$n"
|
||||
set -l tokens (commandline -poc)
|
||||
set -l tokens (string replace -r --filter '^([^-].*)' '$1' -- $tokens)
|
||||
test (count $tokens) -eq "$n"
|
||||
end
|
||||
|
||||
function __bun_command_count --argument-names n
|
||||
set -l cmds (commandline -poc)
|
||||
set -l cmds (commandline -poc)
|
||||
|
||||
test (count cmds) -eq "$n"
|
||||
test (count cmds) -eq "$n"
|
||||
end
|
||||
|
||||
function __bun_last_cmd --argument-names n
|
||||
set -l cmds (commandline -poc)
|
||||
set -l cmds (commandline -poc)
|
||||
|
||||
test "(cmds[-1])" = "$n"
|
||||
test "(cmds[-1])" = "$n"
|
||||
end
|
||||
|
||||
set -l bun_builtin_cmds dev create help bun upgrade discord run
|
||||
set -l bun_builtin_cmds_without_run dev create help bun upgrade discord
|
||||
set -l bun_builtin_cmds_without_bun dev create help upgrade run discord
|
||||
set -l bun_builtin_cmds_without_create dev help bun upgrade discord run
|
||||
set -l bun_install_boolean_flags yarn production optional development no-save dry-run force no-cache silent verbose
|
||||
set -l bun_install_boolean_flags_descriptions "Write a yarn.lock file (yarn v1)" "Don't install devDependencies" "Add dependency to optionalDependencies" "Add dependency to devDependencies" "Don't install devDependencies" "Don't install anything" "Always request the latest versions from the registry & reinstall all dependenices" "Ignore manifest cache entirely" "Don't output anything" "Excessively verbose logging"
|
||||
|
||||
set -l bun_builtin_cmds dev create help bun upgrade discord run install remove add
|
||||
set -l bun_builtin_cmds_without_run dev create help bun upgrade discord install remove add
|
||||
set -l bun_builtin_cmds_without_bun dev create help upgrade run discord install remove add
|
||||
set -l bun_builtin_cmds_without_create dev help bun upgrade discord run install remove add
|
||||
set -l bun_builtin_cmds_without_install create dev help bun upgrade discord run remove add
|
||||
set -l bun_builtin_cmds_without_remove create dev help bun upgrade discord run install add
|
||||
set -l bun_builtin_cmds_without_add create dev help bun upgrade discord run remove install
|
||||
set -l bun_builtin_cmds_without_pm create dev help bun upgrade discord run
|
||||
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_run; and not __fish_seen_subcommand_from (__fish__get_bun_bins); and not __fish_seen_subcommand_from (__fish__get_bun_scripts); and __fish_use_subcommand" -a '(__fish__get_bun_scripts)' -d 'script'
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_run; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); and __fish_use_subcommand" -a '(__fish__get_bun_scripts)' -d 'script'
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_run; and not __fish_seen_subcommand_from (__fish__get_bun_bins); and not __fish_seen_subcommand_from (__fish__get_bun_scripts); and __fish_seen_subcommand_from run" -a '(__fish__get_bun_bins)' -d 'package bin'
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_run; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); and __fish_seen_subcommand_from run" -a '(__fish__get_bun_bins)' -d 'package bin'
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_run; and not __fish_seen_subcommand_from (__fish__get_bun_bins); and not __fish_seen_subcommand_from (__fish__get_bun_scripts); and __fish_seen_subcommand_from run" -a '(__fish__get_bun_scripts)' -d 'script'
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_run; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); and __fish_seen_subcommand_from run" -a '(__fish__get_bun_scripts)' -d 'script'
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_run; and not __fish_seen_subcommand_from (__fish__get_bun_bins); and not __fish_seen_subcommand_from (__fish__get_bun_scripts); and __fish_seen_subcommand_from run" -a '(__fish__get_bun_bun_js_files)' -d 'Bun.js'
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_run; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); and __fish_seen_subcommand_from run" -a '(__fish__get_bun_bun_js_files)' -d 'Bun.js'
|
||||
complete -c bun \
|
||||
-n "bun_fish_is_nth_token 1; and not __fish_seen_subcommand_from $bun_builtin_cmds; and not __fish_seen_subcommand_from (__fish__get_bun_bins); and not __fish_seen_subcommand_from (__fish__get_bun_scripts) and __fish_use_subcommand" -a 'run' -f -d 'Run a script or bin'
|
||||
-n "bun_fish_is_nth_token 1; and not __fish_seen_subcommand_from $bun_builtin_cmds; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts) and __fish_use_subcommand" -a 'run' -f -d 'Run a script or bin'
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from (__fish__get_bun_bins); and not __fish_seen_subcommand_from (__fish__get_bun_scripts);" --no-files -s 'u' -l 'origin' -r -d 'Server URL. Rewrites import paths'
|
||||
-n "not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts) install remove add;" --no-files -s 'u' -l 'origin' -r -d 'Server URL. Rewrites import paths'
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from (__fish__get_bun_bins); and not __fish_seen_subcommand_from (__fish__get_bun_scripts);" --no-files -s 'p' -l 'port' -r -d 'Port number to start server from'
|
||||
-n "not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts) install remove add;" --no-files -s 'p' -l 'port' -r -d 'Port number to start server from'
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from (__fish__get_bun_bins); and not __fish_seen_subcommand_from (__fish__get_bun_scripts);" --no-files -s 'd' -l 'define' -r -d 'Substitute K:V while parsing, e.g. --define process.env.NODE_ENV:\"development\"'
|
||||
-n "not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts) install remove add;" --no-files -s 'd' -l 'define' -r -d 'Substitute K:V while parsing, e.g. --define process.env.NODE_ENV:\"development\"'
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from (__fish__get_bun_bins); and not __fish_seen_subcommand_from (__fish__get_bun_scripts);" --no-files -s 'e' -l 'external' -r -d 'Exclude module from transpilation (can use * wildcards). ex: -e react'
|
||||
-n "not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts) install remove add;" --no-files -s 'e' -l 'external' -r -d 'Exclude module from transpilation (can use * wildcards). ex: -e react'
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from (__fish__get_bun_bins); and not __fish_seen_subcommand_from (__fish__get_bun_scripts);" --no-files -l 'use' -r -d 'Use a framework (ex: next)'
|
||||
-n "not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts) install remove add;" --no-files -l 'use' -r -d 'Use a framework (ex: next)'
|
||||
complete -c bun \
|
||||
-n "bun_fish_is_nth_token 1; and not __fish_seen_subcommand_from $bun_builtin_cmds; and not __fish_seen_subcommand_from (__fish__get_bun_bins); and not __fish_seen_subcommand_from (__fish__get_bun_scripts) and __fish_use_subcommand" -a 'dev' -d 'Start dev server'
|
||||
-n "bun_fish_is_nth_token 1; and not __fish_seen_subcommand_from $bun_builtin_cmds; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts) and __fish_use_subcommand" -a 'dev' -d 'Start dev server'
|
||||
complete -c bun \
|
||||
-n "bun_fish_is_nth_token 1; and not __fish_seen_subcommand_from $bun_builtin_cmds; and not __fish_seen_subcommand_from (__fish__get_bun_bins); and not __fish_seen_subcommand_from (__fish__get_bun_scripts) and __bun_command_count 1 and __fish_use_subcommand" -a 'create' -f -d 'Create a new project'
|
||||
-n "bun_fish_is_nth_token 1; and not __fish_seen_subcommand_from $bun_builtin_cmds; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts) and __bun_command_count 1 and __fish_use_subcommand" -a 'create' -f -d 'Create a new project'
|
||||
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_create next react; and not __fish_seen_subcommand_from (__fish__get_bun_bins); and not __fish_seen_subcommand_from (__fish__get_bun_scripts); and __fish_seen_subcommand_from create;" -a 'next' -d 'new Next.js project'
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_create next react; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); and __fish_seen_subcommand_from create;" -a 'next' -d 'new Next.js project'
|
||||
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_create next react; and not __fish_seen_subcommand_from (__fish__get_bun_bins); and not __fish_seen_subcommand_from (__fish__get_bun_scripts); and __fish_seen_subcommand_from create;" -a 'react' -d 'new React project'
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_create next react; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); and __fish_seen_subcommand_from create;" -a 'react' -d 'new React project'
|
||||
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds; and not __fish_seen_subcommand_from (__fish__get_bun_bins); and not __fish_seen_subcommand_from (__fish__get_bun_scripts); and __fish_use_subcommand" -a 'upgrade' -d 'Upgrade Bun to the latest version' -x
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); and __fish_use_subcommand" -a 'upgrade' -d 'Upgrade bun to the latest version' -x
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds; and not __fish_seen_subcommand_from (__fish__get_bun_bins); and not __fish_seen_subcommand_from (__fish__get_bun_scripts); and __fish_use_subcommand" -a '--help' -d 'See all commands and flags' -x
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); and __fish_use_subcommand" -a '--help' -d 'See all commands and flags' -x
|
||||
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds; and not __fish_seen_subcommand_from (__fish__get_bun_bins); and not __fish_seen_subcommand_from (__fish__get_bun_scripts); and __fish_use_subcommand" -l "version" -s "v" -a '--version' -d 'Bun\'s version' -x
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); and __fish_use_subcommand" -l "version" -s "v" -a '--version' -d 'bun\'s version' -x
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds; and not __fish_seen_subcommand_from (__fish__get_bun_bins); and not __fish_seen_subcommand_from (__fish__get_bun_scripts); and __fish_use_subcommand" -a 'discord' -d 'Open Bun\'s Discord server' -x
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); and __fish_use_subcommand" -a 'discord' -d 'Open bun\'s Discord server' -x
|
||||
|
||||
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_bun; and not __fish_seen_subcommand_from (__fish__get_bun_bins); and not __fish_seen_subcommand_from (__fish__get_bun_scripts); __fish_use_subcommand" -a 'bun' -d 'Generate a new bundle'
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_bun; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); __fish_use_subcommand" -a 'bun' -d 'Generate a new bundle'
|
||||
|
||||
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_bun; and not __fish_seen_subcommand_from (__fish__get_bun_bins); and not __fish_seen_subcommand_from (__fish__get_bun_scripts); and __fish_seen_subcommand_from bun" -F -d 'Bundle this'
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_bun; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); and __fish_seen_subcommand_from bun" -F -d 'Bundle this'
|
||||
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_create; and not __fish_seen_subcommand_from (__fish__get_bun_bins); and not __fish_seen_subcommand_from (__fish__get_bun_scripts); and __fish_seen_subcommand_from react; or __fish_seen_subcommand_from next" -F -d "Create in directory"
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_create; and not __fish_seen_subcommand_from (__fish__get_bun_bins); and not __fish_seen_subcommand_from (__fish__get_bun_scripts); and __fish_seen_subcommand_from react; or __fish_seen_subcommand_from next" -F -d "Create in directory"
|
||||
|
||||
|
||||
|
||||
complete -c bun \
|
||||
-n "bun_fish_is_nth_token 1; and not __fish_seen_subcommand_from $bun_builtin_cmds; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts) and __bun_command_count 1 and __fish_use_subcommand" -a 'install' -f -d 'Install packages from package.json'
|
||||
|
||||
complete -c bun \
|
||||
-n "bun_fish_is_nth_token 1; and not __fish_seen_subcommand_from $bun_builtin_cmds; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts) and __bun_command_count 1 and __fish_use_subcommand" -a 'add' -F -d 'Add a package to package.json'
|
||||
|
||||
complete -c bun \
|
||||
-n "bun_fish_is_nth_token 1; and not __fish_seen_subcommand_from $bun_builtin_cmds; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts) and __bun_command_count 1 and __fish_use_subcommand" -a 'remove' -F -d 'Remove a package from package.json'
|
||||
|
||||
complete -c bun \
|
||||
-n "bun_fish_is_nth_token 1; and not __fish_seen_subcommand_from $bun_builtin_cmds; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts) and __bun_command_count 1 and __fish_use_subcommand add remove" -F
|
||||
|
||||
|
||||
for i in (seq (count $bun_install_boolean_flags))
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_pm; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); and __fish_seen_subcommand_from install add remove;" -l "$bun_install_boolean_flags[$i]" -d "$bun_install_boolean_flags_descriptions[$i]"
|
||||
end
|
||||
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_pm; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); and __fish_seen_subcommand_from install add remove;" -l 'cwd' -d 'Change working directory'
|
||||
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_pm; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); and __fish_seen_subcommand_from install add remove;" -l 'cache-dir' -d 'Choose a cache directory (default: $HOME/.bun/install/cache)'
|
||||
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_pm; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); and __fish_seen_subcommand_from add;" -d 'Popular' -a '(__fish__get_bun_packages)'
|
||||
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_pm; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); and __fish_seen_subcommand_from add;" -d 'History' -a '(__history_completions)'
|
||||
|
||||
complete -c bun --no-files
|
||||
@@ -15,12 +15,52 @@ _bun() {
|
||||
IFS=$'\n' scripts_list=($(SHELL=zsh bun getcompletes i))
|
||||
compadd $scripts_list && ret=0
|
||||
|
||||
main_commands=('bun:"Generate a bundle" create:"Create a new project" dev:"Start a dev server" help:"Show command help" run:"Run a script or package bin" upgrade:"Upgrade to the latest version of Bun"')
|
||||
main_commands=('add\:"Add a dependency to package.json" bun\:"Generate a bundle" create\:"Create a new project" dev\:"Start a dev server" help\:"Show command help" install\:"Install packages from package.json" remove\:"Remove a dependency from package.json" run\:"Run a script or package bin" upgrade\:"Upgrade to the latest version of bun"')
|
||||
main_commands=($main_commands)
|
||||
_alternative "args:Bun:(($main_commands))"
|
||||
_alternative "args:bun:(($main_commands))"
|
||||
;;
|
||||
args)
|
||||
case $line[1] in
|
||||
add)
|
||||
|
||||
# ---- Command: add
|
||||
_arguments -s -C \
|
||||
'1: :->cmd1' \
|
||||
'*: :->package' \
|
||||
'--version[Show version and exit]' \
|
||||
'-V[Show version and exit]' \
|
||||
'--cwd[Change directory]:cwd' \
|
||||
'--help[Show command help]' \
|
||||
'-h[Show command help]' \
|
||||
'--registry[Change default registry (default: \$BUN_CONFIG_REGISTRY || \$npm_config_registry)]:registry' \
|
||||
'--token[Authentication token used for npm registry requests (default: \$npm_config_token)]:token' \
|
||||
'-y[Write a yarn.lock file (yarn v1)]' \
|
||||
'--yarn[Write a yarn.lock file (yarn v1)]' \
|
||||
'--production[Don'"'"'t install devDependencies]' \
|
||||
'--optional[Add dependency to optionalDependencies]' \
|
||||
'--development[Add dependency to devDependencies]' \
|
||||
'-d[Add dependency to devDependencies]' \
|
||||
'-p[Don'"'"'t install devDependencies]' \
|
||||
'--no-save[]' \
|
||||
'--dry-run[Don'"'"'t install anything]' \
|
||||
'--force[Always request the latest versions from the registry & reinstall all dependenices]' \
|
||||
'--lockfile[Store & load a lockfile at a specific filepath]:lockfile' \
|
||||
'--cache-dir[Store & load cached data from a specific directory path]:cache-dir' \
|
||||
'--no-cache[Ignore manifest cache entirely]' \
|
||||
'--silent[Don'"'"'t output anything]' \
|
||||
'--verbose[Excessively verbose logging]' \
|
||||
'--cwd[Set a specific cwd]:cwd' \
|
||||
'--backend[Platform-specific optimizations for installing dependencies]:backend:("clonefile" "copyfile" "hardlink" "clonefile_each_dir")' \
|
||||
'--link-native-bins[Link "bin" from a matching platform-specific dependency instead. Default: esbuild, turbo]:link-native-bins' &&
|
||||
ret=0
|
||||
|
||||
case $state in
|
||||
package)
|
||||
_bun_add_param_package_completion
|
||||
;;
|
||||
esac
|
||||
|
||||
;;
|
||||
bun)
|
||||
|
||||
# ---- Command: bun
|
||||
@@ -164,6 +204,53 @@ _bun() {
|
||||
ret=0
|
||||
|
||||
;;
|
||||
install)
|
||||
|
||||
# ---- Command: help install
|
||||
_arguments -s -C \
|
||||
'1: :->cmd1' \
|
||||
'2: :->cmd2' \
|
||||
'--version[Show version and exit]' \
|
||||
'-V[Show version and exit]' \
|
||||
'--cwd[Change directory]:cwd' \
|
||||
'--help[Show command help]' \
|
||||
'-h[Show command help]' \
|
||||
'--all[]' &&
|
||||
ret=0
|
||||
|
||||
;;
|
||||
|
||||
remove)
|
||||
|
||||
# ---- Command: help remove
|
||||
_arguments -s -C \
|
||||
'1: :->cmd1' \
|
||||
'2: :->cmd2' \
|
||||
'--version[Show version and exit]' \
|
||||
'-V[Show version and exit]' \
|
||||
'--cwd[Change directory]:cwd' \
|
||||
'--help[Show command help]' \
|
||||
'-h[Show command help]' \
|
||||
'--all[]' &&
|
||||
ret=0
|
||||
|
||||
;;
|
||||
|
||||
run)
|
||||
|
||||
# ---- Command: help run
|
||||
_arguments -s -C \
|
||||
'1: :->cmd1' \
|
||||
'2: :->cmd2' \
|
||||
'--version[Show version and exit]' \
|
||||
'-V[Show version and exit]' \
|
||||
'--cwd[Change directory]:cwd' \
|
||||
'--help[Show command help]' \
|
||||
'-h[Show command help]' \
|
||||
'--all[]' &&
|
||||
ret=0
|
||||
|
||||
;;
|
||||
|
||||
create)
|
||||
|
||||
@@ -275,6 +362,70 @@ _bun() {
|
||||
;;
|
||||
|
||||
esac
|
||||
;;
|
||||
install)
|
||||
|
||||
# ---- Command: install
|
||||
_arguments -s -C \
|
||||
'1: :->cmd1' \
|
||||
'--version[Show version and exit]' \
|
||||
'-V[Show version and exit]' \
|
||||
'--help[Show command help]' \
|
||||
'-h[Show command help]' \
|
||||
'--registry[Change default registry (default: \$BUN_CONFIG_REGISTRY || \$npm_config_registry)]:registry' \
|
||||
'--token[Authentication token used for npm registry requests (default: \$npm_config_token)]:token' \
|
||||
'-y[Write a yarn.lock file (yarn v1)]' \
|
||||
'--yarn[Write a yarn.lock file (yarn v1)]' \
|
||||
'--production[Don'"'"'t install devDependencies]' \
|
||||
'-p[Don'"'"'t install devDependencies]' \
|
||||
'--no-save[]' \
|
||||
'--dry-run[Don'"'"'t install anything]' \
|
||||
'--force[Always request the latest versions from the registry & reinstall all dependenices]' \
|
||||
'--lockfile[Store & load a lockfile at a specific filepath]:lockfile' \
|
||||
'--cache-dir[Store & load cached data from a specific directory path]:cache-dir' \
|
||||
'--no-cache[Ignore manifest cache entirely]' \
|
||||
'--silent[Don'"'"'t output anything]' \
|
||||
'--verbose[Excessively verbose logging]' \
|
||||
'--cwd[Set a specific cwd]:cwd' \
|
||||
'--backend[Platform-specific optimizations for installing dependencies]:backend:("clonefile" "copyfile" "hardlink" "clonefile_each_dir")' \
|
||||
'--link-native-bins[Link "bin" from a matching platform-specific dependency instead. Default: esbuild, turbo]:link-native-bins' &&
|
||||
ret=0
|
||||
|
||||
;;
|
||||
|
||||
remove)
|
||||
|
||||
# ---- Command: remove
|
||||
_arguments -s -C \
|
||||
'1: :->cmd1' \
|
||||
'*: :->package' \
|
||||
'--version[Show version and exit]' \
|
||||
'-V[Show version and exit]' \
|
||||
'--help[Show command help]' \
|
||||
'-h[Show command help]' \
|
||||
'--registry[Change default registry (default: \$BUN_CONFIG_REGISTRY || \$npm_config_registry)]:registry' \
|
||||
'--token[Authentication token used for npm registry requests (default: \$npm_config_token)]:token' \
|
||||
'-y[Write a yarn.lock file (yarn v1)]' \
|
||||
'--yarn[Write a yarn.lock file (yarn v1)]' \
|
||||
'--production[Don'"'"'t install devDependencies]' \
|
||||
'-p[Don'"'"'t install devDependencies]' \
|
||||
'--no-save[]' \
|
||||
'--dry-run[Don'"'"'t install anything]' \
|
||||
'--force[Always request the latest versions from the registry & reinstall all dependenices]' \
|
||||
'--lockfile[Store & load a lockfile at a specific filepath]:lockfile' \
|
||||
'--cache-dir[Store & load cached data from a specific directory path]:cache-dir' \
|
||||
'--no-cache[Ignore manifest cache entirely]' \
|
||||
'--silent[Don'"'"'t output anything]' \
|
||||
'--verbose[Excessively verbose logging]' \
|
||||
'--backend[Platform-specific optimizations for installing dependencies]:backend:("clonefile" "copyfile" "hardlink" "clonefile_each_dir")' \
|
||||
'--link-native-bins[Link "bin" from a matching platform-specific dependency instead. Default: esbuild, turbo]:link-native-bins' &&
|
||||
ret=0
|
||||
|
||||
case $state in
|
||||
package) ;;
|
||||
|
||||
esac
|
||||
|
||||
;;
|
||||
run)
|
||||
|
||||
@@ -347,6 +498,39 @@ _bun_run_param_script_completion() {
|
||||
fi
|
||||
}
|
||||
|
||||
_set_remove() {
|
||||
comm -23 <(echo $1 | sort | tr " " "\n") <(echo $2 | sort | tr " " "\n") 2>/dev/null
|
||||
}
|
||||
|
||||
_bun_add_param_package_completion() {
|
||||
|
||||
IFS=$'\n' inexact=($(history -n bun | grep -E "^bun add " | cut -c 9- | uniq))
|
||||
IFS=$'\n' exact=($($inexact | grep -E "^$words[$CURRENT]"))
|
||||
IFS=$'\n' packages=($(SHELL=zsh bun getcompletes a $words[$CURRENT]))
|
||||
|
||||
to_print=$inexact
|
||||
if [ ! -z "$exact" -a "$exact" != " " ]; then
|
||||
to_print=$exact
|
||||
fi
|
||||
|
||||
if [ ! -z "$to_print" -a "$to_print" != " " ]; then
|
||||
if [ ! -z "$packages" -a "$packages" != " " ]; then
|
||||
_describe -1 -t to_print 'History' to_print
|
||||
_describe -1 -t packages "Popular" packages
|
||||
return
|
||||
fi
|
||||
|
||||
_describe -1 -t to_print 'History' to_print
|
||||
return
|
||||
fi
|
||||
|
||||
if [ ! -z "$packages" -a "$packages" != " " ]; then
|
||||
_describe -1 -t packages "Popular" packages
|
||||
return
|
||||
fi
|
||||
|
||||
}
|
||||
|
||||
__bun_dynamic_comp() {
|
||||
local comp=""
|
||||
|
||||
|
||||
@@ -58,7 +58,7 @@ subcommands:
|
||||
summary: Use a framework, e.g. "next"
|
||||
|
||||
upgrade:
|
||||
summary: Upgrade to the latest version of Bun
|
||||
summary: Upgrade to the latest version of bun
|
||||
|
||||
dev:
|
||||
summary: Start a dev server
|
||||
@@ -102,6 +102,122 @@ subcommands:
|
||||
- &port name: port
|
||||
type: int
|
||||
summary: Port number
|
||||
install:
|
||||
summary: Install packages from package.json
|
||||
options:
|
||||
- name: registry
|
||||
type: string
|
||||
summary: "Change default registry (default: $BUN_CONFIG_REGISTRY || $npm_config_registry)"
|
||||
- name: token
|
||||
type: string
|
||||
summary: "Authentication token used for npm registry requests (default: $npm_config_token)"
|
||||
- y -- "Write a yarn.lock file (yarn v1)"
|
||||
- yarn -- "Write a yarn.lock file (yarn v1)"
|
||||
- production -- "Don't install devDependencies"
|
||||
- p -- "Don't install devDependencies"
|
||||
- no-save --
|
||||
- dry-run -- "Don't install anything"
|
||||
- force -- "Always request the latest versions from the registry & reinstall all dependenices"
|
||||
- name: lockfile
|
||||
type: string
|
||||
summary: "Store & load a lockfile at a specific filepath"
|
||||
- name: cache-dir
|
||||
type: string
|
||||
summary: "Store & load cached data from a specific directory path"
|
||||
- no-cache -- "Ignore manifest cache entirely"
|
||||
- silent -- "Don't output anything"
|
||||
- verbose -- "Excessively verbose logging"
|
||||
- name: cwd
|
||||
type: string
|
||||
summary: "Set a specific cwd"
|
||||
- name: backend
|
||||
summary: "Platform-specific optimizations for installing dependencies"
|
||||
type: string
|
||||
enum: ["clonefile", "copyfile", "hardlink", "clonefile_each_dir"]
|
||||
- name: link-native-bins
|
||||
summary: 'Link "bin" from a matching platform-specific dependency instead. Default: esbuild, turbo'
|
||||
add:
|
||||
summary: Add a dependency to package.json
|
||||
options:
|
||||
- name: registry
|
||||
type: string
|
||||
summary: "Change default registry (default: $BUN_CONFIG_REGISTRY || $npm_config_registry)"
|
||||
- name: token
|
||||
type: string
|
||||
summary: "Authentication token used for npm registry requests (default: $npm_config_token)"
|
||||
- y -- "Write a yarn.lock file (yarn v1)"
|
||||
- yarn -- "Write a yarn.lock file (yarn v1)"
|
||||
- production -- "Don't install devDependencies"
|
||||
- optional -- "Add dependency to optionalDependencies"
|
||||
- development -- "Add dependency to devDependencies"
|
||||
- d -- "Add dependency to devDependencies"
|
||||
- p -- "Don't install devDependencies"
|
||||
- no-save --
|
||||
- dry-run -- "Don't install anything"
|
||||
- force -- "Always request the latest versions from the registry & reinstall all dependenices"
|
||||
- no-cache -- "Ignore manifest cache entirely"
|
||||
- silent -- "Don't output anything"
|
||||
- verbose -- "Excessively verbose logging"
|
||||
- name: lockfile
|
||||
type: string
|
||||
summary: "Store & load a lockfile at a specific filepath"
|
||||
- name: cache-dir
|
||||
type: string
|
||||
summary: "Store & load cached data from a specific directory path"
|
||||
|
||||
- name: cwd
|
||||
type: string
|
||||
summary: "Set a specific cwd"
|
||||
- name: backend
|
||||
summary: "Platform-specific optimizations for installing dependencies"
|
||||
type: string
|
||||
enum: ["clonefile", "copyfile", "hardlink", "clonefile_each_dir"]
|
||||
- name: link-native-bins
|
||||
summary: 'Link "bin" from a matching platform-specific dependency instead. Default: esbuild, turbo'
|
||||
parameters:
|
||||
- name: package
|
||||
multiple: true
|
||||
type: string
|
||||
required: true
|
||||
remove:
|
||||
summary: Remove a dependency from package.json
|
||||
options:
|
||||
- name: registry
|
||||
type: string
|
||||
summary: "Change default registry (default: $BUN_CONFIG_REGISTRY || $npm_config_registry)"
|
||||
- name: token
|
||||
type: string
|
||||
summary: "Authentication token used for npm registry requests (default: $npm_config_token)"
|
||||
- y -- "Write a yarn.lock file (yarn v1)"
|
||||
- yarn -- "Write a yarn.lock file (yarn v1)"
|
||||
- production -- "Don't install devDependencies"
|
||||
- p -- "Don't install devDependencies"
|
||||
- no-save --
|
||||
- dry-run -- "Don't install anything"
|
||||
- force -- "Always request the latest versions from the registry & reinstall all dependenices"
|
||||
- name: lockfile
|
||||
type: string
|
||||
summary: "Store & load a lockfile at a specific filepath"
|
||||
- name: cache-dir
|
||||
type: string
|
||||
summary: "Store & load cached data from a specific directory path"
|
||||
- no-cache -- "Ignore manifest cache entirely"
|
||||
- silent -- "Don't output anything"
|
||||
- verbose -- "Excessively verbose logging"
|
||||
- name: cwd
|
||||
type: string
|
||||
summary: "Set a specific cwd"
|
||||
- name: backend
|
||||
summary: "Platform-specific optimizations for installing dependencies"
|
||||
type: string
|
||||
enum: ["clonefile", "copyfile", "hardlink", "clonefile_each_dir"]
|
||||
- name: link-native-bins
|
||||
summary: 'Link "bin" from a matching platform-specific dependency instead. Default: esbuild, turbo'
|
||||
parameters:
|
||||
- name: package
|
||||
multiple: true
|
||||
type: string
|
||||
required: true
|
||||
|
||||
parameters:
|
||||
- name: sasdasdds
|
||||
|
||||
44
docs/bun-flavored-toml.md
Normal file
44
docs/bun-flavored-toml.md
Normal file
@@ -0,0 +1,44 @@
|
||||
# Bun-flavored TOML
|
||||
|
||||
[TOML](https://toml.io/) is a minimal configuration file format designed to be easy for humans to read.
|
||||
|
||||
Bun implements a TOML parser with a few tweaks designed for better interopability with INI files and with JavaScript.
|
||||
|
||||
### ; and # are comments
|
||||
|
||||
In Bun-flavored TOML, comments start with `#` or `;`
|
||||
|
||||
```ini
|
||||
# This is a comment
|
||||
; This is also a comment
|
||||
```
|
||||
|
||||
This matches the behavior of INI files.
|
||||
|
||||
In TOML, comments start with `#`
|
||||
|
||||
```toml
|
||||
# This is a comment
|
||||
```
|
||||
|
||||
### String escape characters
|
||||
|
||||
Bun-flavored adds a few more escape sequences to TOML to work better with JavaScript strings.
|
||||
|
||||
```
|
||||
# Bun-flavored TOML extras
|
||||
\x{XX} - ASCII (U+00XX)
|
||||
\u{x+} - unicode (U+0000000X) - (U+XXXXXXXX)
|
||||
\v - vertical tab
|
||||
|
||||
# Regular TOML
|
||||
\b - backspace (U+0008)
|
||||
\t - tab (U+0009)
|
||||
\n - linefeed (U+000A)
|
||||
\f - form feed (U+000C)
|
||||
\r - carriage return (U+000D)
|
||||
\" - quote (U+0022)
|
||||
\\ - backslash (U+005C)
|
||||
\uXXXX - unicode (U+XXXX)
|
||||
\UXXXXXXXX - unicode (U+XXXXXXXX)
|
||||
```
|
||||
@@ -1,16 +1,19 @@
|
||||
#!/bin/bash
|
||||
|
||||
# The important part of this test: make sure that Bun.js successfully loads
|
||||
# The most likely reason for this test to fail is that something broke in the JavaScriptCore <> Bun integration
|
||||
killall -9 $(basename $BUN_BIN) || echo "";
|
||||
# The important part of this test: make sure that bun.js successfully loads
|
||||
# The most likely reason for this test to fail is that something broke in the JavaScriptCore <> bun integration
|
||||
killall -9 $(basename $BUN_BIN) || echo ""
|
||||
|
||||
rm -rf /tmp/next-app;
|
||||
mkdir -p /tmp/next-app;
|
||||
$BUN_BIN create next /tmp/next-app;
|
||||
cd /tmp/next-app;
|
||||
rm -rf /tmp/next-app
|
||||
mkdir -p /tmp/next-app
|
||||
$BUN_BIN create next /tmp/next-app
|
||||
|
||||
if (($?)); then
|
||||
echo "bun create failed"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
cd /tmp/next-app
|
||||
BUN_CRASH_WITHOUT_JIT=1 $BUN_BIN --port 8087 &
|
||||
sleep 0.005
|
||||
|
||||
sleep 0.1
|
||||
curl --fail http://localhost:8087/ && killall -9 $(basename $BUN_BIN) && echo "✅ bun create next passed."
|
||||
exit $?
|
||||
|
||||
|
||||
@@ -5,6 +5,13 @@ killall -9 $(basename $BUN_BIN) || echo ""
|
||||
rm -rf /tmp/react-app
|
||||
mkdir -p /tmp/react-app
|
||||
$BUN_BIN create react /tmp/react-app
|
||||
|
||||
|
||||
if (($?)); then
|
||||
echo "bun create failed"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
cd /tmp/react-app
|
||||
BUN_CRASH_WITHOUT_JIT=1 $BUN_BIN --port 8087 &
|
||||
sleep 0.005
|
||||
|
||||
53
integration/apps/bun-dev-index-html.sh
Normal file
53
integration/apps/bun-dev-index-html.sh
Normal file
@@ -0,0 +1,53 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
killall -9 $(basename $BUN_BIN) || echo ""
|
||||
|
||||
dir=$(mktemp -d --suffix=bun-dev-check)
|
||||
|
||||
index_content="<html><body>index.html</body></html>"
|
||||
bacon_content="<html><body>bacon.html</body></html>"
|
||||
js_content="console.log('hi')"
|
||||
|
||||
echo $index_content >"$dir/index.html"
|
||||
echo $js_content >"$dir/index.js"
|
||||
echo $bacon_content >"$dir/bacon.html"
|
||||
|
||||
cd $dir
|
||||
|
||||
$BUN_BIN --port 8087 &
|
||||
sleep 0.005
|
||||
|
||||
if [ "$(curl --fail http://localhost:8087/)" != "$index_content" ]; then
|
||||
echo "ERR: Expected '$index_content', got '$(curl --fail http://localhost:8087/)'"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "$(curl --fail http://localhost:8087/index)" != "$index_content" ]; then
|
||||
echo "ERR: Expected '$index_content', got '$(curl --fail http://localhost:8087/index)'"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "$(curl --fail http://localhost:8087/index.html)" != "$index_content" ]; then
|
||||
echo "ERR: Expected '$index_content', got '$(curl --fail http://localhost:8087/index.html)'"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "$(curl --fail http://localhost:8087/foo/foo)" != "$index_content" ]; then
|
||||
echo "ERR: Expected '$index_content', got '$(curl --fail http://localhost:8087/index.html)'"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "$(curl --fail http://localhost:8087/bacon)" != "$bacon_content" ]; then
|
||||
echo "ERR: Expected '$index_content', got '$(curl --fail http://localhost:8087/bacon)'"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "$(curl --fail http://localhost:8087/bacon.html)" != "$bacon_content" ]; then
|
||||
echo "ERR: Expected '$index_content', got '$(curl --fail http://localhost:8087/bacon.html)'"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
killall -9 $(basename $BUN_BIN) || echo ""
|
||||
echo "✅ bun dev index html check passed."
|
||||
55
integration/apps/bun-dev.sh
Normal file
55
integration/apps/bun-dev.sh
Normal file
@@ -0,0 +1,55 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
killall -9 $(basename $BUN_BIN) || echo ""
|
||||
|
||||
dir=$(mktemp -d --suffix=bun-dev-check)
|
||||
|
||||
index_content="<html><body>index.html</body></html>"
|
||||
bacon_content="<html><body>bacon.html</body></html>"
|
||||
js_content="console.log('hi')"
|
||||
|
||||
mkdir -p $dir/public
|
||||
|
||||
echo $index_content >"$dir/public/index.html"
|
||||
echo $js_content >"$dir/index.js"
|
||||
echo $bacon_content >"$dir/public/bacon.html"
|
||||
|
||||
cd $dir
|
||||
|
||||
$BUN_BIN --port 8087 &
|
||||
sleep 0.005
|
||||
|
||||
if [ "$(curl --fail http://localhost:8087/)" != "$index_content" ]; then
|
||||
echo "ERR: Expected '$index_content', got '$(curl --fail http://localhost:8087/)'"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "$(curl --fail http://localhost:8087/index)" != "$index_content" ]; then
|
||||
echo "ERR: Expected '$index_content', got '$(curl --fail http://localhost:8087/index)'"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "$(curl --fail http://localhost:8087/index.html)" != "$index_content" ]; then
|
||||
echo "ERR: Expected '$index_content', got '$(curl --fail http://localhost:8087/index.html)'"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "$(curl --fail http://localhost:8087/foo/foo)" != "$index_content" ]; then
|
||||
echo "ERR: Expected '$index_content', got '$(curl --fail http://localhost:8087/index.html)'"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "$(curl --fail http://localhost:8087/bacon)" != "$bacon_content" ]; then
|
||||
echo "ERR: Expected '$index_content', got '$(curl --fail http://localhost:8087/bacon)'"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "$(curl --fail http://localhost:8087/bacon.html)" != "$bacon_content" ]; then
|
||||
echo "ERR: Expected '$index_content', got '$(curl --fail http://localhost:8087/bacon.html)'"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
killall -9 $(basename $BUN_BIN) || echo ""
|
||||
echo "✅ bun dev index html check passed."
|
||||
79
integration/apps/bun-install.sh
Normal file
79
integration/apps/bun-install.sh
Normal file
@@ -0,0 +1,79 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
dir=$(mktemp -d --suffix=bun-install-test-1)
|
||||
|
||||
cd $dir
|
||||
${NPM_CLIENT:-$(which bun)} add react react-dom @types/react
|
||||
|
||||
echo "console.log(typeof require(\"react\").createElement);" >index.js
|
||||
chmod +x index.js
|
||||
|
||||
JS_RUNTIME=${JS_RUNTIME:-"$(which bun)"}
|
||||
|
||||
if [ "$JS_RUNTIME" == "node" ]; then
|
||||
result="$(node ./index.js)"
|
||||
fi
|
||||
|
||||
if [ "$JS_RUNTIME" != "node" ]; then
|
||||
result="$($JS_RUNTIME run ./index.js)"
|
||||
fi
|
||||
|
||||
echo "console.log(typeof require(\"react-dom\").render);" >index.js
|
||||
chmod +x index.js
|
||||
|
||||
JS_RUNTIME=${JS_RUNTIME:-"$(which bun)"}
|
||||
|
||||
if [ "$JS_RUNTIME" == "node" ]; then
|
||||
result="$(node ./index.js)"
|
||||
fi
|
||||
|
||||
if [ "$JS_RUNTIME" != "node" ]; then
|
||||
result="$($JS_RUNTIME run ./index.js)"
|
||||
fi
|
||||
|
||||
if [ "$result" != "function" ]; then
|
||||
echo "ERR: Expected 'function', got '$result'"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
${NPM_CLIENT:-$(which bun)} remove react-dom
|
||||
|
||||
if [ -d "node_modules/react-dom" ]; then
|
||||
echo "ERR: react-dom module still exists in $dir"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
yarn_dot_lock=$(${NPM_CLIENT:-$(which bun)} bun.lockb)
|
||||
|
||||
if echo "$yarn_dot_lock" | grep -q "react-dom"; then
|
||||
echo "ERR: react-dom module still exists in lockfile"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
${NPM_CLIENT:-$(which bun)} remove @types/react
|
||||
|
||||
yarn_dot_lock=$(${NPM_CLIENT:-$(which bun)} bun.lockb)
|
||||
|
||||
if echo "$yarn_dot_lock" | grep -q "@types/react"; then
|
||||
echo "ERR: @types/react module still exists in lockfile"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if echo "$yarn_dot_lock" | grep -q "@types/react"; then
|
||||
echo "ERR: @types/react module still exists in $dir"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
${NPM_CLIENT:-$(which bun)} remove react
|
||||
|
||||
if [ -d "node_modules/react" ]; then
|
||||
echo "ERR: react module still exists in $dir"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -d "bun.lockb" ]; then
|
||||
echo "ERR: empty bun.lockb should be deleted"
|
||||
exit 1
|
||||
fi
|
||||
@@ -0,0 +1 @@
|
||||
node_modules/always-bundled-module
|
||||
@@ -0,0 +1,10 @@
|
||||
module.exports = {
|
||||
default: 0xdeadbeef,
|
||||
default() {
|
||||
return "ok";
|
||||
},
|
||||
default: true,
|
||||
ok() {
|
||||
return true;
|
||||
},
|
||||
};
|
||||
@@ -0,0 +1,5 @@
|
||||
const __esModule = true;
|
||||
|
||||
export const foo = () => __esModule;
|
||||
|
||||
export { __esModule, foo as default };
|
||||
@@ -0,0 +1,4 @@
|
||||
{
|
||||
"name": "always-bundled-module",
|
||||
"version": "1.0.0"
|
||||
}
|
||||
13
integration/bunjs-only-snippets/bundled/entrypoint.ts
Normal file
13
integration/bunjs-only-snippets/bundled/entrypoint.ts
Normal file
@@ -0,0 +1,13 @@
|
||||
import "i-am-bundled/cjs";
|
||||
import "i-am-bundled/esm";
|
||||
import "always-bundled-module/esm";
|
||||
import "always-bundled-module/cjs";
|
||||
import { foo } from "i-am-bundled/esm";
|
||||
import { foo as foo2 } from "always-bundled-module/esm";
|
||||
import cJS from "always-bundled-module/cjs";
|
||||
|
||||
foo();
|
||||
foo2();
|
||||
cJS();
|
||||
|
||||
export default cJS();
|
||||
12
integration/bunjs-only-snippets/bundled/package.json
Normal file
12
integration/bunjs-only-snippets/bundled/package.json
Normal file
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"name": "to-bundle",
|
||||
"scripts": {
|
||||
"prebundle": "rm -rf node_modules; cp -r to_bundle_node_modules node_modules; ln -s always-bundled-module node_modules/always-bundled-module",
|
||||
"bundle": "${BUN_BIN:-$(which bun)} bun ./entrypoint.ts"
|
||||
},
|
||||
"bun": {
|
||||
"alwaysBundle": [
|
||||
"always-bundled-module"
|
||||
]
|
||||
}
|
||||
}
|
||||
10
integration/bunjs-only-snippets/bundled/to_bundle_node_modules/i-am-bundled/cjs.js
generated
Normal file
10
integration/bunjs-only-snippets/bundled/to_bundle_node_modules/i-am-bundled/cjs.js
generated
Normal file
@@ -0,0 +1,10 @@
|
||||
module.exports = {
|
||||
default: 0xdeadbeef,
|
||||
default() {
|
||||
return "ok";
|
||||
},
|
||||
default: true,
|
||||
ok() {
|
||||
return true;
|
||||
},
|
||||
};
|
||||
5
integration/bunjs-only-snippets/bundled/to_bundle_node_modules/i-am-bundled/esm.js
generated
Normal file
5
integration/bunjs-only-snippets/bundled/to_bundle_node_modules/i-am-bundled/esm.js
generated
Normal file
@@ -0,0 +1,5 @@
|
||||
const __esModule = true;
|
||||
|
||||
export const foo = () => __esModule;
|
||||
|
||||
export { __esModule, foo as default };
|
||||
4
integration/bunjs-only-snippets/bundled/to_bundle_node_modules/i-am-bundled/package.json
generated
Normal file
4
integration/bunjs-only-snippets/bundled/to_bundle_node_modules/i-am-bundled/package.json
generated
Normal file
@@ -0,0 +1,4 @@
|
||||
{
|
||||
"name": "i-am-bundled",
|
||||
"version": "1.0.0"
|
||||
}
|
||||
6
integration/bunjs-only-snippets/bundled/tsconfig.json
Normal file
6
integration/bunjs-only-snippets/bundled/tsconfig.json
Normal file
@@ -0,0 +1,6 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"paths": {},
|
||||
"baseUrl": "."
|
||||
}
|
||||
}
|
||||
58
integration/bunjs-only-snippets/console-log.js
Normal file
58
integration/bunjs-only-snippets/console-log.js
Normal file
@@ -0,0 +1,58 @@
|
||||
console.log("Hello World!");
|
||||
console.log(123);
|
||||
console.log(-123);
|
||||
console.log(123.567);
|
||||
console.log(-123.567);
|
||||
console.log(true);
|
||||
console.log(false);
|
||||
console.log(null);
|
||||
console.log(undefined);
|
||||
console.log(Symbol("Symbol Description"));
|
||||
console.log(new Date(2021, 12, 30, 666, 777, 888, 999));
|
||||
console.log([123, 456, 789]);
|
||||
console.log({ a: 123, b: 456, c: 789 });
|
||||
console.log({
|
||||
a: {
|
||||
b: {
|
||||
c: 123,
|
||||
},
|
||||
bacon: true,
|
||||
},
|
||||
});
|
||||
|
||||
console.log(new Promise(() => {}));
|
||||
|
||||
class Foo {}
|
||||
|
||||
console.log(() => {});
|
||||
console.log(Foo);
|
||||
console.log(new Foo());
|
||||
console.log(function foooo() {});
|
||||
|
||||
console.log(/FooRegex/);
|
||||
|
||||
console.error("uh oh");
|
||||
console.time("Check");
|
||||
|
||||
console.log(
|
||||
"Is it a bug or a feature that formatting numbers like %d is colored",
|
||||
123
|
||||
);
|
||||
console.log(globalThis);
|
||||
|
||||
console.log(
|
||||
"String %s should be 2nd word, 456 == %s and percent s %s == %s",
|
||||
"123",
|
||||
"456",
|
||||
"%s",
|
||||
"What",
|
||||
"okay"
|
||||
);
|
||||
|
||||
const infinteLoop = {
|
||||
foo: {},
|
||||
bar: {},
|
||||
};
|
||||
|
||||
infinteLoop.bar = infinteLoop;
|
||||
console.log(infinteLoop, "am");
|
||||
17
integration/bunjs-only-snippets/fetch.js
Normal file
17
integration/bunjs-only-snippets/fetch.js
Normal file
@@ -0,0 +1,17 @@
|
||||
import fs from "fs";
|
||||
|
||||
const urls = ["https://example.com", "http://example.com"];
|
||||
for (let url of urls) {
|
||||
const response = await fetch(url);
|
||||
const text = await response.text();
|
||||
|
||||
if (
|
||||
fs.readFileSync(
|
||||
import.meta.path.substring(0, import.meta.path.lastIndexOf("/")) +
|
||||
"/fetch.js.txt",
|
||||
"utf8"
|
||||
) !== text
|
||||
) {
|
||||
throw new Error("Expected fetch.js.txt to match snapshot");
|
||||
}
|
||||
}
|
||||
46
integration/bunjs-only-snippets/fetch.js.txt
Normal file
46
integration/bunjs-only-snippets/fetch.js.txt
Normal file
@@ -0,0 +1,46 @@
|
||||
<!doctype html>
|
||||
<html>
|
||||
<head>
|
||||
<title>Example Domain</title>
|
||||
|
||||
<meta charset="utf-8" />
|
||||
<meta http-equiv="Content-type" content="text/html; charset=utf-8" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
||||
<style type="text/css">
|
||||
body {
|
||||
background-color: #f0f0f2;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
font-family: -apple-system, system-ui, BlinkMacSystemFont, "Segoe UI", "Open Sans", "Helvetica Neue", Helvetica, Arial, sans-serif;
|
||||
|
||||
}
|
||||
div {
|
||||
width: 600px;
|
||||
margin: 5em auto;
|
||||
padding: 2em;
|
||||
background-color: #fdfdff;
|
||||
border-radius: 0.5em;
|
||||
box-shadow: 2px 3px 7px 2px rgba(0,0,0,0.02);
|
||||
}
|
||||
a:link, a:visited {
|
||||
color: #38488f;
|
||||
text-decoration: none;
|
||||
}
|
||||
@media (max-width: 700px) {
|
||||
div {
|
||||
margin: 0 auto;
|
||||
width: auto;
|
||||
}
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<div>
|
||||
<h1>Example Domain</h1>
|
||||
<p>This domain is for use in illustrative examples in documents. You may use this
|
||||
domain in literature without prior coordination or asking for permission.</p>
|
||||
<p><a href="https://www.iana.org/domains/example">More information...</a></p>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
23
integration/bunjs-only-snippets/fs-stream.js
Normal file
23
integration/bunjs-only-snippets/fs-stream.js
Normal file
@@ -0,0 +1,23 @@
|
||||
import { createReadStream, createWriteStream, readFileSync } from "fs";
|
||||
|
||||
await new Promise((resolve, reject) => {
|
||||
createReadStream("fs-stream.js")
|
||||
.pipe(createWriteStream("/tmp/fs-stream.copy.js"))
|
||||
.once("error", (err) => reject(err))
|
||||
.once("finish", () => {
|
||||
try {
|
||||
const copied = readFileSync("/tmp/fs-stream.copy.js", "utf8");
|
||||
const real = readFileSync("/tmp/fs-stream.js", "utf8");
|
||||
if (copied !== real) {
|
||||
reject(
|
||||
new Error("fs-stream.js is not the same as fs-stream.copy.js")
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
resolve(true);
|
||||
} catch (err) {
|
||||
reject(err);
|
||||
}
|
||||
});
|
||||
});
|
||||
67
integration/bunjs-only-snippets/fs.test.js
Normal file
67
integration/bunjs-only-snippets/fs.test.js
Normal file
@@ -0,0 +1,67 @@
|
||||
import { describe, it, expect } from "bun:test";
|
||||
import { mkdirSync, existsSync, readFileSync, writeFileSync } from "node:fs";
|
||||
|
||||
describe("mkdirSync", () => {
|
||||
it("should create a directory", () => {
|
||||
const tempdir = `/tmp/fs.test.js/${Date.now()}/1234/hi`;
|
||||
expect(existsSync(tempdir)).toBe(false);
|
||||
expect(tempdir.includes(mkdirSync(tempdir, { recursive: true }))).toBe(
|
||||
true
|
||||
);
|
||||
expect(existsSync(tempdir)).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe("readFileSync", () => {
|
||||
it("works", () => {
|
||||
const text = readFileSync(import.meta.dir + "/readFileSync.txt", "utf8");
|
||||
expect(text).toBe("File read successfully");
|
||||
});
|
||||
|
||||
it("returning Uint8Array works", () => {
|
||||
const text = readFileSync(import.meta.dir + "/readFileSync.txt");
|
||||
const encoded = [
|
||||
70, 105, 108, 101, 32, 114, 101, 97, 100, 32, 115, 117, 99, 99, 101, 115,
|
||||
115, 102, 117, 108, 108, 121,
|
||||
];
|
||||
for (let i = 0; i < encoded.length; i++) {
|
||||
expect(text[i]).toBe(encoded[i]);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
// writeFileSync: round-trip a string, a Uint8Array, and a bare ArrayBuffer
// through the filesystem and check the bytes survive unchanged.
describe("writeFileSync", () => {
  it("works", () => {
    const dest = `/tmp/${Date.now()}.writeFileSync.txt`;
    writeFileSync(dest, "File written successfully", "utf8");

    expect(readFileSync(dest, "utf8")).toBe("File written successfully");
  });

  it("returning Uint8Array works", () => {
    // UTF-8 bytes of "File written successfully".
    const payload = new Uint8Array([
      70, 105, 108, 101, 32, 119, 114, 105, 116, 116, 101, 110, 32, 115, 117,
      99, 99, 101, 115, 115, 102, 117, 108, 108, 121,
    ]);
    const dest = `/tmp/${Date.now()}.blob.writeFileSync.txt`;
    writeFileSync(dest, payload);
    const written = readFileSync(dest);

    payload.forEach((byte, index) => {
      expect(byte).toBe(written[index]);
    });
  });

  it("returning ArrayBuffer works", () => {
    // Same bytes, but handed to writeFileSync as a raw ArrayBuffer.
    const payload = new Uint8Array([
      70, 105, 108, 101, 32, 119, 114, 105, 116, 116, 101, 110, 32, 115, 117,
      99, 99, 101, 115, 115, 102, 117, 108, 108, 121,
    ]);
    const dest = `/tmp/${Date.now()}.blob2.writeFileSync.txt`;
    writeFileSync(dest, payload.buffer);
    const written = readFileSync(dest);

    payload.forEach((byte, index) => {
      expect(byte).toBe(written[index]);
    });
  });
});
|
||||
13
integration/bunjs-only-snippets/import-meta.test.js
Normal file
13
integration/bunjs-only-snippets/import-meta.test.js
Normal file
@@ -0,0 +1,13 @@
|
||||
import { it, expect } from "bun:test";
|
||||
|
||||
// import.meta should expose the absolute directory and file path of this
// module; only the repo-relative suffix is asserted so the checkout location
// doesn't matter.
const meta = import.meta;

it("import.meta.dir", () => {
  const suffix = "/bun/integration/bunjs-only-snippets";
  expect(meta.dir.endsWith(suffix)).toBe(true);
});

it("import.meta.path", () => {
  const suffix = "/bun/integration/bunjs-only-snippets/import-meta.test.js";
  expect(meta.path.endsWith(suffix)).toBe(true);
});
||||
7
integration/bunjs-only-snippets/macro-check.js
Normal file
7
integration/bunjs-only-snippets/macro-check.js
Normal file
@@ -0,0 +1,7 @@
|
||||
// Macro helpers used by transpiler.test.js: each receives the macro call
// node and returns its second argument expression.
export function keepSecondArgument(args) {
  const { arguments: callArguments } = args;
  return callArguments[1];
}

export function bacon(args) {
  const { arguments: callArguments } = args;
  return callArguments[1];
}
|
||||
76
integration/bunjs-only-snippets/microtask.js
Normal file
76
integration/bunjs-only-snippets/microtask.js
Normal file
@@ -0,0 +1,76 @@
|
||||
// You can verify this test is correct by copy pasting this into a browser's console and checking it doesn't throw an error.
// `run` counts fired microtask callbacks; each callback asserts its own
// position in the global FIFO order (a nested queueMicrotask lands after
// everything already queued at that moment).
var run = 0;

await new Promise((resolve, reject) => {
  // Queued 1st -> runs at position 0.
  queueMicrotask(() => {
    if (run++ != 0) {
      reject(new Error("Microtask execution order is wrong: " + run));
    }
    // Queued while positions 1 and 2 are already pending -> position 3.
    queueMicrotask(() => {
      if (run++ != 3) {
        reject(new Error("Microtask execution order is wrong: " + run));
      }
    });
  });
  // Queued 2nd -> position 1.
  queueMicrotask(() => {
    if (run++ != 1) {
      reject(new Error("Microtask execution order is wrong: " + run));
    }
    // Queued after position 3's task -> position 4.
    queueMicrotask(() => {
      if (run++ != 4) {
        reject(new Error("Microtask execution order is wrong: " + run));
      }

      // Third nesting level -> position 6.
      queueMicrotask(() => {
        if (run++ != 6) {
          reject(new Error("Microtask execution order is wrong: " + run));
        }
      });
    });
  });
  // Queued 3rd -> position 2.
  queueMicrotask(() => {
    if (run++ != 2) {
      reject(new Error("Microtask execution order is wrong: " + run));
    }
    // Position 5.
    queueMicrotask(() => {
      if (run++ != 5) {
        reject(new Error("Microtask execution order is wrong: " + run));
      }

      // Final task (position 7) settles the Promise so the top-level await
      // completes only after the whole ordering has been verified.
      queueMicrotask(() => {
        if (run++ != 7) {
          reject(new Error("Microtask execution order is wrong: " + run));
        }
        resolve(true);
      });
    });
  });
});

{
  // Per the WHATWG spec, a non-callable argument must throw a TypeError.
  var passed = false;
  try {
    queueMicrotask(1234);
  } catch (exception) {
    passed = exception instanceof TypeError;
  }

  if (!passed)
    throw new Error(
      "queueMicrotask should throw a TypeError if the argument is not a function"
    );
}

{
  // A missing argument must also throw a TypeError.
  var passed = false;
  try {
    queueMicrotask();
  } catch (exception) {
    passed = exception instanceof TypeError;
  }

  if (!passed)
    throw new Error(
      "queueMicrotask should throw a TypeError if the argument is empty"
    );
}
|
||||
457
integration/bunjs-only-snippets/path.test.js
Normal file
457
integration/bunjs-only-snippets/path.test.js
Normal file
@@ -0,0 +1,457 @@
|
||||
const { file } = import.meta;
|
||||
|
||||
import { describe, it, expect } from "bun:test";
|
||||
import * as path from "node:path";
|
||||
import assert from "assert";
|
||||
|
||||
// Wrap assert.strictEqual so that every comparison also registers a passing
// expectation with bun:test (keeps the runner's assertion count non-zero).
const strictEqual = (...assertionArgs) => {
  assert.strictEqual.apply(assert, assertionArgs);
  expect(true).toBe(true);
};
|
||||
|
||||
it("path.basename", () => {
  // Table-driven: each entry is [argument list for basename(), expected result].
  const defaultCases = [
    [[file], "path.test.js"],
    [[file, ".js"], "path.test"],
    [[".js", ".js"], ""],
    [[""], ""],
    [["/dir/basename.ext"], "basename.ext"],
    [["/basename.ext"], "basename.ext"],
    [["basename.ext"], "basename.ext"],
    [["basename.ext/"], "basename.ext"],
    [["basename.ext//"], "basename.ext"],
    [["aaa/bbb", "/bbb"], "bbb"],
    [["aaa/bbb", "a/bbb"], "bbb"],
    [["aaa/bbb", "bbb"], "bbb"],
    [["aaa/bbb//", "bbb"], "bbb"],
    [["aaa/bbb", "bb"], "b"],
    [["aaa/bbb", "b"], "bb"],
    [["/aaa/bbb", "/bbb"], "bbb"],
    [["/aaa/bbb", "a/bbb"], "bbb"],
    [["/aaa/bbb", "bbb"], "bbb"],
    [["/aaa/bbb//", "bbb"], "bbb"],
    [["/aaa/bbb", "bb"], "b"],
    [["/aaa/bbb", "b"], "bb"],
    [["/aaa/bbb"], "bbb"],
    [["/aaa/"], "aaa"],
    [["/aaa/b"], "b"],
    [["/a/b"], "b"],
    [["//a"], "a"],
    [["a", "a"], ""],
  ];
  for (const [args, expected] of defaultCases) {
    strictEqual(path.basename(...args), expected);
  }

  // On Windows a backslash acts as a path separator.
  const win32Cases = [
    [["\\dir\\basename.ext"], "basename.ext"],
    [["\\basename.ext"], "basename.ext"],
    [["basename.ext"], "basename.ext"],
    [["basename.ext\\"], "basename.ext"],
    [["basename.ext\\\\"], "basename.ext"],
    [["foo"], "foo"],
    [["aaa\\bbb", "\\bbb"], "bbb"],
    [["aaa\\bbb", "a\\bbb"], "bbb"],
    [["aaa\\bbb", "bbb"], "bbb"],
    [["aaa\\bbb\\\\\\\\", "bbb"], "bbb"],
    [["aaa\\bbb", "bb"], "b"],
    [["aaa\\bbb", "b"], "bb"],
    [["C:"], ""],
    [["C:."], "."],
    [["C:\\"], ""],
    [["C:\\dir\\base.ext"], "base.ext"],
    [["C:\\basename.ext"], "basename.ext"],
    [["C:basename.ext"], "basename.ext"],
    [["C:basename.ext\\"], "basename.ext"],
    [["C:basename.ext\\\\"], "basename.ext"],
    [["C:foo"], "foo"],
    [["file:stream"], "file:stream"],
    [["a", "a"], ""],
  ];
  for (const [args, expected] of win32Cases) {
    strictEqual(path.win32.basename(...args), expected);
  }

  // On unix a backslash is just treated as any other character.
  const posixBackslashCases = [
    [["\\dir\\basename.ext"], "\\dir\\basename.ext"],
    [["\\basename.ext"], "\\basename.ext"],
    [["basename.ext"], "basename.ext"],
    [["basename.ext\\"], "basename.ext\\"],
    [["basename.ext\\\\"], "basename.ext\\\\"],
    [["foo"], "foo"],
  ];
  for (const [args, expected] of posixBackslashCases) {
    strictEqual(path.posix.basename(...args), expected);
  }

  // POSIX filenames may include control characters
  // c.f. http://www.dwheeler.com/essays/fixing-unix-linux-filenames.html
  const controlCharFilename = `Icon${String.fromCharCode(13)}`;
  strictEqual(
    path.posix.basename(`/a/b/${controlCharFilename}`),
    controlCharFilename
  );
});
|
||||
|
||||
it("path.join", () => {
  // Failure descriptions are collected and asserted empty at the end so every
  // mismatching case is reported in a single run.
  const failures = [];
  const backslashRE = /\\/g;

  // Each entry: [[join implementations], [[argument list, expected], ...]].
  const joinTests = [
    [
      [path.posix.join],
      // Arguments result
      [
        [[".", "x/b", "..", "/b/c.js"], "x/b/c.js"],
        // [[], '.'],
        [["/.", "x/b", "..", "/b/c.js"], "/x/b/c.js"],
        [["/foo", "../../../bar"], "/bar"],
        [["foo", "../../../bar"], "../../bar"],
        [["foo/", "../../../bar"], "../../bar"],
        [["foo/x", "../../../bar"], "../bar"],
        [["foo/x", "./bar"], "foo/x/bar"],
        [["foo/x/", "./bar"], "foo/x/bar"],
        [["foo/x/", ".", "bar"], "foo/x/bar"],
        [["./"], "./"],
        [[".", "./"], "./"],
        [[".", ".", "."], "."],
        [[".", "./", "."], "."],
        [[".", "/./", "."], "."],
        [[".", "/////./", "."], "."],
        [["."], "."],
        [["", "."], "."],
        [["", "foo"], "foo"],
        [["foo", "/bar"], "foo/bar"],
        [["", "/foo"], "/foo"],
        [["", "", "/foo"], "/foo"],
        [["", "", "foo"], "foo"],
        [["foo", ""], "foo"],
        [["foo/", ""], "foo/"],
        [["foo", "", "/bar"], "foo/bar"],
        [["./", "..", "/foo"], "../foo"],
        [["./", "..", "..", "/foo"], "../../foo"],
        [[".", "..", "..", "/foo"], "../../foo"],
        [["", "..", "..", "/foo"], "../../foo"],
        [["/"], "/"],
        [["/", "."], "/"],
        [["/", ".."], "/"],
        [["/", "..", ".."], "/"],
        [[""], "."],
        [["", ""], "."],
        [[" /foo"], " /foo"],
        [[" ", "foo"], " /foo"],
        [[" ", "."], " "],
        [[" ", "/"], " /"],
        [[" ", ""], " "],
        [["/", "foo"], "/foo"],
        [["/", "/foo"], "/foo"],
        [["/", "//foo"], "/foo"],
        [["/", "", "/foo"], "/foo"],
        [["", "/", "foo"], "/foo"],
        [["", "/", "/foo"], "/foo"],
      ],
    ],
  ];

  // NOTE(review): the Windows-specific join cases from Node's suite
  // (UNC paths, drive-relative paths) are commented out upstream pending
  // path.win32.join support; the full table lives in Node's
  // test/parallel/test-path-join.js.
  joinTests.forEach((test) => {
    // Normalize the first element to an array of join implementations.
    if (!Array.isArray(test[0])) test[0] = [test[0]];
    test[0].forEach((join) => {
      test[1].forEach((test) => {
        const actual = join.apply(null, test[0]);
        const expected = test[1];
        // For non-Windows specific tests with the Windows join(), we need to try
        // replacing the slashes since the non-Windows specific tests' `expected`
        // use forward slashes
        let actualAlt;
        let os;
        if (join === path.win32.join) {
          actualAlt = actual.replace(backslashRE, "/");
          os = "win32";
        } else {
          os = "posix";
        }
        if (actual !== expected && actualAlt !== expected) {
          const delimiter = test[0].map(JSON.stringify).join(",");
          const message = `path.${os}.join(${delimiter})\n expect=${JSON.stringify(
            expected
          )}\n actual=${JSON.stringify(actual)}`;
          failures.push(`\n${message}`);
        }
      });
    });
  });
  strictEqual(failures.length, 0, failures.join(""));
});
|
||||
|
||||
it("path.relative", () => {
  // Failure descriptions are collected and asserted empty at the end so every
  // mismatching case is reported in a single run.
  const failures = [];

  // Each entry: [relative implementation, [[from, to, expected], ...]].
  // NOTE(review): the win32 relative cases from Node's suite are commented
  // out upstream pending path.win32.relative support; the full table lives in
  // Node's test/parallel/test-path-relative.js.
  const relativeTests = [
    [
      path.posix.relative,
      // Arguments result
      [
        ["/var/lib", "/var", ".."],
        ["/var/lib", "/bin", "../../bin"],
        ["/var/lib", "/var/lib", ""],
        ["/var/lib", "/var/apache", "../apache"],
        ["/var/", "/var/lib", "lib"],
        ["/", "/var/lib", "var/lib"],
        ["/foo/test", "/foo/test/bar/package.json", "bar/package.json"],
        ["/Users/a/web/b/test/mails", "/Users/a/web/b", "../.."],
        ["/foo/bar/baz-quux", "/foo/bar/baz", "../baz"],
        ["/foo/bar/baz", "/foo/bar/baz-quux", "../baz-quux"],
        ["/baz-quux", "/baz", "../baz"],
        ["/baz", "/baz-quux", "../baz-quux"],
        ["/page1/page2/foo", "/", "../../.."],
      ],
    ],
  ];

  relativeTests.forEach((test) => {
    const relative = test[0];
    // Inner `test` intentionally shadows the outer one: it is the
    // [from, to, expected] triple.
    test[1].forEach((test) => {
      const actual = relative(test[0], test[1]);
      const expected = test[2];
      if (actual !== expected) {
        const os = relative === path.win32.relative ? "win32" : "posix";
        const message = `path.${os}.relative(${test
          .slice(0, 2)
          .map(JSON.stringify)
          .join(",")})\n expect=${JSON.stringify(
          expected
        )}\n actual=${JSON.stringify(actual)}`;
        failures.push(`\n${message}`);
      }
    });
  });

  strictEqual(failures.length, 0, failures.join(""));
  expect(true).toBe(true);
});
|
||||
|
||||
// path.posix.normalize: dot-segment collapsing, duplicate-slash removal and
// ".." resolution, mirroring Node's test-path-normalize suite.
// NOTE(review): the win32 normalize cases from Node's suite are commented out
// upstream pending path.win32.normalize support; the full table lives in
// Node's test/parallel/test-path-normalize.js.
it("path.normalize", () => {
  strictEqual(
    path.posix.normalize("./fixtures///b/../b/c.js"),
    "fixtures/b/c.js"
  );
  strictEqual(path.posix.normalize("/foo/../../../bar"), "/bar");
  strictEqual(path.posix.normalize("a//b//../b"), "a/b");
  strictEqual(path.posix.normalize("a//b//./c"), "a/b/c");
  strictEqual(path.posix.normalize("a//b//."), "a/b");
  strictEqual(path.posix.normalize("/a/b/c/../../../x/y/z"), "/x/y/z");
  strictEqual(path.posix.normalize("///..//./foo/.//bar"), "/foo/bar");
  strictEqual(path.posix.normalize("bar/foo../../"), "bar/");
  strictEqual(path.posix.normalize("bar/foo../.."), "bar");
  strictEqual(path.posix.normalize("bar/foo../../baz"), "bar/baz");
  strictEqual(path.posix.normalize("bar/foo../"), "bar/foo../");
  strictEqual(path.posix.normalize("bar/foo.."), "bar/foo..");
  // FIX: removed stray console.log("A") / console.log("B") debug markers that
  // were left over from bisecting a failure around the next two assertions.
  strictEqual(path.posix.normalize("../foo../../../bar"), "../../bar");
  strictEqual(path.posix.normalize("../.../.././.../../../bar"), "../../bar");
  strictEqual(
    path.posix.normalize("../../../foo/../../../bar"),
    "../../../../../bar"
  );
  strictEqual(
    path.posix.normalize("../../../foo/../../../bar/../../"),
    "../../../../../../"
  );
  strictEqual(
    path.posix.normalize("../foobar/barfoo/foo/../../../bar/../../"),
    "../../"
  );
  strictEqual(
    path.posix.normalize("../.../../foobar/../../../bar/../../baz"),
    "../../../../baz"
  );
  // A backslash is an ordinary character on posix, not a separator.
  strictEqual(path.posix.normalize("foo/bar\\baz"), "foo/bar\\baz");
});
|
||||
|
||||
it("path.resolve", () => {
  // Failure descriptions are collected and asserted empty at the end so every
  // mismatching case is reported in a single run.
  const failures = [];
  const slashRE = /\//g;
  const backslashRE = /\\/g;

  // Each entry: [resolve implementation, [[argument list, expected], ...]].
  // NOTE(review): the win32 resolve cases from Node's suite are commented out
  // upstream pending path.win32.resolve support; the full table lives in
  // Node's test/parallel/test-path-resolve.js.
  const resolveTests = [
    [
      path.posix.resolve,
      // Arguments result
      [
        [["/var/lib", "../", "file/"], "/var/file"],
        [["/var/lib", "/../", "file/"], "/file"],
        [["a/b/c/", "../../.."], process.cwd()],
        [["."], process.cwd()],
        [["/some/dir", ".", "/absolute/"], "/absolute"],
        [
          ["/foo/tmp.3/", "../tmp.3/cycles/root.js"],
          "/foo/tmp.3/cycles/root.js",
        ],
      ],
    ],
  ];
  // Hard-coded: this suite only runs on darwin/linux (see process.js check).
  const isWindows = false;
  resolveTests.forEach(([resolve, tests]) => {
    tests.forEach(([test, expected]) => {
      const actual = resolve.apply(null, test);
      let actualAlt;
      const os = resolve === path.win32.resolve ? "win32" : "posix";
      // When checking the other platform's implementation, also accept the
      // result with separators flipped, since `expected` uses the native form.
      if (resolve === path.win32.resolve && !isWindows)
        actualAlt = actual.replace(backslashRE, "/");
      else if (resolve !== path.win32.resolve && isWindows)
        actualAlt = actual.replace(slashRE, "\\");

      const message = `path.${os}.resolve(${test
        .map(JSON.stringify)
        .join(",")})\n expect=${JSON.stringify(
        expected
      )}\n actual=${JSON.stringify(actual)}`;
      if (actual !== expected && actualAlt !== expected) failures.push(message);
    });
  });
  strictEqual(failures.length, 0, failures.join("\n"));
});
|
||||
91
integration/bunjs-only-snippets/process-nexttick.js
Normal file
91
integration/bunjs-only-snippets/process-nexttick.js
Normal file
@@ -0,0 +1,91 @@
|
||||
// You can verify this test is correct by copy pasting this into a browser's console and checking it doesn't throw an error.
// `run` counts fired callbacks; each callback asserts its own position in the
// global FIFO order (a nested call lands after everything already queued).
var run = 0;

// Reuse the queueMicrotask ordering test verbatim, but drive it through
// process.nextTick, which must provide the same FIFO guarantees.
var queueMicrotask = process.nextTick;

await new Promise((resolve, reject) => {
  // Queued 1st -> runs at position 0.
  queueMicrotask(() => {
    if (run++ != 0) {
      reject(new Error("Microtask execution order is wrong: " + run));
    }
    // Queued while positions 1 and 2 are already pending -> position 3.
    queueMicrotask(() => {
      if (run++ != 3) {
        reject(new Error("Microtask execution order is wrong: " + run));
      }
    });
  });
  // Queued 2nd -> position 1.
  queueMicrotask(() => {
    if (run++ != 1) {
      reject(new Error("Microtask execution order is wrong: " + run));
    }
    // Position 4.
    queueMicrotask(() => {
      if (run++ != 4) {
        reject(new Error("Microtask execution order is wrong: " + run));
      }

      // Third nesting level -> position 6.
      queueMicrotask(() => {
        if (run++ != 6) {
          reject(new Error("Microtask execution order is wrong: " + run));
        }
      });
    });
  });
  // Queued 3rd -> position 2.
  queueMicrotask(() => {
    if (run++ != 2) {
      reject(new Error("Microtask execution order is wrong: " + run));
    }
    // Position 5.
    queueMicrotask(() => {
      if (run++ != 5) {
        reject(new Error("Microtask execution order is wrong: " + run));
      }

      // Final task (position 7) settles the Promise so the top-level await
      // completes only after the whole ordering has been verified.
      queueMicrotask(() => {
        if (run++ != 7) {
          reject(new Error("Microtask execution order is wrong: " + run));
        }
        resolve(true);
      });
    });
  });
});

{
  // A non-callable argument must throw a TypeError, like queueMicrotask.
  var passed = false;
  try {
    queueMicrotask(1234);
  } catch (exception) {
    passed = exception instanceof TypeError;
  }

  if (!passed)
    throw new Error(
      "queueMicrotask should throw a TypeError if the argument is not a function"
    );
}

{
  // A missing argument must also throw a TypeError.
  var passed = false;
  try {
    queueMicrotask();
  } catch (exception) {
    passed = exception instanceof TypeError;
  }

  if (!passed)
    throw new Error(
      "queueMicrotask should throw a TypeError if the argument is empty"
    );
}

// Unlike queueMicrotask, process.nextTick forwards extra arguments to the
// callback; verify they arrive in order.
await new Promise((resolve, reject) => {
  process.nextTick(
    (first, second) => {
      console.log(first, second);
      if (first !== 12345 || second !== "hello")
        reject(new Error("process.nextTick called with wrong arguments"));
      resolve(true);
    },
    12345,
    "hello"
  );
});
|
||||
48
integration/bunjs-only-snippets/process.js
Normal file
48
integration/bunjs-only-snippets/process.js
Normal file
@@ -0,0 +1,48 @@
|
||||
// this property isn't implemented yet but it should at least return a string
// process.isBun is truthy only under Bun; used here to skip Bun-only checks
// when this snippet is run under Node for comparison.
const isNode = !process.isBun;

if (!isNode && process.title !== "bun")
  throw new Error("process.title is not 'bun'");

// assumes the snippet runs in a login environment where USER is exported —
// TODO confirm for CI containers.
if (typeof process.env.USER !== "string")
  throw new Error("process.env is not an object");

if (process.env.USER.length === 0)
  throw new Error("process.env is missing a USER property");

if (process.platform !== "darwin" && process.platform !== "linux")
  throw new Error("process.platform is invalid");

if (isNode) throw new Error("process.isBun is invalid");

// partially to test it doesn't crash due to various strange types
process.env.BACON = "yummy";
if (process.env.BACON !== "yummy") {
  throw new Error("process.env is not writable");
}

delete process.env.BACON;
if (typeof process.env.BACON !== "undefined") {
  throw new Error("process.env is not deletable");
}

process.env.BACON = "yummy";
if (process.env.BACON !== "yummy") {
  throw new Error("process.env is not re-writable");
}

// env must JSON round-trip as plain data...
if (JSON.parse(JSON.stringify(process.env)).BACON !== "yummy") {
  throw new Error("process.env is not serializable");
}

// ...and any internal toJSON hook must not leak into the serialized output.
if (typeof JSON.parse(JSON.stringify(process.env)).toJSON !== "undefined") {
  throw new Error("process.env should call toJSON to hide its internal state");
}

// Spread `env` off so the (large) environment doesn't drown out the rest of
// the process object in the log output.
var { env, ...proces } = process;
console.log(JSON.stringify(proces, null, 2));
console.log(proces);

// cwd()/chdir() smoke test; chdir's return value is logged as-is.
console.log("CWD", process.cwd());
console.log("SET CWD", process.chdir("../"));
console.log("CWD", process.cwd());
|
||||
1
integration/bunjs-only-snippets/readFileSync.txt
Normal file
1
integration/bunjs-only-snippets/readFileSync.txt
Normal file
@@ -0,0 +1 @@
|
||||
File read successfully
|
||||
9
integration/bunjs-only-snippets/readdir.js
Normal file
9
integration/bunjs-only-snippets/readdir.js
Normal file
@@ -0,0 +1,9 @@
|
||||
// Repeatedly list the current directory (ITERATIONS env var controls the
// count, defaulting to 1), then print one final listing.
const { readdirSync } = require("fs");

const iterations = parseInt(process.env.ITERATIONS || "1", 10) || 1;

let remaining = iterations;
while (remaining-- > 0) {
  readdirSync(".");
}

console.log(readdirSync("."));
|
||||
@@ -1,7 +1,7 @@
|
||||
const interval = 0.5;
|
||||
const interval = 0.01;
|
||||
const now = performance.now();
|
||||
console.time("Slept");
|
||||
Bun.sleep(interval);
|
||||
Bun.sleepSync(interval);
|
||||
const elapsed = performance.now() - now;
|
||||
if (elapsed < interval) {
|
||||
throw new Error("Didn't sleep");
|
||||
|
||||
51
integration/bunjs-only-snippets/some-fs.js
Normal file
51
integration/bunjs-only-snippets/some-fs.js
Normal file
@@ -0,0 +1,51 @@
|
||||
// Stress-test mkdirSync/existsSync: build a deep, run-unique directory path
// under /tmp, create count*count sibling directories, and time the work.
const { mkdirSync, existsSync } = require("fs");

// Fall back to Node's perf_hooks when the global `performance` is missing.
var performance = globalThis.performance;
if (!performance) {
  try {
    performance = require("perf_hooks").performance;
  } catch (e) {}
}

const count = parseInt(process.env.ITERATIONS || "1", 10) || 1;
var tempdir = `/tmp/some-fs-test/dir/${Date.now()}/hi`;

// Deepen the path by one base-36 segment per iteration.
for (let i = 0; i < count; i++) {
  tempdir += `/${i.toString(36)}`;
}

// The Date.now() component should make the path unique per run.
if (existsSync(tempdir)) {
  throw new Error(
    `existsSync reports ${tempdir} exists, but it probably does not`
  );
}

var origTempDir = tempdir;
// Pre-compute every directory name so the timed loop below measures only
// mkdirSync itself.
var iterations = new Array(count * count).fill("");
var total = 0;
for (let i = 0; i < count; i++) {
  for (let j = 0; j < count; j++) {
    iterations[total++] = `${origTempDir}/${j.toString(36)}-${i.toString(36)}`;
  }
}
tempdir = origTempDir;
mkdirSync(origTempDir, { recursive: true });
// Non-recursive on purpose: every parent already exists at this point.
const recurse = { recursive: false };
const start = performance.now();
for (let i = 0; i < total; i++) {
  mkdirSync(iterations[i], recurse);
}

console.log("MKDIR " + total + " depth took:", performance.now() - start, "ms");

if (!existsSync(tempdir)) {
  throw new Error(
    "Expected directory to exist after mkdirSync, but it doesn't"
  );
}

// With recursive:true, mkdirSync returns undefined when nothing new was
// created.
if (mkdirSync(tempdir, { recursive: true })) {
  throw new Error(
    "mkdirSync shouldn't return directory name on existing directories"
  );
}
|
||||
30
integration/bunjs-only-snippets/toml-fixture.toml
Normal file
30
integration/bunjs-only-snippets/toml-fixture.toml
Normal file
@@ -0,0 +1,30 @@
|
||||
|
||||
framework = "next"
|
||||
origin = "http://localhost:5000"
|
||||
inline.array = [1234, 4, 5, 6]
|
||||
|
||||
|
||||
[macros]
|
||||
react-relay = { "graphql" = "node_modules/bun-macro-relay/bun-macro-relay.tsx" }
|
||||
|
||||
[bundle.packages]
|
||||
"@emotion/react" = true
|
||||
|
||||
|
||||
[dev]
|
||||
foo = 123
|
||||
"foo.bar" = "baz"
|
||||
"abba.baba" = "baba"
|
||||
dabba = -123
|
||||
doo = 123.456
|
||||
one.two.three = 4
|
||||
|
||||
[[array]]
|
||||
entry_one = "one"
|
||||
entry_two = "two"
|
||||
|
||||
[[array]]
|
||||
entry_one = "three"
|
||||
|
||||
[[array.nested]]
|
||||
entry_one = "four"
|
||||
17
integration/bunjs-only-snippets/toml.test.js
Normal file
17
integration/bunjs-only-snippets/toml.test.js
Normal file
@@ -0,0 +1,17 @@
|
||||
import { describe, it, expect } from "bun:test";
|
||||
|
||||
// Load the TOML fixture through the import pipeline and spot-check every
// syntactic feature it exercises: tables, arrays-of-tables, dotted and
// quoted keys, inline arrays, and numbers.
it("syntax", async () => {
  const { default: toml } = await import("./toml-fixture.toml");

  expect(toml.framework).toBe("next");
  expect(toml.bundle.packages["@emotion/react"]).toBe(true);

  const [firstEntry, secondEntry] = toml.array;
  expect(firstEntry.entry_one).toBe("one");
  expect(firstEntry.entry_two).toBe("two");
  expect(secondEntry.entry_one).toBe("three");
  expect(secondEntry.entry_two).toBe(undefined);
  expect(secondEntry.nested[0].entry_one).toBe("four");

  expect(toml.dev.one.two.three).toBe(4);
  expect(toml.dev.foo).toBe(123);
  expect(toml.inline.array[0]).toBe(1234);
  expect(toml.inline.array[1]).toBe(4);
  expect(toml.dev["foo.bar"]).toBe("baz");
});
|
||||
140
integration/bunjs-only-snippets/transpiler.test.js
Normal file
140
integration/bunjs-only-snippets/transpiler.test.js
Normal file
@@ -0,0 +1,140 @@
|
||||
import { expect, it, describe } from "bun:test";
|
||||
|
||||
describe("Bun.Transpiler", () => {
|
||||
const transpiler = new Bun.Transpiler({
|
||||
loader: "tsx",
|
||||
define: {
|
||||
"process.env.NODE_ENV": JSON.stringify("development"),
|
||||
},
|
||||
macro: {
|
||||
react: {
|
||||
bacon: `${import.meta.dir}/macro-check.js`,
|
||||
},
|
||||
},
|
||||
platform: "browser",
|
||||
});
|
||||
|
||||
const code = `import { useParams } from "remix";
|
||||
import type { LoaderFunction, ActionFunction } from "remix";
|
||||
|
||||
export const loader: LoaderFunction = async ({
|
||||
params
|
||||
}) => {
|
||||
console.log(params.postId);
|
||||
};
|
||||
|
||||
export const action: ActionFunction = async ({
|
||||
params
|
||||
}) => {
|
||||
console.log(params.postId);
|
||||
};
|
||||
|
||||
export default function PostRoute() {
|
||||
const params = useParams();
|
||||
console.log(params.postId);
|
||||
}
|
||||
|
||||
`;
|
||||
|
||||
describe("scanImports", () => {
|
||||
it("reports import paths, excluding types", () => {
|
||||
const imports = transpiler.scanImports(code);
|
||||
expect(imports.filter(({ path }) => path === "remix")).toHaveLength(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe("scan", () => {
|
||||
it("reports all export names", () => {
|
||||
const { imports, exports } = transpiler.scan(code);
|
||||
|
||||
expect(exports[0]).toBe("action");
|
||||
expect(exports[2]).toBe("loader");
|
||||
expect(exports[1]).toBe("default");
|
||||
|
||||
expect(exports).toHaveLength(3);
|
||||
});
|
||||
});
|
||||
|
||||
describe("transform", () => {
|
||||
it("supports macros", async () => {
|
||||
const out = await transpiler.transform(`
|
||||
import {keepSecondArgument} from 'macro:${
|
||||
import.meta.dir
|
||||
}/macro-check.js';
|
||||
|
||||
export default keepSecondArgument("Test failed", "Test passed");
|
||||
`);
|
||||
expect(out.includes("Test failed")).toBe(false);
|
||||
expect(out.includes("Test passed")).toBe(true);
|
||||
|
||||
// ensure both the import and the macro function call are removed
|
||||
expect(out.includes("keepSecondArgument")).toBe(false);
|
||||
});
|
||||
|
||||
it("sync supports macros", () => {
|
||||
const out = transpiler.transformSync(`
|
||||
import {keepSecondArgument} from 'macro:${
|
||||
import.meta.dir
|
||||
}/macro-check.js';
|
||||
|
||||
export default keepSecondArgument("Test failed", "Test passed");
|
||||
`);
|
||||
expect(out.includes("Test failed")).toBe(false);
|
||||
expect(out.includes("Test passed")).toBe(true);
|
||||
|
||||
expect(out.includes("keepSecondArgument")).toBe(false);
|
||||
});
|
||||
|
||||
const importLines = [
|
||||
"import {createElement, bacon} from 'react';",
|
||||
"import {bacon, createElement} from 'react';",
|
||||
];
|
||||
describe("sync supports macros remap", () => {
|
||||
for (let importLine of importLines) {
|
||||
it(importLine, () => {
|
||||
const out = transpiler.transformSync(`
|
||||
${importLine}
|
||||
|
||||
export default bacon("Test failed", "Test passed");
|
||||
export function hi() { createElement("hi"); }
|
||||
`);
|
||||
|
||||
expect(out.includes("Test failed")).toBe(false);
|
||||
expect(out.includes("Test passed")).toBe(true);
|
||||
|
||||
expect(out.includes("bacon")).toBe(false);
|
||||
expect(out.includes("createElement")).toBe(true);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
it("macro remap removes import statement if its the only used one", () => {
|
||||
const out = transpiler.transformSync(`
|
||||
import {bacon} from 'react';
|
||||
|
||||
export default bacon("Test failed", "Test passed");
|
||||
`);
|
||||
|
||||
expect(out.includes("Test failed")).toBe(false);
|
||||
expect(out.includes("Test passed")).toBe(true);
|
||||
|
||||
expect(out.includes("bacon")).toBe(false);
|
||||
expect(out.includes("import")).toBe(false);
|
||||
});
|
||||
|
||||
it("removes types", () => {
|
||||
expect(code.includes("ActionFunction")).toBe(true);
|
||||
expect(code.includes("LoaderFunction")).toBe(true);
|
||||
const out = transpiler.transformSync(code);
|
||||
|
||||
expect(out.includes("ActionFunction")).toBe(false);
|
||||
expect(out.includes("LoaderFunction")).toBe(false);
|
||||
const { exports } = transpiler.scan(out);
|
||||
|
||||
expect(exports[0]).toBe("action");
|
||||
expect(exports[2]).toBe("loader");
|
||||
expect(exports[1]).toBe("default");
|
||||
expect(exports).toHaveLength(3);
|
||||
});
|
||||
});
|
||||
});
|
||||
4
integration/macro/assert.tsx
Normal file
4
integration/macro/assert.tsx
Normal file
@@ -0,0 +1,4 @@
|
||||
// This logs the result at build time
|
||||
export function unreachable(call) {
|
||||
throw new Error(call.arguments[0].toString() || "unreachable");
|
||||
}
|
||||
8
integration/macro/fetchSync.tsx
Normal file
8
integration/macro/fetchSync.tsx
Normal file
@@ -0,0 +1,8 @@
|
||||
export async function fetchSync(ctx) {
|
||||
const str = ctx.arguments[0].toString();
|
||||
|
||||
const response = await fetch(str);
|
||||
const text = await response.text();
|
||||
|
||||
return <string value={text} />;
|
||||
}
|
||||
5
integration/macro/hello-fetch-macro.tsx
Normal file
5
integration/macro/hello-fetch-macro.tsx
Normal file
@@ -0,0 +1,5 @@
|
||||
import { fetchSync } from "macro:./fetchSync.tsx";
|
||||
|
||||
const synchronousFetch = fetchSync(`https://example.com`);
|
||||
|
||||
console.log(synchronousFetch);
|
||||
30
integration/macro/loadMocks.tsx
Normal file
30
integration/macro/loadMocks.tsx
Normal file
@@ -0,0 +1,30 @@
|
||||
import { unreachable } from "macro:./assert";
|
||||
|
||||
if (process.env.NODE_ENV !== "test")
|
||||
unreachable("This module should only be imported in tests");
|
||||
|
||||
export const mockData = {
|
||||
Copilot: {
|
||||
id: "Copilot",
|
||||
name: "Copilot",
|
||||
description: "Copilot",
|
||||
icon: "https://s3.amazonaws.com/copilot-public/images/icons/Copilot.png",
|
||||
color: "#00AEEF",
|
||||
type: "service",
|
||||
tags: ["copilot"],
|
||||
categories: ["copilot"],
|
||||
links: [
|
||||
{
|
||||
id: "Copilot",
|
||||
name: "Copilot",
|
||||
url: "https://copilot.io",
|
||||
description: "Copilot",
|
||||
icon: "https://s3.amazonaws.com/copilot-public/images/icons/Copilot.png",
|
||||
color: "#00AEEF",
|
||||
type: "service",
|
||||
tags: ["copilot"],
|
||||
categories: ["copilot"],
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
@@ -35,14 +35,16 @@ if (!USE_EXISTING_PROCESS) {
|
||||
console.error("❌ bun error", err);
|
||||
process.exit(1);
|
||||
});
|
||||
waitSpawn = new Promise((resolve, reject) => {
|
||||
bunProcess.once("spawn", (code) => {
|
||||
console.log("Spawned");
|
||||
resolve();
|
||||
if (!process.env.CI) {
|
||||
waitSpawn = new Promise((resolve, reject) => {
|
||||
bunProcess.once("spawn", (code) => {
|
||||
console.log("Spawned");
|
||||
resolve();
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
process.on("beforeExit", () => {
|
||||
bunProcess?.kill(0);
|
||||
bunProcess && bunProcess.kill(0);
|
||||
});
|
||||
}
|
||||
const isDebug = bunExec.endsWith("-debug");
|
||||
@@ -71,8 +73,27 @@ function writeSnapshot(name, code) {
|
||||
);
|
||||
}
|
||||
|
||||
const baseOptions = {
|
||||
dumpio: !!process.env.CI_DEBUG,
|
||||
|
||||
args: [
|
||||
"--disable-gpu",
|
||||
"--disable-dev-shm-usage",
|
||||
"--disable-setuid-sandbox",
|
||||
"--no-sandbox",
|
||||
"--ignore-certificate-errors",
|
||||
"--use-fake-ui-for-media-stream",
|
||||
"--use-fake-device-for-media-stream",
|
||||
"--disable-sync",
|
||||
],
|
||||
executablePath: process.env.BROWSER_EXECUTABLE,
|
||||
headless: true,
|
||||
};
|
||||
|
||||
async function main() {
|
||||
const launchOptions = USE_EXISTING_PROCESS ? { devtools: true } : undefined;
|
||||
const launchOptions = USE_EXISTING_PROCESS
|
||||
? { ...baseOptions, devtools: !process.env.CI }
|
||||
: baseOptions;
|
||||
const browser = await puppeteer.launch(launchOptions);
|
||||
const promises = [];
|
||||
let allTestsPassed = true;
|
||||
|
||||
@@ -1,84 +1,85 @@
|
||||
import snippets from "./snippets.json";
|
||||
const fail = true;
|
||||
// import snippets from "./snippets.json";
|
||||
|
||||
globalThis.console.assert = (condition, ...content) => {
|
||||
if (!condition) {
|
||||
throw new Error(content.join(" "));
|
||||
}
|
||||
};
|
||||
globalThis.getModuleScriptSrc = async (name) => {
|
||||
const response = await fetch(name, {
|
||||
cache: "force-cache",
|
||||
});
|
||||
// globalThis.console.assert = (condition, ...content) => {
|
||||
// if (!condition) {
|
||||
// throw new Error(content.join(" "));
|
||||
// }
|
||||
// };
|
||||
// globalThis.getModuleScriptSrc = async (name) => {
|
||||
// const response = await fetch(name, {
|
||||
// cache: "force-cache",
|
||||
// });
|
||||
|
||||
if (response.ok) {
|
||||
return await response.text();
|
||||
} else {
|
||||
throw new Error(`Failed to get module script ${name}`);
|
||||
}
|
||||
};
|
||||
// if (response.ok) {
|
||||
// return await response.text();
|
||||
// } else {
|
||||
// throw new Error(`Failed to get module script ${name}`);
|
||||
// }
|
||||
// };
|
||||
|
||||
globalThis.runTest = async (name) => {
|
||||
testSuccess = false;
|
||||
var Namespace = await import(name);
|
||||
var testFunction = Namespace.test;
|
||||
// globalThis.runTest = async (name) => {
|
||||
// testSuccess = false;
|
||||
// var Namespace = await import(name);
|
||||
// var testFunction = Namespace.test;
|
||||
|
||||
if (
|
||||
!("test" in Namespace) &&
|
||||
"default" in Namespace &&
|
||||
typeof Namespace.default === "function"
|
||||
) {
|
||||
Namespace = Namespace.default();
|
||||
testFunction = Namespace.test;
|
||||
}
|
||||
// if (
|
||||
// !("test" in Namespace) &&
|
||||
// "default" in Namespace &&
|
||||
// typeof Namespace.default === "function"
|
||||
// ) {
|
||||
// Namespace = Namespace.default();
|
||||
// testFunction = Namespace.test;
|
||||
// }
|
||||
|
||||
if (!testFunction) {
|
||||
throw new Error("No test function found in " + name);
|
||||
}
|
||||
// if (!testFunction) {
|
||||
// throw new Error("No test function found in " + name);
|
||||
// }
|
||||
|
||||
if (typeof testFunction !== "function") {
|
||||
throw new Error(
|
||||
`Expected (await import(\"${name}\"")) to have a test function.\nReceived: ${Object.keys(
|
||||
Namespace
|
||||
).join(", ")} `
|
||||
);
|
||||
}
|
||||
// if (typeof testFunction !== "function") {
|
||||
// throw new Error(
|
||||
// `Expected (await import(\"${name}\"")) to have a test function.\nReceived: ${Object.keys(
|
||||
// Namespace
|
||||
// ).join(", ")} `
|
||||
// );
|
||||
// }
|
||||
|
||||
if (globalThis.BUN_DEBUG_MODE) {
|
||||
try {
|
||||
await testFunction();
|
||||
if (!testSuccess) {
|
||||
throw new Error("Test failed");
|
||||
}
|
||||
} catch (exception) {
|
||||
console.error(exception);
|
||||
debugger;
|
||||
throw exception;
|
||||
}
|
||||
} else {
|
||||
await testFunction();
|
||||
if (!testSuccess) {
|
||||
throw new Error("Test failed");
|
||||
}
|
||||
}
|
||||
};
|
||||
// if (globalThis.BUN_DEBUG_MODE) {
|
||||
// try {
|
||||
// await testFunction();
|
||||
// if (!testSuccess) {
|
||||
// throw new Error("Test failed");
|
||||
// }
|
||||
// } catch (exception) {
|
||||
// console.error(exception);
|
||||
// debugger;
|
||||
// throw exception;
|
||||
// }
|
||||
// } else {
|
||||
// await testFunction();
|
||||
// if (!testSuccess) {
|
||||
// throw new Error("Test failed");
|
||||
// }
|
||||
// }
|
||||
// };
|
||||
|
||||
var testSuccess = false;
|
||||
globalThis.testDone = () => {
|
||||
testSuccess = true;
|
||||
};
|
||||
// var testSuccess = false;
|
||||
// globalThis.testDone = () => {
|
||||
// testSuccess = true;
|
||||
// };
|
||||
|
||||
let fail = 0;
|
||||
for (let snippet of snippets) {
|
||||
try {
|
||||
await runTest("../snippets/" + snippet.substring(1));
|
||||
console.log("✅", snippet);
|
||||
} catch (exception) {
|
||||
console.error(`❌ ${snippet}`);
|
||||
console.error(exception);
|
||||
// let fail = 0;
|
||||
// for (let snippet of snippets) {
|
||||
// try {
|
||||
// await runTest("../snippets/" + snippet.substring(1));
|
||||
// console.log("✅", snippet);
|
||||
// } catch (exception) {
|
||||
// console.error(`❌ ${snippet}`);
|
||||
// console.error(exception);
|
||||
|
||||
fail++;
|
||||
}
|
||||
}
|
||||
// fail++;
|
||||
// }
|
||||
// }
|
||||
|
||||
if (fail) throw new Error(`❌ browser test failed (${fail})`);
|
||||
|
||||
|
||||
BIN
integration/scripts/bun.lockb
Executable file
BIN
integration/scripts/bun.lockb
Executable file
Binary file not shown.
5
integration/scripts/package.json
Normal file
5
integration/scripts/package.json
Normal file
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"dependencies": {
|
||||
"puppeteer": "^10.2.0"
|
||||
}
|
||||
}
|
||||
@@ -24,5 +24,8 @@
|
||||
"/jsx-entities.jsx",
|
||||
"/optional-chain-with-function.js",
|
||||
"/template-literal.js",
|
||||
"/number-literal-bug.js"
|
||||
"/number-literal-bug.js",
|
||||
"/caught-require.js",
|
||||
"/package-json-utf8.js",
|
||||
"/multiple-var.js"
|
||||
]
|
||||
|
||||
@@ -1,12 +1,15 @@
|
||||
import {
|
||||
__HMRModule as HMR
|
||||
} from "http://localhost:8080/__runtime.js";
|
||||
import {
|
||||
__HMRClient as Bun
|
||||
} from "http://localhost:8080/__runtime.js";
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import {
|
||||
__FastRefreshModule as FastHMR
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import {
|
||||
__FastRefreshRuntime as FastRefresh
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
Bun.activate(true);
|
||||
|
||||
var hmr = new HMR(3474597122, "array-args-with-default-values.js"), exports = hmr.exports;
|
||||
var hmr = new FastHMR(3474597122, "array-args-with-default-values.js", FastRefresh), exports = hmr.exports;
|
||||
(hmr._load = function() {
|
||||
var lines;
|
||||
const data = () => lines.map(([a = null, b = null, c = null, d = null]) => ({
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user