Compare commits

...

196 Commits

Author SHA1 Message Date
Ashcon Partovi
4ca0d96837 Fix tinycc 2024-07-23 16:36:23 -07:00
Ashcon Partovi
2091551c92 Latest changes 2024-07-23 15:39:27 -07:00
Ashcon Partovi
a2bc49a991 Changes: 2024-07-23 10:05:47 -07:00
Ashcon Partovi
398e93249e Continue 2024-07-22 21:33:33 -07:00
Ashcon Partovi
1ec44688b7 Changes 2024-07-22 21:33:33 -07:00
Ashcon Partovi
7aa2360542 Changes 2024-07-22 21:33:33 -07:00
Ashcon Partovi
e87c599e6a Build script continued 2024-07-22 21:33:33 -07:00
Ashcon Partovi
d60da3d186 Unified build script 2024-07-22 21:33:33 -07:00
Jarred Sumner
3ef84816a6 Update WebKit 2024-07-22 20:47:53 -07:00
Jarred Sumner
6e9b592c56 try using LLVM 18 on macOS (#12727)
Co-authored-by: Jarred-Sumner <Jarred-Sumner@users.noreply.github.com>
2024-07-22 20:44:29 -07:00
Jarred Sumner
a6b5543bd8 Don't set fuse-ld=lld in boringssl script 2024-07-22 15:41:41 -07:00
Jarred Sumner
a4759eb147 Bump minimum macOS build to 13.0 2024-07-22 14:50:23 -07:00
Dariush Alipour
732ed2b7df Fix: test coverage node_modules exclusion in Windows (#12691) 2024-07-22 13:25:42 -07:00
Jarred Sumner
63fab9a82b Update fetch.md 2024-07-22 04:06:00 -07:00
Jarred Sumner
ff17b427c8 Update fetch.md 2024-07-22 04:02:48 -07:00
Jarred Sumner
ca44df7c88 Update fetch.md 2024-07-22 03:57:29 -07:00
huseeiin
9daa7ea555 Update bun.d.ts (#12719) 2024-07-22 03:55:14 -07:00
Jarred Sumner
2f0020f00f Update fetch.md 2024-07-22 03:52:33 -07:00
Jarred Sumner
599d27d93e Update fetch.md 2024-07-22 03:50:22 -07:00
Jarred Sumner
696f209ec1 Update fetch.md 2024-07-22 03:49:32 -07:00
Jarred Sumner
1a6ead667b Introduce bun --fetch-preconnect <url> ./my-script.ts (#12698) 2024-07-22 03:41:59 -07:00
Jarred Sumner
bbf2f5d716 Experiment: disable C++ static destructors (#12652)
Co-authored-by: Jarred-Sumner <Jarred-Sumner@users.noreply.github.com>
2024-07-21 21:32:52 -07:00
ippsav
9574044083 Fix expect.toThrow(expect.any()) matcher to correctly handle ExpectAny objects (#12670) 2024-07-21 20:35:18 -07:00
Jarred Sumner
822b725bec Fix BUN-2M9, take two 2024-07-21 08:49:00 -07:00
Jarred Sumner
dc775f75f0 Fix BUN-2M9 2024-07-21 07:40:57 -07:00
Jarred Sumner
738947bdec Deflake node-tls-connect test 2024-07-20 02:36:08 -07:00
Jarred Sumner
b7efeafc03 Deflake node-tls-connect test 2024-07-19 23:42:23 -07:00
Jarred Sumner
f5d1a17a5c Fix crash in bun exec cd (#12676)
Co-authored-by: Jarred-Sumner <Jarred-Sumner@users.noreply.github.com>
2024-07-19 22:57:52 -07:00
Jarred Sumner
03024e6b4e Fix truncating in BigIntStats (#12643)
Co-authored-by: Jarred-Sumner <Jarred-Sumner@users.noreply.github.com>
2024-07-19 20:00:32 -07:00
Jarred Sumner
1d61676c7b Check for file or directory 2024-07-18 13:56:21 -07:00
Jarred Sumner
23fb63f45c Fixes #12360 (#12364) 2024-07-18 09:10:15 -07:00
Jarred Sumner
0be71edf3f Upload .dSYM file 2024-07-18 03:19:48 -07:00
Jarred Sumner
6b50deb7b7 Move dev dependencies to "devDependencies"
Spammy vulnerability scanning software can't tell we aren't using these in the "bun" npm package.
2024-07-18 01:13:36 -07:00
Jarred Sumner
6ad3e6a5e3 Fixes #2532 (#12633)
Co-authored-by: Jarred-Sumner <Jarred-Sumner@users.noreply.github.com>
2024-07-17 20:53:12 -07:00
HibanaSama
b1dce1e241 build(windows): fix esbuild errors when bundling node-fallbacks (#12628) 2024-07-17 19:40:25 -07:00
Jarred Sumner
cc42052039 node-fetch polyfill shouldn't break when web globals are overriden (#12634) 2024-07-17 18:57:03 -07:00
Jarred Sumner
ecf5aea071 Ensure undici primordials are pristine (#12635) 2024-07-17 18:56:22 -07:00
Jarred Sumner
79d21a0d02 Bump internal bun versions 2024-07-17 17:21:14 -07:00
dave caruso
43949151b1 fix(bundler): importing modules with trailing slash no longer uses a builtin (#12632) 2024-07-17 17:17:00 -07:00
Ciro Spaciari
16aad326e4 fix(setSystemTime) fix number parameter behavior (#12630)
Co-authored-by: Dylan Conway <35280289+dylan-conway@users.noreply.github.com>
2024-07-17 17:09:24 -07:00
Jarred Sumner
1a6f2d38da Github actions 2024-07-17 03:31:12 -07:00
Jarred Sumner
c6149d36b3 Bump 2024-07-17 02:40:42 -07:00
Jarred Sumner
34e493f945 Experiment: disable -fPIC and relro (#12582)
Co-authored-by: Jarred-Sumner <Jarred-Sumner@users.noreply.github.com>
2024-07-17 02:33:46 -07:00
dave caruso
866b301626 bundler: make import() calls visit the options object (#12617) 2024-07-16 23:21:34 -07:00
dave caruso
cabc0fa0e6 fix typescript namespace merging with functions and classes (#12610) 2024-07-16 19:39:27 -07:00
HibanaSama
d703354fcd docs: close details block (#12533) 2024-07-16 17:50:47 -07:00
Ciro Spaciari
37036f2eb0 fix(serve) fix abrupt close when downloading data (#12581)
Co-authored-by: cirospaciari <cirospaciari@users.noreply.github.com>
2024-07-16 16:39:37 -07:00
190n
ff0dc62314 Accept undefined as explicit second argument for path.*.basename (#12609) 2024-07-16 16:37:21 -07:00
Ciro Spaciari
f05f13780e fix(CryptoHasher) check of .empty/null/undefined in update (#12607)
Co-authored-by: cirospaciari <cirospaciari@users.noreply.github.com>
2024-07-16 16:33:35 -07:00
190n
4d74855fd7 Prevent unref from hanging on uninitialized dgram socket (#12585)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-07-16 16:31:07 -07:00
Jarred Sumner
5088a360b5 Avoid stale reference to Body.Value when erroring (#12579) 2024-07-16 12:33:19 -07:00
dave caruso
891b1907ae feat(bundler): implement non-isolated hashes (#12576) 2024-07-15 20:34:15 -07:00
dave caruso
ae988642fb fix .use_count integer underflow (#12584) 2024-07-15 18:36:42 -07:00
190n
75e442c170 Change .mjs to .mts during TypeScript module resolution (fixes #12471) (#12580) 2024-07-15 18:12:18 -07:00
Jarred Sumner
8808af1c99 Replace some Identifier::fromString usages with vm->propertyNames (#12575) 2024-07-15 16:15:56 -07:00
Jarred Sumner
b9d2a03ffc Make creating a BufferList (used in node:stream) slightly faster (#12577) 2024-07-15 16:15:10 -07:00
Jarred Sumner
157b56cca5 Update launch.json 2024-07-15 15:12:57 -07:00
Eric L. Goldstein
caaeae123a Add documentation for mock.restore() (#12553) 2024-07-14 21:20:33 -07:00
Ivan Baksheev
20235a0d22 Add packages option to remove all dependencies from bundle (#12562) 2024-07-14 15:20:27 -07:00
Dylan Conway
ae19489250 bump 2024-07-12 19:45:10 -07:00
Dylan Conway
242c48f302 bump 2024-07-12 19:42:18 -07:00
dave caruso
110849355c make the windows binary smaller (#12523)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-07-12 00:20:28 -07:00
Jarred Sumner
36fd3115f1 Try icf=safe (#12524) 2024-07-11 22:37:04 -07:00
Jarred Sumner
7d9b876968 Use armv8a+crc 2024-07-11 20:59:37 -07:00
Jarred Sumner
40f0da1254 Use armv8-a 2024-07-11 20:30:21 -07:00
Jarred Sumner
aea3964abd Set -march instead of -mcpu 2024-07-11 20:14:43 -07:00
Jarred Sumner
780bff781d Set the cpu model in the right place 2024-07-11 19:00:06 -07:00
Jarred Sumner
c6a2ab5165 Revert "Don't set mtune"
This reverts commit ef1c660708.
2024-07-11 18:58:24 -07:00
Jarred Sumner
ef1c660708 Don't set mtune 2024-07-11 18:40:52 -07:00
Ciro Spaciari
11f8d3cb24 fix(server) fix abrupt stop (#12472)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
Co-authored-by: Jarred-Sumner <Jarred-Sumner@users.noreply.github.com>
Co-authored-by: cirospaciari <cirospaciari@users.noreply.github.com>
2024-07-11 18:22:23 -07:00
dave caruso
3ac9c3cc1c make bun static link the c redistributable (#12521)
Co-authored-by: paperdave <paperdave@users.noreply.github.com>
2024-07-11 17:35:35 -07:00
Jarred Sumner
aa0f54cb93 Fixes #12076 (#12504) 2024-07-11 16:59:14 -07:00
huseeiin
8a3f882ef7 Update README.md (#12512) 2024-07-11 07:24:13 -07:00
Jarred Sumner
b7dd57ac32 Fix zig build error on Windows 2024-07-11 00:08:05 -07:00
Jarred Sumner
cf1c7772f3 See if .dSYM will upload (#12502) 2024-07-10 23:53:17 -07:00
Jarred Sumner
329d5e2af5 Disable this debug log 2024-07-10 23:25:55 -07:00
Jarred Sumner
0098678a1d Upload .dSYM file in CI 2024-07-10 23:04:06 -07:00
Jarred Sumner
bf4c2caa11 Bump 2024-07-10 22:48:28 -07:00
Jarred Sumner
226f42e04a Rewrite js_ast.NewBaseStore (#12388)
Co-authored-by: dave caruso <me@paperdave.net>
2024-07-10 21:57:40 -07:00
Ciro Spaciari
96d19fcfe2 fix(fetch.tls.test) make test more reliable (#12499) 2024-07-10 21:52:34 -07:00
Dylan Conway
58483426cd fix(install): call GetFinalPathNameByHandle on cwd for postinstall scripts (#12497) 2024-07-10 21:09:14 -07:00
Dylan Conway
25f7ef7338 Revert "Nest test results under describe scopes (#12189)"
This reverts commit 6a43f7f52d.
2024-07-10 21:05:47 -07:00
Jarred Sumner
412806bb22 Make expect().toThrow faster (#12494) 2024-07-10 20:46:29 -07:00
Ciro Spaciari
4c87406391 fix(ssl) fix ssl shutdown (#12492)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-07-10 20:29:54 -07:00
Dylan Conway
5f7b96b58f fix(install): optional peer dependency bugfix (#12485)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
Co-authored-by: dylan-conway <dylan-conway@users.noreply.github.com>
2024-07-10 20:04:32 -07:00
Jarred Sumner
f1151a84ad On Windows, fix fs.writeFile(1, fs.writeFile(2, fs.writeFile(\\nul (#12410)
Co-authored-by: Dylan Conway <dylan.conway567@gmail.com>
2024-07-10 19:49:36 -07:00
Zack Radisic
cdc68a2237 .npmrc follow up (#12390)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
Co-authored-by: Dylan Conway <35280289+dylan-conway@users.noreply.github.com>
2024-07-10 19:35:21 -07:00
Jarred Sumner
e866793eb3 Fix Windows assertion failures 2024-07-10 18:08:04 -07:00
Dylan Conway
cf9c418bcb revert 2024-07-10 17:22:16 -07:00
Jarred Sumner
138ef1328e webcrypto tests are slow 2024-07-10 17:18:39 -07:00
Dylan Conway
e5e6d7ca43 comma 2024-07-10 17:03:19 -07:00
Jarred Sumner
cb81fc5445 Make ${encoding}Slice & ${encoding}Write work on Uint8Array (#12491) 2024-07-10 16:58:01 -07:00
Dylan Conway
d8caf7f9fa install all at once 2024-07-10 16:48:40 -07:00
Dylan Conway
6f8ceb0ea9 windows: bump llvm to 18.1.8 (#12490) 2024-07-10 16:45:54 -07:00
dave caruso
02b589b2ce fix a crash in remapping stacks (#12477)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-07-10 16:39:05 -07:00
Jarred Sumner
55d59ebf1f Try disabling the vs2022 build tools workaround 2024-07-10 02:01:29 -07:00
Jarred Sumner
e1bc6c55d5 Speculative fix for crash in visitChildren in BufferList (#12427) 2024-07-10 01:44:52 -07:00
Dylan Conway
e42dede529 upgrade webkit (#12474)
Co-authored-by: dylan-conway <dylan-conway@users.noreply.github.com>
2024-07-10 01:38:26 -07:00
arnab
73ef93ffa3 Fix spelling - following (#12479) 2024-07-09 23:35:06 -07:00
Derrick Farris
475f71a2a1 fix(jest): beforeEach, afterEach not called for test.todo (#12406) 2024-07-09 23:15:27 -07:00
Ciro Spaciari
af6035ce36 fix(server) wait for readFile on abort/process exit cases (#12441)
Co-authored-by: cirospaciari <cirospaciari@users.noreply.github.com>
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-07-09 22:38:32 -07:00
Jarred Sumner
bfa395d1d5 Warn when installing a global binary and global bin is not in path (#12454) 2024-07-09 22:32:56 -07:00
Jarred Sumner
76bb5b8619 Set __DARWIN_NON_CANCELABLE (#12354)
Co-authored-by: Jarred-Sumner <Jarred-Sumner@users.noreply.github.com>
2024-07-09 17:48:31 -07:00
Imgodmaoyouknow
6354e608a7 chore(docs): fix wrongly written at binary-data.md (#12451) 2024-07-09 17:39:01 -07:00
Le Michel
28d9527189 Update cache.md (#12231) 2024-07-09 17:26:18 -07:00
Victor Homyakov
fbcd843c58 Make JSX in react-hello-world.node.jsx the same as others (#12259) 2024-07-09 17:25:54 -07:00
lmmfranco
6b0c2383d5 Adding proper bash quote escaping on install.sh (#5002)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-07-09 17:10:43 -07:00
silverwind
f1a748fcab Add -u alias to bun test (#10097)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-07-09 16:53:35 -07:00
Vladimir Pouzanov
87296405a7 Remove the case-insensitive bit from the docs. (#4858) 2024-07-09 16:47:48 -07:00
张新伟
4dfbabd590 chore(docs): add notes for ubuntu developers (#12296)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-07-09 16:31:26 -07:00
Jarred Sumner
ea1135a464 [bundows] Skip unnecessary GetFinalPathNameByHandle (#10338)
Co-authored-by: Ashcon Partovi <ashcon@partovi.net>
Co-authored-by: Jarred-Sumner <Jarred-Sumner@users.noreply.github.com>
2024-07-09 16:21:13 -07:00
Dmitrii
f1755df6f0 docs: update drizzle-kit cli command in drizzle.md (#11666) 2024-07-09 16:18:42 -07:00
Bjön Limell
b01f67857f Added missing commands to bun.bash (#7181) 2024-07-09 16:15:34 -07:00
Nikola Ristić
9fe7ea340d Add a note about bun add --exact alias to docs (#6968) 2024-07-09 16:09:24 -07:00
Ivan Baksheev
68ba6b9e79 docs: sync Database.run docs with types (#9993) 2024-07-09 15:47:15 -07:00
patricio
a703d2d019 fix(docs): correct value for BUN_CONFIG_NO_CLEAR_TERMINAL_ON_RELOAD (#9647) 2024-07-09 15:37:45 -07:00
dave caruso
5137213f86 heavy revision on heap_breakdown's safety (#12445) 2024-07-09 14:29:00 -07:00
Danny Lin
c98da7daf7 docs: Simplify Homebrew install command (#4595) 2024-07-09 13:32:30 -07:00
Jarred Sumner
25252c9b46 Use memmove in path handling code (#12413)
Co-authored-by: Jarred-Sumner <Jarred-Sumner@users.noreply.github.com>
2024-07-09 11:22:54 -07:00
Imgodmaoyouknow
21ff566d69 chore(docs): fix wrongly written at binary-data.md (#12452) 2024-07-09 01:44:47 -07:00
Dylan Conway
a36a01e235 fix(--watch): ref or create new module specifier strings (#12442) 2024-07-08 19:55:21 -07:00
Jarred Sumner
9ae870546b On Windows, support Bun.stdin, Bun.stdout, Bun.stderr in Bun.write when the other argument is a file (#12411)
Co-authored-by: dave caruso <me@paperdave.net>
2024-07-08 18:03:48 -07:00
Jarred Sumner
a4b0817cd3 Print list of CPU features in crash reports (#12350)
Co-authored-by: Jarred-Sumner <Jarred-Sumner@users.noreply.github.com>
2024-07-08 15:08:07 -07:00
Jarred Sumner
c2a5451e93 Fix argument validation with callbacks in node:fs (#12412) 2024-07-07 20:18:07 -07:00
Jarred Sumner
150ae032e8 Flip conditional 2024-07-07 09:47:30 -07:00
Jarred Sumner
37ee951448 Add unusable postgres client behind a canary-or-debug-only feature flag (#11920)
Co-authored-by: Jarred-Sumner <Jarred-Sumner@users.noreply.github.com>
2024-07-07 09:43:48 -07:00
Jarred Sumner
b8c70ba6cf Deflake node-tls-connect test 2024-07-07 09:28:53 -07:00
Jarred Sumner
cbcf9506d9 Update CONTRIBUTING.md 2024-07-06 20:36:09 -07:00
Jarred Sumner
92bd629e60 Support promises in profile method in bun:jsc (#12165) 2024-07-06 20:24:21 -07:00
Jarred Sumner
e7031b07ae Fix memory leak in withFileTypes: true in fs.readdir (#12393) 2024-07-06 20:22:55 -07:00
Jarred Sumner
41a5ebe09f Fix memory leak in new Request(request) (#12387) 2024-07-05 22:11:09 -07:00
Jarred Sumner
cd97c21038 Handle OOM in btoa (#12353)
Co-authored-by: Jarred-Sumner <Jarred-Sumner@users.noreply.github.com>
2024-07-05 22:02:07 -07:00
dave caruso
57d22908d1 fix(transpiler): Fix non-inlined nested namespaces (#12386) 2024-07-05 21:59:23 -07:00
dave caruso
749c51d71a simpler version of simplifyUnusedExpr rewrite (#12384) 2024-07-05 20:20:45 -07:00
Noah Friedman
80bbad6568 setup-bun action bumped to v2 in docs (#12315) 2024-07-05 18:54:53 -07:00
Jarred Sumner
da1b3d2007 Check that ccache points to a regular file instead of a non-empty string (#12382) 2024-07-05 18:33:00 -07:00
Andrew Johnston
050a4b5c71 fix(formdata): handle file names correctly when setting on formdata (#12379)
Co-authored-by: Andrew Johnston <andrew@bun.sh>
2024-07-05 18:29:12 -07:00
Jarred Sumner
6f52b649da Make debug log more useful 2024-07-05 17:28:48 -07:00
Zach Olivare
bbc621adff docs(argv): Correct cli file name (#12373) 2024-07-05 16:05:12 -07:00
Jarred Sumner
71c223e111 Handle EINTR in usockets (#12357)
Co-authored-by: Jarred-Sumner <Jarred-Sumner@users.noreply.github.com>
2024-07-05 15:40:10 -07:00
Jarred Sumner
ee25618197 Use bun.ComptimeStringMap instead of std.StaticStringMap (#12351)
Co-authored-by: Jarred-Sumner <Jarred-Sumner@users.noreply.github.com>
2024-07-04 23:05:51 -07:00
Vadzim
4f3ef07455 Fix crash on aborted timer (#12348) 2024-07-04 16:20:59 -07:00
Jarred Sumner
fad58168d2 Configure LTO on Windows (#12290) 2024-07-04 16:20:18 -07:00
Dylan Conway
4fefb8507c respect package.json indentation in bun install (#12328) 2024-07-03 23:10:34 -07:00
Jarred Sumner
39d5c8a8a5 Remove proto installation method from docs
We cannot recommend people install Bun using an installation method that makes Bun take 1 second to print the version number

https://github.com/oven-sh/bun/issues/12294
2024-07-03 20:55:18 -07:00
Jake Boone
6a43f7f52d Nest test results under describe scopes (#12189)
Co-authored-by: Dylan Conway <35280289+dylan-conway@users.noreply.github.com>
2024-07-03 20:46:55 -07:00
Jarred Sumner
c486a049a8 Implement buffer.resolveObjectURL (#12324) 2024-07-03 19:17:20 -07:00
Jarred Sumner
5a0b935231 Bump libarchive (#12314)
Co-authored-by: Jarred-Sumner <Jarred-Sumner@users.noreply.github.com>
2024-07-03 11:56:17 -07:00
dave caruso
688ddbda74 feat(bundler): implement enum inlining / more constant folding (#12144)
Co-authored-by: paperdave <paperdave@users.noreply.github.com>
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
Co-authored-by: Jarred-Sumner <Jarred-Sumner@users.noreply.github.com>
2024-07-03 04:23:17 -07:00
Dylan Conway
b9fba61153 fix(install): patching in root package with workspaces (#12313)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-07-03 03:52:15 -07:00
Jarred Sumner
dfca8147df Bump WebKit submodule (#12310)
Co-authored-by: Jarred-Sumner <Jarred-Sumner@users.noreply.github.com>
2024-07-03 03:08:11 -07:00
Umar Faruq Chowdhury
b85c30cd89 Remove unused details section (#12311) 2024-07-03 02:14:56 -07:00
Jarred Sumner
f83e42de20 GitHub actions 2024-07-03 01:46:46 -07:00
Jarred Sumner
927dde7b34 GitHub actions 2024-07-03 01:22:49 -07:00
Jarred Sumner
c85576b15d GitHub actions 2024-07-03 01:21:16 -07:00
Jarred Sumner
db60af1a44 GitHub actions 2024-07-03 01:14:52 -07:00
Jarred Sumner
292035efcb GitHub actions 2024-07-03 01:10:37 -07:00
Jarred Sumner
823d790b1c GitHub actions 2024-07-03 01:07:40 -07:00
Jarred Sumner
5573b2e899 Add comment when updating a submodule 2024-07-03 01:05:49 -07:00
Jarred Sumner
2f0789af7c Always set enable_logs in development 2024-07-01 23:38:03 -07:00
Jarred Sumner
f8e640c018 Remove callconv from a couple functions 2024-07-01 23:35:28 -07:00
Jarred Sumner
b0018465cc WebKit upgrade (#12246)
Co-authored-by: Jarred-Sumner <Jarred-Sumner@users.noreply.github.com>
2024-07-01 23:27:59 -07:00
Justin Ho
dd057613b9 Update Buffer implementation status in nodejs-apis.md (#12274) 2024-07-01 22:08:09 -07:00
Zack Radisic
bf14a09a23 install: fix issues with patching hoisted dependencies in workspaces (#12141)
Co-authored-by: Dylan Conway <35280289+dylan-conway@users.noreply.github.com>
Co-authored-by: dylan-conway <dylan-conway@users.noreply.github.com>
2024-07-01 17:44:18 -07:00
Filip Skokan
81711faebe fix: add Symbol.toStringTag to KeyObject instances (#12278) 2024-07-01 15:13:34 -07:00
Eric L. Goldstein
86fd13643b base64 decode the request body instead of encoding it a second time (#12219) 2024-07-01 11:59:33 -07:00
Zack Radisic
861be5560e Support reading from .npmrc (#11979)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-06-29 18:11:23 -07:00
Jarred Sumner
7f3e6f23f6 Refactor ZigString -> toJS (#12242) 2024-06-29 00:21:42 -07:00
Jarred Sumner
5f34387bea Fix crash in dns.lookup, ensure getaddrinfo() only returns IPv4-only or IPv6-only results when it should, normalize node:dns errors (#12223) 2024-06-28 18:45:10 -07:00
Zack Radisic
e22383dff9 fix bad test/snapshot in glob/scan.test.ts (#12239)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-06-28 18:39:53 -07:00
Dylan Conway
c1a5b4acc5 fix napi.test.ts (#12241)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-06-28 18:38:37 -07:00
Jarred Sumner
61b343cf7d GitHub actions 2024-06-28 18:01:29 -07:00
Jarred Sumner
ec3487867c Update build-windows.yml 2024-06-28 17:42:02 -07:00
Jarred Sumner
3c52344b53 Hardcode this 2024-06-28 17:40:23 -07:00
Jarred Sumner
f37d89afb1 Revert BuildKite for now 2024-06-28 17:38:27 -07:00
Jarred Sumner
a0b5006b78 Re-enable GitHub actions CI (#12240) 2024-06-28 17:31:38 -07:00
Jarred Sumner
1a10f2b46e Bump 2024-06-28 17:23:15 -07:00
Dylan Conway
acc0fe6db4 comment 2024-06-28 17:02:36 -07:00
Dylan Conway
6e89419250 fix(install): bugfix for tarball "overrides" (#12234) 2024-06-28 16:54:35 -07:00
Dylan Conway
d5aa7265df fix(install): bun pm trust with updated dependencies (#12215)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-06-28 16:51:06 -07:00
Ashcon Partovi
fe27a181d3 Increase macOS test parallelism to 3 [skip ci] 2024-06-28 11:05:43 -07:00
Ashcon Partovi
145b9e7d09 Skip no-lto builds on main branch [skip ci] 2024-06-28 10:24:41 -07:00
Ashcon Partovi
fab33be408 Fix release script 2024-06-28 09:56:55 -07:00
Dylan Conway
da27f22622 fix(install): install binaries for packages installed multiple times (#11886)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
Co-authored-by: dylan-conway <dylan-conway@users.noreply.github.com>
2024-06-28 03:24:39 -07:00
Kelvin Luck
20d33e480b Mark semver.order as this:void (#12224) 2024-06-28 02:59:14 -07:00
Ashcon Partovi
066e2f2589 Skip tests on main [no ci] 2024-06-27 16:33:17 -07:00
Per Karlsson
9697a2b058 docs: Update docker.md (#12197) 2024-06-27 16:24:56 -07:00
Isaiah Banks
b37f94d396 Fix typo in pm_trusted_command.zig (#12162) 2024-06-27 16:12:00 -07:00
Jake Boone
eff2ea6271 Fix "lines" and "functions" in coverage threshold guide (#12217) 2024-06-27 16:00:13 -07:00
Ashcon Partovi
d105b048b1 Use Buildkite for CI (#11477) 2024-06-27 14:56:07 -07:00
Ashcon Partovi
d356e27a4d Remove debug workflow
No longer needed
2024-06-26 23:54:51 -07:00
Ashcon Partovi
96e84b276b Add a debug workflow
Github actions only triggers the `status` event if the workflow is in the main branch. This can be removed later.
2024-06-26 18:38:56 -07:00
Jarred Sumner
60ef13e079 Fix assertion failure in Bun.escapeHTML with latin1 input (#12185) 2024-06-26 18:25:02 -07:00
Jarred Sumner
10ce5ddd24 Fixes #12188 2024-06-26 18:07:19 -07:00
Erik Dunteman
cab78045b7 Make console.log(someFunction) print AsyncFunction when appropriate (#12136)
Co-authored-by: Erik Dunteman <erik@MacBook-Pro.attlocal.net>
Co-authored-by: Erik Dunteman <erik@Eriks-MBP.attlocal.net>
2024-06-25 13:00:24 -07:00
Jarred Sumner
d5e3ea0ab7 Make node:v8 getHeapStatistics more plausible (#12139) 2024-06-25 00:17:51 -07:00
625 changed files with 64937 additions and 42701 deletions

.buildkite/bootstrap.yml (new file, +30)

@@ -0,0 +1,30 @@
# Uploads the latest CI workflow to Buildkite.
# https://buildkite.com/docs/pipelines/defining-steps
#
# Changes to this file must be manually edited here:
# https://buildkite.com/bun/bun/settings/steps
steps:
- if: "build.pull_request.repository.fork"
block: ":eyes:"
prompt: "Did you review the PR?"
blocked_state: "running"
- label: ":pipeline:"
command: "buildkite-agent pipeline upload .buildkite/ci.yml"
agents:
queue: "build-linux"
- if: "build.branch == 'main' && !build.pull_request.repository.fork"
label: ":github:"
agents:
queue: "test-darwin"
depends_on:
- "darwin-aarch64-build-bun"
- "darwin-x64-build-bun"
- "linux-aarch64-build-bun"
- "linux-x64-build-bun"
- "linux-x64-baseline-build-bun"
- "windows-x64-build-bun"
- "windows-x64-baseline-build-bun"
command:
- ".buildkite/scripts/upload-release.sh"

.buildkite/ci.md (new file, +63)

@@ -0,0 +1,63 @@
## CI
How does CI work?
### Building
Bun is built on macOS, Linux, and Windows. The process is split into the following steps, the first 3 of which are able to run in parallel:
#### 1. `build-deps`
Builds the static libraries in `src/deps` and outputs a directory: `build/bun-deps`.
- on Windows, this runs the script: [`scripts/all-dependencies.ps1`](scripts/all-dependencies.ps1)
- on macOS and Linux, this runs the script: [`scripts/all-dependencies.sh`](scripts/all-dependencies.sh)
#### 2. `build-zig`
Builds the Zig object file: `build/bun-zig.o`. Since `zig build` supports cross-compiling, this step is run on macOS aarch64 since we have observed it to be the fastest.
- on macOS and Linux, this runs the script: [`scripts/build-bun-zig.sh`](scripts/build-bun-zig.sh)
#### 3. `build-cpp`
Builds the C++ object file: `build/bun-cpp-objects.a`.
- on Windows, this runs the script: [`scripts/build-bun-cpp.ps1`](scripts/build-bun-cpp.ps1)
- on macOS and Linux, this runs the script: [`scripts/build-bun-cpp.sh`](scripts/build-bun-cpp.sh)
#### 4. `link` / `build-bun`
After the `build-deps`, `build-zig`, and `build-cpp` steps have completed, this step links the Zig object file and C++ object file into a single binary: `bun-<os>-<arch>.zip`.
- on Windows, this runs the script: [`scripts/buildkite-link-bun.ps1`](scripts/buildkite-link-bun.ps1)
- on macOS and Linux, this runs the script: [`scripts/buildkite-link-bun.sh`](scripts/buildkite-link-bun.sh)
To speed up the build, there are two options:
- `--fast`: This disables the LTO (link-time optimization) step.
- without `--fast`: This runs the LTO step, which is the default. The binaries that are released to GitHub are always built with LTO.
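Put together, the same four steps can be reproduced locally on macOS or Linux by running the scripts that CI invokes, in order. This is only a sketch, under the assumption that the scripts can be run directly from the repository root and that `--fast` is a flag accepted by the link script:

```bash
# Rough local equivalent of the CI steps above (macOS/Linux); script paths are the ones CI uses.
./scripts/all-dependencies.sh            # 1. build-deps -> build/bun-deps
./scripts/build-bun-zig.sh               # 2. build-zig  -> build/bun-zig.o
./scripts/build-bun-cpp.sh               # 3. build-cpp  -> build/bun-cpp-objects.a
./scripts/buildkite-link-bun.sh --fast   # 4. link       -> bun-<os>-<arch>.zip, skipping LTO
```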
### Testing
### FAQ
> How do I add a new CI agent?
> How do I add/modify system dependencies?
> How do I SSH into a CI agent?
### Known issues
These are things that we know about, but haven't fixed or optimized yet.
- There is no `scripts/build-bun-zig.ps1` for Windows.
- The `build-deps` step does not cache in CI, so it re-builds each time (though it does use ccache). It attempts to check the `BUN_DEPS_CACHE_DIR` environment variable, but for some reason it doesn't work.
- Windows and Linux machines sometimes take up to 1-2 minutes to start tests. This is because robobun waits for the job to be scheduled before it provisions the VM. Instead, it could start provisioning during the link step, or keep a pool of idle VMs around (but it's unclear how much more expensive that would be).
- There are a limited number of macOS VMs. This is because they are expensive and manually provisioned, mostly through MacStadium. If wait times are too long we can just provision more, or buy some.
- To keep machines from sitting idle, robobun periodically checks for idle machines and terminates them. Before doing so, it checks whether the machine is connected to Buildkite as an agent. However, sometimes the machine picks up a job in that window, and the job is terminated along with the machine.

.buildkite/ci.yml (new file, +1523): file diff suppressed because it is too large

View File

@@ -0,0 +1,94 @@
#!/bin/bash
set -eo pipefail
function assert_main() {
if [[ "$BUILDKITE_PULL_REQUEST_REPO" && "$BUILDKITE_REPO" != "$BUILDKITE_PULL_REQUEST_REPO" ]]; then
echo "error: Cannot upload release from a fork"
exit 1
fi
if [ "$BUILDKITE_PULL_REQUEST" != "false" ]; then
echo "error: Cannot upload release from a pull request"
exit 1
fi
if [ "$BUILDKITE_BRANCH" != "main" ]; then
echo "error: Cannot upload release from a branch other than main"
exit 1
fi
}
function assert_buildkite_agent() {
if ! command -v buildkite-agent &> /dev/null; then
echo "error: Cannot find buildkite-agent, please install it:"
echo "https://buildkite.com/docs/agent/v3/install"
exit 1
fi
}
function assert_gh() {
if ! command -v gh &> /dev/null; then
echo "warning: gh is not installed, installing..."
if command -v brew &> /dev/null; then
brew install gh
else
echo "error: Cannot install gh, please install it:"
echo "https://github.com/cli/cli#installation"
exit 1
fi
fi
}
function assert_gh_token() {
local token=$(buildkite-agent secret get GITHUB_TOKEN)
if [ -z "$token" ]; then
echo "error: Cannot find GITHUB_TOKEN secret"
echo ""
echo "hint: Create a secret named GITHUB_TOKEN with a GitHub access token:"
echo "https://buildkite.com/docs/pipelines/buildkite-secrets"
exit 1
fi
export GH_TOKEN="$token"
}
function download_artifact() {
local name=$1
buildkite-agent artifact download "$name" .
if [ ! -f "$name" ]; then
echo "error: Cannot find Buildkite artifact: $name"
exit 1
fi
}
function upload_assets() {
local tag=$1
local files=${@:2}
gh release upload "$tag" $files --clobber --repo "$BUILDKITE_REPO"
}
assert_main
assert_buildkite_agent
assert_gh
assert_gh_token
declare artifacts=(
bun-darwin-aarch64.zip
bun-darwin-aarch64-profile.zip
bun-darwin-x64.zip
bun-darwin-x64-profile.zip
bun-linux-aarch64.zip
bun-linux-aarch64-profile.zip
bun-linux-x64.zip
bun-linux-x64-profile.zip
bun-linux-x64-baseline.zip
bun-linux-x64-baseline-profile.zip
bun-windows-x64.zip
bun-windows-x64-profile.zip
bun-windows-x64-baseline.zip
bun-windows-x64-baseline-profile.zip
)
for artifact in "${artifacts[@]}"; do
download_artifact $artifact
done
upload_assets "canary" "${artifacts[@]}"

View File

@@ -9,7 +9,7 @@ on:
inputs:
runs-on:
type: string
default: macos-12-large
default: macos-13-large
tag:
type: string
required: true
@@ -27,10 +27,12 @@ on:
type: boolean
env:
LLVM_VERSION: 16
LLVM_VERSION: 18
BUN_VERSION: 1.1.8
LC_CTYPE: "en_US.UTF-8"
LC_ALL: "en_US.UTF-8"
# LTO is disabled because we cannot use lld on macOS currently
BUN_ENABLE_LTO: "0"
jobs:
build-submodules:
@@ -53,16 +55,7 @@ jobs:
cat $(echo scripts/build*.sh scripts/all-dependencies.sh | tr " " "\n" | sort)
}
echo "hash=$(print_versions | shasum)" >> $GITHUB_OUTPUT
- if: ${{ !inputs.no-cache }}
name: Restore Cache
id: cache
uses: actions/cache/restore@v4
with:
path: ${{ runner.temp }}/bun-deps
key: bun-${{ inputs.tag }}-deps-${{ steps.hash.outputs.hash }}
# TODO: Figure out how to cache homebrew dependencies
- if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
name: Install Dependencies
- name: Install Dependencies
env:
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
HOMEBREW_NO_AUTO_UPDATE: 1
@@ -86,24 +79,16 @@ jobs:
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH
brew link --overwrite llvm@$LLVM_VERSION
- if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
name: Clone Submodules
- name: Clone Submodules
run: |
./scripts/update-submodules.sh
- name: Build Submodules
if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
env:
CPU_TARGET: ${{ inputs.cpu }}
BUN_DEPS_OUT_DIR: ${{ runner.temp }}/bun-deps
run: |
mkdir -p $BUN_DEPS_OUT_DIR
./scripts/all-dependencies.sh
- name: Save Cache
if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
uses: actions/cache/save@v4
with:
path: ${{ runner.temp }}/bun-deps
key: ${{ steps.cache.outputs.cache-primary-key }}
- name: Upload bun-${{ inputs.tag }}-deps
uses: actions/upload-artifact@v4
with:
@@ -147,14 +132,6 @@ jobs:
uses: ./.github/actions/setup-bun
with:
bun-version: ${{ env.BUN_VERSION }}
- if: ${{ !inputs.no-cache }}
name: Restore Cache
uses: actions/cache@v4
with:
path: ${{ runner.temp }}/ccache
key: bun-${{ inputs.tag }}-cpp-${{ hashFiles('Dockerfile', 'Makefile', 'CMakeLists.txt', 'build.zig', 'scripts/**', 'src/**', 'packages/bun-usockets/src/**', 'packages/bun-uws/src/**') }}
restore-keys: |
bun-${{ inputs.tag }}-cpp-
- name: Compile
env:
CPU_TARGET: ${{ inputs.cpu }}
@@ -244,18 +221,9 @@ jobs:
with:
name: bun-${{ inputs.tag }}-zig
path: ${{ runner.temp }}/release
- if: ${{ !inputs.no-cache }}
name: Restore Cache
uses: actions/cache@v4
with:
path: ${{ runner.temp }}/ccache
key: bun-${{ inputs.tag }}-cpp-${{ hashFiles('Dockerfile', 'Makefile', 'CMakeLists.txt', 'build.zig', 'scripts/**', 'src/**', 'packages/bun-usockets/src/**', 'packages/bun-uws/src/**') }}
restore-keys: |
bun-${{ inputs.tag }}-cpp-
- name: Link
env:
CPU_TARGET: ${{ inputs.cpu }}
CCACHE_DIR: ${{ runner.temp }}/ccache
run: |
SRC_DIR=$PWD
mkdir ${{ runner.temp }}/link-build
@@ -276,6 +244,12 @@ jobs:
chmod +x bun-profile bun
mkdir -p bun-${{ inputs.tag }}-profile/ bun-${{ inputs.tag }}/
mv bun-profile bun-${{ inputs.tag }}-profile/bun-profile
if [ -f bun-profile.dSYM ] || [ -d bun-profile.dSYM ]; then
mv bun-profile.dSYM bun-${{ inputs.tag }}-profile/bun-profile.dSYM
fi
if [ -f bun.dSYM ] || [ -d bun.dSYM ]; then
mv bun.dSYM bun-${{ inputs.tag }}/bun.dSYM
fi
mv bun bun-${{ inputs.tag }}/bun
zip -r bun-${{ inputs.tag }}-profile.zip bun-${{ inputs.tag }}-profile
zip -r bun-${{ inputs.tag }}.zip bun-${{ inputs.tag }}

View File

@@ -31,19 +31,23 @@ on:
env:
# Must specify exact version of LLVM for Windows
LLVM_VERSION: 16.0.6
LLVM_VERSION: 18.1.8
BUN_VERSION: ${{ inputs.bun-version }}
BUN_GARBAGE_COLLECTOR_LEVEL: 1
BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: 1
CI: true
USE_LTO: 1
jobs:
build-submodules:
name: Build Submodules
runs-on: ${{ inputs.runs-on }}
steps:
- name: Install VS2022 BuildTools 17.9.7
run: choco install -y visualstudio2022buildtools --version=117.9.7.0 --params "--add Microsoft.VisualStudio.Component.VC.Tools.x86.x64 --installChannelUri https://aka.ms/vs/17/release/180911598_-255012421/channel"
- name: Install Scoop
run: |
Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
Invoke-RestMethod -Uri https://get.scoop.sh | Invoke-Expression
Join-Path (Resolve-Path ~).Path "scoop\shims" >> $Env:GITHUB_PATH
- name: Setup Git
run: |
git config --global core.autocrlf false
@@ -74,14 +78,11 @@ jobs:
path: bun-deps
key: bun-${{ inputs.tag }}-deps-${{ steps.hash.outputs.hash }}
- if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
name: Install LLVM
uses: KyleMayes/install-llvm-action@8b37482c5a2997a3ab5dbf6561f8109e2eaa7d3b
with:
version: ${{ env.LLVM_VERSION }}
- if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
name: Install Ninja
name: Install LLVM and Ninja
run: |
choco install -y ninja
scoop install ninja
scoop install llvm@${{ env.LLVM_VERSION }}
scoop install nasm@2.16.01
- if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
name: Clone Submodules
run: |
@@ -91,9 +92,9 @@ jobs:
env:
CPU_TARGET: ${{ inputs.cpu }}
CCACHE_DIR: ccache
USE_LTO: 1
run: |
.\scripts\env.ps1 ${{ contains(inputs.tag, '-baseline') && '-Baseline' || '' }}
choco install -y nasm --version=2.16.01
$env:BUN_DEPS_OUT_DIR = (mkdir -Force "./bun-deps")
.\scripts\all-dependencies.ps1
- name: Save Cache
@@ -141,8 +142,11 @@ jobs:
needs: codegen
runs-on: ${{ inputs.runs-on }}
steps:
- name: Install VS2022 BuildTools 17.9.7
run: choco install -y visualstudio2022buildtools --version=117.9.7.0 --params "--add Microsoft.VisualStudio.Component.VC.Tools.x86.x64 --installChannelUri https://aka.ms/vs/17/release/180911598_-255012421/channel"
- name: Install Scoop
run: |
Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
Invoke-RestMethod -Uri https://get.scoop.sh | Invoke-Expression
Join-Path (Resolve-Path ~).Path "scoop\shims" >> $Env:GITHUB_PATH
- name: Setup Git
run: |
git config --global core.autocrlf false
@@ -151,13 +155,10 @@ jobs:
uses: actions/checkout@v4
with:
submodules: recursive
- name: Install LLVM
uses: KyleMayes/install-llvm-action@8b37482c5a2997a3ab5dbf6561f8109e2eaa7d3b
with:
version: ${{ env.LLVM_VERSION }}
- name: Install Ninja
- name: Install LLVM and Ninja
run: |
choco install -y ninja
scoop install ninja
scoop install llvm@${{ env.LLVM_VERSION }}
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
@@ -179,6 +180,7 @@ jobs:
env:
CPU_TARGET: ${{ inputs.cpu }}
CCACHE_DIR: ccache
USE_LTO: 1
run: |
# $CANARY_REVISION = if (Test-Path build/.canary_revision) { Get-Content build/.canary_revision } else { "0" }
$CANARY_REVISION = 0
@@ -188,6 +190,7 @@ jobs:
cd build
cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Release `
-DNO_CODEGEN=1 `
-DUSE_LTO=1 `
-DNO_CONFIGURE_DEPENDS=1 `
"-DCANARY=${CANARY_REVISION}" `
-DBUN_CPP_ONLY=1 ${{ contains(inputs.tag, '-baseline') && '-DUSE_BASELINE_BUILD=1' || '' }}
@@ -222,8 +225,11 @@ jobs:
- build-zig
- codegen
steps:
- name: Install VS2022 BuildTools 17.9.7
run: choco install -y visualstudio2022buildtools --version=117.9.7.0 --params "--add Microsoft.VisualStudio.Component.VC.Tools.x86.x64 --installChannelUri https://aka.ms/vs/17/release/180911598_-255012421/channel"
- name: Install Scoop
run: |
Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
Invoke-RestMethod -Uri https://get.scoop.sh | Invoke-Expression
Join-Path (Resolve-Path ~).Path "scoop\shims" >> $Env:GITHUB_PATH
- name: Setup Git
run: |
git config --global core.autocrlf false
@@ -232,13 +238,10 @@ jobs:
uses: actions/checkout@v4
with:
submodules: recursive
- name: Install LLVM
uses: KyleMayes/install-llvm-action@8b37482c5a2997a3ab5dbf6561f8109e2eaa7d3b
with:
version: ${{ env.LLVM_VERSION }}
- name: Install Ninja
run: |
choco install -y ninja
scoop install ninja
scoop install llvm@${{ env.LLVM_VERSION }}
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
@@ -286,6 +289,7 @@ jobs:
-DNO_CONFIGURE_DEPENDS=1 `
"-DCANARY=${CANARY_REVISION}" `
-DBUN_LINK_ONLY=1 `
-DUSE_LTO=1 `
"-DBUN_DEPS_OUT_DIR=$(Resolve-Path ../bun-deps)" `
"-DBUN_CPP_ARCHIVE=$(Resolve-Path ../bun-cpp/bun-cpp-objects.a)" `
"-DBUN_ZIG_OBJ_DIR=$(Resolve-Path ../bun-zig)" `

View File

@@ -84,7 +84,7 @@ jobs:
uses: ./.github/workflows/build-darwin.yml
secrets: inherit
with:
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-12-large' || 'macos-12' }}
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-13-large' || 'macos-13' }}
tag: darwin-x64
arch: x64
cpu: haswell
@@ -95,7 +95,7 @@ jobs:
uses: ./.github/workflows/build-darwin.yml
secrets: inherit
with:
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-12-large' || 'macos-12' }}
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-13-large' || 'macos-13' }}
tag: darwin-x64-baseline
arch: x64
cpu: nehalem
@@ -106,7 +106,7 @@ jobs:
uses: ./.github/workflows/build-darwin.yml
secrets: inherit
with:
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-darwin-aarch64' || 'macos-12' }}
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-darwin-aarch64' || 'macos-13' }}
tag: darwin-aarch64
arch: aarch64
cpu: native
@@ -175,7 +175,7 @@ jobs:
with:
run-id: ${{ inputs.run-id }}
pr-number: ${{ github.event.number }}
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-12-large' || 'macos-12' }}
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-13-large' || 'macos-13' }}
tag: darwin-x64
darwin-x64-baseline-test:
if: ${{ inputs.run-id || github.event_name == 'pull_request' }}
@@ -186,7 +186,7 @@ jobs:
with:
run-id: ${{ inputs.run-id }}
pr-number: ${{ github.event.number }}
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-12-large' || 'macos-12' }}
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-13-large' || 'macos-13' }}
tag: darwin-x64-baseline
darwin-aarch64-test:
if: ${{ inputs.run-id || github.event_name == 'pull_request' }}
@@ -197,7 +197,7 @@ jobs:
with:
run-id: ${{ inputs.run-id }}
pr-number: ${{ github.event.number }}
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-darwin-aarch64' || 'macos-12' }}
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-darwin-aarch64' || 'macos-13' }}
tag: darwin-aarch64
windows-x64-test:
if: ${{ inputs.run-id || github.event_name == 'pull_request' }}

View File

@@ -90,7 +90,7 @@ jobs:
uses: ./.github/workflows/build-darwin.yml
secrets: inherit
with:
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-12-large' || 'macos-12' }}
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-13-large' || 'macos-13' }}
tag: darwin-x64
arch: x64
cpu: haswell
@@ -100,7 +100,7 @@ jobs:
uses: ./.github/workflows/build-darwin.yml
secrets: inherit
with:
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-12-large' || 'macos-12' }}
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-13-large' || 'macos-13' }}
tag: darwin-x64-baseline
arch: x64
cpu: nehalem
@@ -110,7 +110,7 @@ jobs:
uses: ./.github/workflows/build-darwin.yml
secrets: inherit
with:
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-darwin-aarch64' || 'macos-12' }}
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-darwin-aarch64' || 'macos-13' }}
tag: darwin-aarch64
arch: aarch64
cpu: native

View File

@@ -14,10 +14,11 @@ on:
type: string
description: The workflow ID to download artifacts (skips the build step)
pull_request:
paths-ignore:
- .vscode/**/*
- docs/**/*
- examples/**/*
paths:
- ".github/workflows/lint-cpp.yml"
- "**/*.cpp"
- "src/deps/**/*"
- "CMakeLists.txt"
jobs:
lint-cpp:

View File

@@ -0,0 +1,89 @@
name: Comment on updated submodule
on:
pull_request_target:
paths:
- "src/generated_versions_list.zig"
- ".github/workflows/on-submodule-update.yml"
jobs:
comment:
name: Comment
runs-on: ubuntu-latest
if: ${{ github.repository_owner == 'oven-sh' }}
permissions:
contents: read
pull-requests: write
issues: write
steps:
- name: Checkout current
uses: actions/checkout@v4
with:
sparse-checkout: |
src
- name: Hash generated versions list
id: hash
run: |
echo "hash=$(sha256sum src/generated_versions_list.zig | cut -d ' ' -f 1)" >> $GITHUB_OUTPUT
- name: Checkout base
uses: actions/checkout@v4
with:
ref: ${{ github.base_ref }}
sparse-checkout: |
src
- name: Hash base
id: base
run: |
echo "base=$(sha256sum src/generated_versions_list.zig | cut -d ' ' -f 1)" >> $GITHUB_OUTPUT
- name: Compare
id: compare
run: |
if [ "${{ steps.hash.outputs.hash }}" != "${{ steps.base.outputs.base }}" ]; then
echo "changed=true" >> $GITHUB_OUTPUT
else
echo "changed=false" >> $GITHUB_OUTPUT
fi
- name: Find Comment
id: comment
uses: peter-evans/find-comment@v3
with:
issue-number: ${{ github.event.pull_request.number }}
comment-author: github-actions[bot]
body-includes: <!-- generated-comment submodule-updated -->
- name: Write Warning Comment
uses: peter-evans/create-or-update-comment@v4
if: steps.compare.outputs.changed == 'true'
with:
comment-id: ${{ steps.comment.outputs.comment-id }}
issue-number: ${{ github.event.pull_request.number }}
edit-mode: replace
body: |
⚠️ **Warning:** @${{ github.actor }}, this PR has changes to submodule versions.
If this change was intentional, please ignore this message. If not, please undo changes to submodules and rebase your branch.
<!-- generated-comment submodule-updated -->
- name: Add labels
uses: actions-cool/issues-helper@v3
if: steps.compare.outputs.changed == 'true'
with:
actions: "add-labels"
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.pull_request.number }}
labels: "changed-submodules"
- name: Remove labels
uses: actions-cool/issues-helper@v3
if: steps.compare.outputs.changed == 'false'
with:
actions: "remove-labels"
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.pull_request.number }}
labels: "changed-submodules"
- name: Delete outdated comment
uses: actions-cool/issues-helper@v3
if: steps.compare.outputs.changed == 'false' && steps.comment.outputs.comment-id != ''
with:
actions: "delete-comment"
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.pull_request.number }}
comment-id: ${{ steps.comment.outputs.comment-id }}
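The workflow hashes `src/generated_versions_list.zig` on the pull request head and on the base branch, and only warns and labels the PR when the two hashes differ. A rough local equivalent of that check, assuming `origin/main` is the base branch (a sketch, not part of the workflow):

```bash
# Compare the generated versions list against the base branch to spot accidental submodule bumps.
base=$(git show origin/main:src/generated_versions_list.zig | sha256sum | cut -d ' ' -f 1)
head=$(sha256sum src/generated_versions_list.zig | cut -d ' ' -f 1)
if [ "$base" != "$head" ]; then
  echo "changed=true"   # the workflow would post the warning comment and add the label
else
  echo "changed=false"  # the workflow would remove the label and delete any stale comment
fi
```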

View File

@@ -63,7 +63,7 @@ jobs:
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: "1.0.21"
bun-version: "1.1.20"
- name: Install Dependencies
run: bun install
- name: Sign Release
@@ -88,7 +88,7 @@ jobs:
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: "1.0.21"
bun-version: "1.1.20"
- name: Install Dependencies
run: bun install
- name: Release
@@ -117,7 +117,7 @@ jobs:
if: ${{ env.BUN_VERSION != 'canary' }}
uses: ./.github/actions/setup-bun
with:
bun-version: "1.0.21"
bun-version: "1.1.20"
- name: Setup Bun
if: ${{ env.BUN_VERSION == 'canary' }}
uses: ./.github/actions/setup-bun
@@ -259,7 +259,7 @@ jobs:
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: "1.0.21"
bun-version: "1.1.20"
- name: Install Dependencies
run: bun install
- name: Release
@@ -270,6 +270,24 @@ jobs:
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY}}
AWS_ENDPOINT: ${{ secrets.AWS_ENDPOINT }}
AWS_BUCKET: bun
notify-sentry:
name: Notify Sentry
runs-on: ubuntu-latest
needs: s3
steps:
- name: Notify Sentry
uses: getsentry/action-release@v1.7.0
env:
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
with:
ignore_missing: true
ignore_empty: true
version: ${{ env.BUN_VERSION }}
environment: production
bump:
name: "Bump version"
runs-on: ubuntu-latest

View File

@@ -29,7 +29,7 @@ jobs:
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: "1.1.8"
bun-version: "1.1.20"
- name: Setup Zig
uses: goto-bus-stop/setup-zig@c7b6cdd3adba8f8b96984640ff172c37c93f73ee
with:

View File

@@ -17,7 +17,7 @@ on:
jobs:
lint-cpp:
name: Lint C++
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-13-xlarge' || 'macos-12' }}
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-13-xlarge' || 'macos-13' }}
steps:
- name: Checkout
uses: actions/checkout@v4

View File

@@ -80,3 +80,15 @@ jobs:
bun upgrade --canary
# bun upgrade --stable <- to downgrade
```
# If notifying sentry fails, don't fail the rest of the build.
- name: Notify Sentry
uses: getsentry/action-release@v1.7.0
env:
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
with:
ignore_missing: true
ignore_empty: true
version: ${{ github.event.workflow_run.head_sha || github.sha }}-canary
environment: canary

.gitignore (vendored, +1)

@@ -145,3 +145,4 @@ zig-cache
zig-out
test/node.js/upstream
.zig-cache
scripts/env.local

.gitmodules (vendored, +8)

@@ -76,6 +76,14 @@ ignore = dirty
depth = 1
shallow = true
fetchRecurseSubmodules = false
[submodule "src/deps/libuv"]
path = src/deps/libuv
url = https://github.com/libuv/libuv.git
ignore = dirty
depth = 1
shallow = true
fetchRecurseSubmodules = false
branch = v1.48.0
[submodule "zig"]
path = src/deps/zig
url = https://github.com/oven-sh/zig

.vscode/launch.json (generated, vendored, 11 changed lines)

@@ -145,13 +145,14 @@
"request": "launch",
"name": "bun run [file]",
"program": "${workspaceFolder}/build/bun-debug",
"args": ["run", "${fileBasename}"],
"args": ["run", "${file}"],
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "0",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_EventLoop": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
"BUN_DEBUG_ALL": "1",
},
"console": "internalConsole",
},
@@ -445,8 +446,8 @@
"request": "launch",
"name": "bun test [*] (ci)",
"program": "node",
"args": ["src/runner.node.mjs"],
"cwd": "${workspaceFolder}/packages/bun-internal-test",
"args": ["test/runner.node.mjs"],
"cwd": "${workspaceFolder}",
"console": "internalConsole",
},
// Windows: bun test [file]
@@ -1093,8 +1094,8 @@
"request": "launch",
"name": "Windows: bun test [*] (ci)",
"program": "node",
"args": ["src/runner.node.mjs"],
"cwd": "${workspaceFolder}/packages/bun-internal-test",
"args": ["test/runner.node.mjs"],
"cwd": "${workspaceFolder}",
"console": "internalConsole",
},
],

View File

@@ -27,7 +27,7 @@
// Zig
"zig.initialSetupDone": true,
"zig.buildOption": "build",
"zig.zls.zigLibPath": "src/deps/zig/lib",
"zig.zls.zigLibPath": "${workspaceFolder}/src/deps/zig/lib",
"zig.buildArgs": ["-Dgenerated-code=./build/codegen"],
"zig.zls.buildOnSaveStep": "check",
// "zig.zls.enableBuildOnSave": true,

View File

@@ -2,8 +2,9 @@ cmake_minimum_required(VERSION 3.22)
cmake_policy(SET CMP0091 NEW)
cmake_policy(SET CMP0067 NEW)
set(Bun_VERSION "1.1.17")
set(WEBKIT_TAG 5bbfe7e880090b9d0b5feaf3563e85957dd7b10d)
set(CMAKE_POLICY_DEFAULT_CMP0069 NEW)
set(Bun_VERSION "1.1.21")
set(WEBKIT_TAG 49907bff8781719bc2ded068b0c934f6d0074d1e)
set(BUN_WORKDIR "${CMAKE_CURRENT_BINARY_DIR}")
message(STATUS "Configuring Bun ${Bun_VERSION} in ${BUN_WORKDIR}")
@@ -14,7 +15,6 @@ set(CMAKE_C_STANDARD 17)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
set(CMAKE_C_STANDARD_REQUIRED ON)
# Should not start with v
# Used in process.version, process.versions.node, napi, and elsewhere
set(REPORTED_NODEJS_VERSION "22.3.0")
@@ -22,6 +22,7 @@ set(REPORTED_NODEJS_VERSION "22.3.0")
# If we do not set this, it will crash at startup on the first memory allocation.
if(NOT WIN32 AND NOT APPLE)
set(CMAKE_CXX_EXTENSIONS ON)
set(CMAKE_POSITION_INDEPENDENT_CODE FALSE)
endif()
# --- Build Type ---
@@ -38,6 +39,13 @@ else()
message(STATUS "The CMake build type is: ${CMAKE_BUILD_TYPE}")
endif()
if(WIN32 AND NOT CMAKE_CL_SHOWINCLUDES_PREFIX)
# workaround until cmake fix is shipped https://github.com/ninja-build/ninja/issues/2280
# './build/.ninja_deps' may need to be deleted, the bug is "Note: including file: ..." is saved
# as part of some file paths
set(CMAKE_CL_SHOWINCLUDES_PREFIX "Note: including file:")
endif()
if(CMAKE_BUILD_TYPE STREQUAL "Debug")
set(DEBUG ON)
set(DEFAULT_ZIG_OPTIMIZE "Debug")
@@ -50,11 +58,8 @@ elseif(CMAKE_BUILD_TYPE STREQUAL "Release")
set(DEFAULT_ZIG_OPTIMIZE "ReleaseFast")
if(WIN32)
# lld-link will strip it for you, so we can build directly to bun.exe
# Debug symbols are in a separate file: bun.pdb
set(bun "bun")
# TODO(@paperdave): Remove this
# it is enabled for the time being to make sure to catch more bugs in the experimental windows builds
set(DEFAULT_ZIG_OPTIMIZE "ReleaseSafe")
else()
if(ZIG_OPTIMIZE STREQUAL "Debug")
@@ -67,7 +72,7 @@ endif()
# --- MacOS SDK ---
if(APPLE AND DEFINED ENV{CI})
set(CMAKE_OSX_DEPLOYMENT_TARGET "12.0")
set(CMAKE_OSX_DEPLOYMENT_TARGET "13.0")
endif()
if(APPLE AND NOT CMAKE_OSX_DEPLOYMENT_TARGET)
@@ -111,7 +116,11 @@ endif()
# we do some extra work afterwards to double-check, and we will rerun BUN_FIND_LLVM if the compiler did not match.
#
# If the user passes -DLLVM_PREFIX, most of this logic is skipped, but we still warn if invalid.
set(LLVM_VERSION 16)
if(WIN32 OR APPLE)
set(LLVM_VERSION 18)
else()
set(LLVM_VERSION 16)
endif()
macro(BUN_FIND_LLVM)
find_program(
@@ -145,11 +154,12 @@ macro(BUN_FIND_LLVM)
PATHS ENV PATH ${PLATFORM_LLVM_SEARCH_PATHS}
DOC "Path to LLVM ${LLVM_VERSION}'s llvm-strip binary"
)
find_program(
STRIP
NAMES strip
PATHS ENV PATH ${PLATFORM_LLVM_SEARCH_PATHS}
DOC "Path to LLVM ${LLVM_VERSION}'s llvm-strip binary"
DOC "Path to strip binary"
)
find_program(
DSYMUTIL
@@ -321,6 +331,16 @@ option(USE_STATIC_LIBATOMIC "Statically link libatomic, requires the presence of
option(USE_LTO "Enable Link-Time Optimization" ${DEFAULT_LTO})
if(APPLE AND USE_LTO)
set(USE_LTO OFF)
message(WARNING "Link-Time Optimization is not supported on macOS because it requires -fuse-ld=lld and lld causes many segfaults on macOS (likely related to stack size)")
endif()
if(WIN32 AND USE_LTO)
set(CMAKE_LINKER_TYPE LLD)
set(CMAKE_INTERPROCEDURAL_OPTIMIZATION OFF)
endif()
option(BUN_TIDY_ONLY "Only run clang-tidy" OFF)
option(BUN_TIDY_ONLY_EXTRA " Only run clang-tidy, with extra checks for local development" OFF)
@@ -614,7 +634,7 @@ set(BUN_DEPS_DIR "${BUN_SRC}/deps")
set(BUN_CODEGEN_SRC "${BUN_SRC}/codegen")
if(NOT BUN_DEPS_OUT_DIR)
set(BUN_DEPS_OUT_DIR "${BUN_DEPS_DIR}")
set(BUN_DEPS_OUT_DIR "${CMAKE_CURRENT_SOURCE_DIR}/build/bun-deps")
endif()
set(BUN_RAW_SOURCES, "")
@@ -632,16 +652,6 @@ file(GLOB BUN_CPP ${CONFIGURE_DEPENDS}
)
list(APPEND BUN_RAW_SOURCES ${BUN_CPP})
# -- Brotli --
set(BROTLI_SRC "${CMAKE_CURRENT_SOURCE_DIR}/src/deps/brotli")
file(GLOB BROTLI_FILES ${CONFIGURE_DEPENDS}
"${BROTLI_SRC}/common/*.c"
"${BROTLI_SRC}/enc/*.c"
"${BROTLI_SRC}/dec/*.c"
)
list(APPEND BUN_RAW_SOURCES ${BROTLI_FILES})
include_directories("${BUN_DEPS_DIR}/brotli/include")
# -- uSockets --
set(USOCKETS_SRC "${CMAKE_CURRENT_SOURCE_DIR}/packages/bun-usockets/src")
file(GLOB USOCKETS_FILES ${CONFIGURE_DEPENDS}
@@ -772,6 +782,8 @@ if(NOT NO_CODEGEN)
"${BUN_SRC}/js/thirdparty/*.ts"
"${BUN_SRC}/js/internal/*.js"
"${BUN_SRC}/js/internal/*.ts"
"${BUN_SRC}/js/internal/util/*.js"
"${BUN_SRC}/js/internal/fs/*.ts"
"${BUN_SRC}/js/node/*.js"
"${BUN_SRC}/js/node/*.ts"
"${BUN_SRC}/js/thirdparty/*.js"
@@ -855,13 +867,24 @@ file(GLOB ZIG_FILES
"${BUN_SRC}/*/*/*/*/*.zig"
)
if(NOT BUN_ZIG_OBJ_FORMAT)
# To use LLVM bitcode from Zig, more work needs to be done. Currently, an install of
# LLVM 18.1.7 is not compatible with the bitcode that Zig 0.13 (which bundles LLVM 18.1.7) outputs.
# Change to "bc" to experiment; an "Invalid record" error means the output is not valid bitcode.
set(BUN_ZIG_OBJ_FORMAT "obj")
endif()
if(NOT BUN_ZIG_OBJ_DIR)
set(BUN_ZIG_OBJ_DIR "${BUN_WORKDIR}/CMakeFiles")
endif()
get_filename_component(BUN_ZIG_OBJ_DIR "${BUN_ZIG_OBJ_DIR}" REALPATH BASE_DIR "${CMAKE_BINARY_DIR}")
set(BUN_ZIG_OBJ "${BUN_ZIG_OBJ_DIR}/bun-zig.o")
if(WIN32)
set(BUN_ZIG_OBJ "${BUN_ZIG_OBJ_DIR}/bun-zig.o")
else()
set(BUN_ZIG_OBJ "${BUN_ZIG_OBJ_DIR}/bun-zig.o")
endif()
set(USES_TERMINAL_NOT_IN_CI "")
@@ -876,6 +899,7 @@ if(NOT BUN_LINK_ONLY AND NOT BUN_CPP_ONLY)
"${ZIG_COMPILER}" "build" "obj"
"--zig-lib-dir" "${ZIG_LIB_DIR}"
"--prefix" "${BUN_ZIG_OBJ_DIR}"
"--verbose"
"-Dgenerated-code=${BUN_WORKDIR}/codegen"
"-freference-trace=10"
"-Dversion=${Bun_VERSION}"
@@ -885,6 +909,7 @@ if(NOT BUN_LINK_ONLY AND NOT BUN_CPP_ONLY)
"-Dtarget=${ZIG_TARGET}"
"-Denable_logs=${ENABLE_LOGS}"
"-Dreported_nodejs_version=${REPORTED_NODEJS_VERSION}"
"-Dobj_format=${BUN_ZIG_OBJ_FORMAT}"
DEPENDS
"${CMAKE_CURRENT_SOURCE_DIR}/build.zig"
"${ZIG_FILES}"
@@ -948,12 +973,15 @@ set_target_properties(${bun} PROPERTIES
VISIBILITY_INLINES_HIDDEN YES
)
if(APPLE)
add_compile_definitions("__DARWIN_NON_CANCELABLE=1")
endif()
add_compile_definitions(
# TODO: are all of these variables strictly necessary?
"_HAS_EXCEPTIONS=0"
"LIBUS_USE_OPENSSL=1"
"UWS_HTTPRESPONSE_NO_WRITEMARK=1"
"LIBUS_USE_BORINGSSL=1"
"WITH_BORINGSSL=1"
"STATICALLY_LINKED_WITH_JavaScriptCore=1"
@@ -1058,7 +1086,7 @@ elseif(CMAKE_BUILD_TYPE STREQUAL "Release")
if(NOT WIN32)
if(USE_LTO)
list(APPEND LTO_FLAG "-flto=full" "-emit-llvm")
list(APPEND LTO_FLAG "-flto=full" "-emit-llvm" "-fwhole-program-vtables" "-fforce-emit-vtables")
endif()
# Leave -Werror=unused off in release builds so we avoid errors from being used in ASSERT
@@ -1077,13 +1105,38 @@ elseif(CMAKE_BUILD_TYPE STREQUAL "Release")
set(LTO_LINK_FLAG "")
if(USE_LTO)
# -emit-llvm seems to not be supported or under a different name on Windows.
target_compile_options(${bun} PUBLIC -Xclang -emit-llvm-bc)
list(APPEND LTO_FLAG "-flto=full")
list(APPEND LTO_LINK_FLAG "-flto=full")
list(APPEND LTO_LINK_FLAG "/LTCG")
list(APPEND LTO_LINK_FLAG "/OPT:REF")
list(APPEND LTO_LINK_FLAG "/OPT:NOICF")
endif()
target_compile_options(${bun} PUBLIC /O2 ${LTO_FLAG})
target_link_options(${bun} PUBLIC ${LTO_LINK_FLAG} /DEBUG:FULL)
target_compile_options(${bun} PUBLIC
/O2
${LTO_FLAG}
/Gy
/Gw
/GF
/GA
)
target_link_options(${bun} PUBLIC
${LTO_LINK_FLAG}
/DEBUG:FULL
/delayload:ole32.dll
/delayload:WINMM.dll
/delayload:dbghelp.dll
/delayload:VCRUNTIME140_1.dll
# libuv loads these two immediately, but for some reason it seems to still be slightly faster to delayload them
/delayload:WS2_32.dll
/delayload:WSOCK32.dll
/delayload:ADVAPI32.dll
/delayload:IPHLPAPI.dll
)
endif()
endif()
@@ -1101,6 +1154,11 @@ else()
# On arm macOS, we can set it to a minimum of the M1 cpu set. this might be the default already.
target_compile_options(${bun} PUBLIC "-mcpu=apple-m1")
endif()
if(NOT WIN32 AND NOT APPLE AND ARCH STREQUAL "aarch64")
# on arm64 linux, we set a minimum of armv8
target_compile_options(${bun} PUBLIC -march=armv8-a+crc -mtune=ampere1)
endif()
endif()
target_compile_options(${bun} PUBLIC -ferror-limit=${ERROR_LIMIT})
@@ -1114,23 +1172,29 @@ if(WIN32)
"BORINGSSL_NO_CXX=1" # lol
)
# set_property(TARGET ${bun} PROPERTY MSVC_RUNTIME_LIBRARY "MultiThreaded")
set_property(TARGET ${bun} PROPERTY MSVC_RUNTIME_LIBRARY "MultiThreadedDLL")
# set_property(TARGET ${bun} PROPERTY MSVC_RUNTIME_LIBRARY "MultiThreaded$<$<CONFIG:Debug>:Debug>")
set_property(TARGET ${bun} PROPERTY MSVC_RUNTIME_LIBRARY "MultiThreaded")
target_compile_options(${bun} PUBLIC "/EHsc" "/GR-")
target_link_options(${bun} PUBLIC "/STACK:0x1200000,0x100000" "/DEF:${BUN_SRC}/symbols.def")
target_compile_options(${bun} PUBLIC "/EHsc" "/GR-" -Xclang -fno-c++-static-destructors)
target_link_options(${bun} PUBLIC "/STACK:0x1200000,0x100000" "/DEF:${BUN_SRC}/symbols.def" "/errorlimit:0")
else()
target_compile_options(${bun} PUBLIC
-fPIC
-mtune=${CPU_TARGET}
-fconstexpr-steps=2542484
-fconstexpr-depth=54
-fno-exceptions
-fno-asynchronous-unwind-tables
-fno-unwind-tables
-fno-c++-static-destructors
-fvisibility=hidden
-fvisibility-inlines-hidden
-fno-rtti
-fno-omit-frame-pointer
-mno-omit-leaf-frame-pointer
-fno-pic
-fno-pie
-faddrsig
)
endif()
@@ -1140,17 +1204,18 @@ if(APPLE)
target_link_options(${bun} PUBLIC "-Wl,-stack_size,0x1200000")
target_link_options(${bun} PUBLIC "-exported_symbols_list" "${BUN_SRC}/symbols.txt")
set_target_properties(${bun} PROPERTIES LINK_DEPENDS "${BUN_SRC}/symbols.txt")
target_link_options(${bun} PUBLIC "-fno-keep-static-consts")
target_link_libraries(${bun} PRIVATE "resolv")
endif()
if(UNIX AND NOT APPLE)
target_link_options(${bun} PUBLIC
"-fuse-ld=lld"
"-static-libstdc++"
"-static-libgcc"
"-Wl,-z,now"
-fuse-ld=lld
-fno-pic
-static-libstdc++
-static-libgcc
"-Wl,-no-pie"
"-Wl,-icf=safe"
"-Wl,--as-needed"
"-Wl,--gc-sections"
"-Wl,-z,stack-size=12800000"
@@ -1179,6 +1244,8 @@ if(UNIX AND NOT APPLE)
"-rdynamic"
"-Wl,--dynamic-list=${BUN_SRC}/symbols.dyn"
"-Wl,--version-script=${BUN_SRC}/linker.lds"
-Wl,-z,lazy
-Wl,-z,norelro
)
target_link_libraries(${bun} PRIVATE "c")
@@ -1212,12 +1279,16 @@ endif()
# --- Stripped Binary "bun"
if(CMAKE_BUILD_TYPE STREQUAL "Release" AND NOT WIN32 AND NOT ASSERT_ENABLED)
# add_custom_command(
# TARGET ${bun}
# POST_BUILD
# COMMAND ${DSYMUTIL} -o ${BUN_WORKDIR}/bun.dSYM ${BUN_WORKDIR}/${bun}
# COMMENT "Stripping Symbols"
# )
# if(CI AND APPLE)
if(APPLE)
add_custom_command(
TARGET ${bun}
POST_BUILD
COMMAND ${DSYMUTIL} -o ${BUN_WORKDIR}/${bun}.dSYM ${BUN_WORKDIR}/${bun}
COMMENT "Generating .dSYM"
)
endif()
add_custom_command(
TARGET ${bun}
POST_BUILD
@@ -1385,6 +1456,11 @@ if(USE_STATIC_SQLITE)
"SQLITE_ENABLE_JSON1=1"
"SQLITE_ENABLE_MATH_FUNCTIONS=1"
)
if(WIN32)
target_compile_options(sqlite3 PRIVATE /MT /U_DLL)
endif()
target_link_libraries(${bun} PRIVATE sqlite3)
message(STATUS "Using static sqlite3")
target_compile_definitions(${bun} PRIVATE "LAZY_LOAD_SQLITE=0")
@@ -1393,6 +1469,24 @@ else()
target_compile_definitions(${bun} PRIVATE "LAZY_LOAD_SQLITE=1")
endif()
# -- Brotli --
set(BROTLI_SRC "${CMAKE_CURRENT_SOURCE_DIR}/src/deps/brotli")
file(GLOB BROTLI_FILES ${CONFIGURE_DEPENDS}
"${BROTLI_SRC}/common/*.c"
"${BROTLI_SRC}/enc/*.c"
"${BROTLI_SRC}/dec/*.c"
)
add_library(brotli STATIC ${BROTLI_FILES})
target_include_directories(brotli PRIVATE "${BROTLI_SRC}/include")
target_compile_definitions(brotli PRIVATE "BROTLI_STATIC")
if(WIN32)
target_compile_options(brotli PRIVATE /MT /U_DLL)
endif()
target_link_libraries(${bun} PRIVATE brotli)
include_directories("${BUN_DEPS_DIR}/brotli/include")
if(USE_CUSTOM_LSHPACK)
include_directories(${BUN_DEPS_DIR}/ls-hpack)
@@ -1412,7 +1506,6 @@ if(NOT WIN32)
target_link_libraries(${bun} PRIVATE "${WEBKIT_LIB_DIR}/libJavaScriptCore.a")
target_link_libraries(${bun} PRIVATE "${WEBKIT_LIB_DIR}/libbmalloc.a")
else()
target_link_options(${bun} PRIVATE "-static")
target_link_libraries(${bun} PRIVATE
"${WEBKIT_LIB_DIR}/WTF.lib"
"${WEBKIT_LIB_DIR}/JavaScriptCore.lib"
@@ -1422,10 +1515,10 @@ else()
winmm
bcrypt
ntdll
ucrt
userenv
dbghelp
wsock32 # ws2_32 required by TransmitFile aka sendfile on windows
delayimp.lib
)
endif()

View File

@@ -2,6 +2,11 @@ Configuring a development environment for Bun can take 10-30 minutes depending o
If you are using Windows, please refer to [this guide](/docs/project/building-windows)
{% details summary="For Ubuntu users" %}
TL;DR: Ubuntu 22.04 is suggested.
Bun currently requires `glibc >= 2.32` for development, which means that on Ubuntu 20.04 (glibc == 2.31) you will likely hit `error: undefined symbol: __libc_single_threaded` and need extra configuration. Also, according to this [issue](https://github.com/llvm/llvm-project/issues/97314), LLVM 16 is no longer maintained for Ubuntu 24.04 (noble); instead, you may want to install LLVM 16 via `brew` on Ubuntu 24.04.
{% /details %}
## Install Dependencies
Using your system's package manager, install Bun's dependencies:
@@ -107,7 +112,7 @@ $ export PATH="$PATH:/usr/lib/llvm16/bin"
{% /codetabs %}
> ⚠️ Ubuntu distributions may require installation of the C++ standard library independently. See the [troubleshooting section](#span-file-not-found-on-ubuntu) for more information.
> ⚠️ Ubuntu distributions (<= 20.04) may require installation of the C++ standard library independently. See the [troubleshooting section](#span-file-not-found-on-ubuntu) for more information.
## Building Bun
@@ -311,3 +316,12 @@ $ bun setup -DUSE_STATIC_LIBATOMIC=OFF
```
The built version of Bun may not work on other systems if compiled this way.
## ccache conflicts with building TinyCC on macOS
If you run into issues with `ccache` when building TinyCC, try reinstalling ccache:
```bash
brew uninstall ccache
brew install ccache
```

View File

@@ -52,11 +52,8 @@ ENV CI 1
ENV CPU_TARGET=${CPU_TARGET}
ENV BUILDARCH=${BUILDARCH}
ENV BUN_DEPS_OUT_DIR=${BUN_DEPS_OUT_DIR}
ENV BUN_ENABLE_LTO 1
ENV CXX=clang++-${LLVM_VERSION}
ENV CC=clang-${LLVM_VERSION}
ENV AR=/usr/bin/llvm-ar-${LLVM_VERSION}
ENV LD=lld-${LLVM_VERSION}
ENV LC_CTYPE=en_US.UTF-8
ENV LC_ALL=en_US.UTF-8
@@ -93,6 +90,8 @@ RUN install_packages \
clangd-${LLVM_VERSION} \
libc++-${LLVM_VERSION}-dev \
libc++abi-${LLVM_VERSION}-dev \
llvm-${LLVM_VERSION}-runtime \
llvm-${LLVM_VERSION}-dev \
make \
cmake \
ninja-build \
@@ -119,6 +118,15 @@ RUN install_packages \
&& ln -sf /usr/bin/lldb-${LLVM_VERSION} /usr/bin/lldb \
&& ln -sf /usr/bin/clangd-${LLVM_VERSION} /usr/bin/clangd \
&& ln -sf /usr/bin/llvm-ar-${LLVM_VERSION} /usr/bin/llvm-ar \
&& ln -sf /usr/bin/ld.lld /usr/bin/ld \
&& ln -sf /usr/bin/llvm-ranlib-${LLVM_VERSION} /usr/bin/ranlib \
&& ln -sf /usr/bin/clang /usr/bin/cc \
&& ln -sf /usr/bin/clang /usr/bin/c89 \
&& ln -sf /usr/bin/clang /usr/bin/c99 \
&& ln -sf /usr/bin/clang++ /usr/bin/c++ \
&& ln -sf /usr/bin/clang++ /usr/bin/g++ \
&& ln -sf /usr/bin/llvm-ar /usr/bin/ar \
&& ln -sf /usr/bin/clang /usr/bin/gcc \
&& arch="$(dpkg --print-architecture)" \
&& case "${arch##*-}" in \
amd64) variant="x64";; \
@@ -131,6 +139,7 @@ RUN install_packages \
&& ln -s /usr/bin/bun /usr/bin/bunx \
&& rm -rf bun-linux-${variant} bun-linux-${variant}.zip \
&& mkdir -p ${BUN_DIR} ${BUN_DEPS_OUT_DIR}
# && if [ -n "${SCCACHE_BUCKET}" ]; then \
# echo "Setting up sccache" \
# && wget https://github.com/mozilla/sccache/releases/download/v0.5.4/sccache-v0.5.4-${BUILD_MACHINE_ARCH}-unknown-linux-musl.tar.gz \
@@ -167,13 +176,14 @@ ENV CCACHE_DIR=${CCACHE_DIR}
COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/c-ares ${BUN_DIR}/src/deps/c-ares
COPY scripts ${BUN_DIR}/scripts
WORKDIR $BUN_DIR
RUN --mount=type=cache,target=${CCACHE_DIR} \
cd $BUN_DIR \
&& make c-ares \
&& rm -rf ${BUN_DIR}/src/deps/c-ares ${BUN_DIR}/Makefile
&& bash ./scripts/build-cares.sh \
&& rm -rf ${BUN_DIR}/src/deps/c-ares ${BUN_DIR}/Makefile ${BUN_DIR}/scripts
FROM bun-base as lolhtml
@@ -204,13 +214,14 @@ ENV CPU_TARGET=${CPU_TARGET}
COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/mimalloc ${BUN_DIR}/src/deps/mimalloc
COPY scripts ${BUN_DIR}/scripts
ARG CCACHE_DIR=/ccache
ENV CCACHE_DIR=${CCACHE_DIR}
RUN --mount=type=cache,target=${CCACHE_DIR} \
cd ${BUN_DIR} \
&& make mimalloc \
&& bash ./scripts/build-mimalloc.sh \
&& rm -rf src/deps/mimalloc Makefile
FROM bun-base as mimalloc-debug
@@ -240,14 +251,17 @@ ARG CCACHE_DIR=/ccache
ENV CCACHE_DIR=${CCACHE_DIR}
COPY Makefile ${BUN_DIR}/Makefile
COPY CMakeLists.txt ${BUN_DIR}/CMakeLists.txt
COPY scripts ${BUN_DIR}/scripts
COPY src/deps/zlib ${BUN_DIR}/src/deps/zlib
COPY package.json bun.lockb Makefile .gitmodules ${BUN_DIR}/
WORKDIR $BUN_DIR
RUN --mount=type=cache,target=${CCACHE_DIR} \
cd $BUN_DIR \
&& make zlib \
&& rm -rf src/deps/zlib Makefile
&& bash ./scripts/build-zlib.sh && rm -rf src/deps/zlib scripts
FROM bun-base as libarchive
@@ -286,6 +300,7 @@ ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY Makefile ${BUN_DIR}/Makefile
COPY scripts ${BUN_DIR}/scripts
COPY src/deps/boringssl ${BUN_DIR}/src/deps/boringssl
WORKDIR $BUN_DIR
@@ -295,7 +310,7 @@ ENV CCACHE_DIR=${CCACHE_DIR}
RUN --mount=type=cache,target=${CCACHE_DIR} \
cd ${BUN_DIR} \
&& make boringssl \
&& bash ./scripts/build-boringssl.sh \
&& rm -rf src/deps/boringssl Makefile
@@ -311,12 +326,14 @@ ENV CCACHE_DIR=${CCACHE_DIR}
COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/zstd ${BUN_DIR}/src/deps/zstd
COPY scripts ${BUN_DIR}/scripts
WORKDIR $BUN_DIR
RUN --mount=type=cache,target=${CCACHE_DIR} \
cd $BUN_DIR \
&& make zstd
&& bash ./scripts/build-zstd.sh \
&& rm -rf src/deps/zstd scripts
FROM bun-base as ls-hpack
@@ -330,12 +347,14 @@ ENV CCACHE_DIR=${CCACHE_DIR}
COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/ls-hpack ${BUN_DIR}/src/deps/ls-hpack
COPY scripts ${BUN_DIR}/scripts
WORKDIR $BUN_DIR
RUN --mount=type=cache,target=${CCACHE_DIR} \
cd $BUN_DIR \
&& make lshpack
&& bash ./scripts/build-lshpack.sh \
&& rm -rf src/deps/ls-hpack scripts
FROM bun-base-with-zig as bun-identifier-cache
@@ -491,6 +510,7 @@ RUN mkdir -p build bun-webkit
# lol
COPY src/bun.js/bindings/sqlite/sqlite3.c ${BUN_DIR}/src/bun.js/bindings/sqlite/sqlite3.c
COPY src/deps/brotli ${BUN_DIR}/src/deps/brotli
COPY src/symbols.dyn src/linker.lds ${BUN_DIR}/src/
@@ -505,7 +525,8 @@ COPY --from=tinycc ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
COPY --from=c-ares ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
COPY --from=ls-hpack ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
COPY --from=bun-compile-zig-obj /tmp/bun-zig.o ${BUN_DIR}/build/bun-zig.o
COPY --from=bun-cpp-objects ${BUN_DIR}/build/bun-cpp-objects.a ${BUN_DIR}/build/bun-cpp-objects.a
COPY --from=bun-cpp-objects ${BUN_DIR}/build/*.a ${BUN_DIR}/build/
COPY --from=bun-cpp-objects ${BUN_DIR}/build/*.o ${BUN_DIR}/build/
COPY --from=bun-cpp-objects ${BUN_DIR}/bun-webkit/lib ${BUN_DIR}/bun-webkit/lib
WORKDIR $BUN_DIR/build

LATEST
View File

@@ -1 +1 @@
1.1.16
1.1.20

View File

@@ -26,8 +26,11 @@ ifeq ($(ARCH_NAME_RAW),arm64)
ARCH_NAME = aarch64
DOCKER_BUILDARCH = arm64
BREW_PREFIX_PATH = /opt/homebrew
DEFAULT_MIN_MACOS_VERSION = 11.0
DEFAULT_MIN_MACOS_VERSION = 13.0
MARCH_NATIVE = -mtune=$(CPU_TARGET)
ifeq ($(OS_NAME),linux)
MARCH_NATIVE = -march=armv8-a+crc -mtune=ampere1
endif
else
ARCH_NAME = x64
DOCKER_BUILDARCH = amd64
@@ -129,7 +132,7 @@ SED = $(shell which gsed 2>/dev/null || which sed 2>/dev/null)
BUN_DIR ?= $(shell dirname $(realpath $(firstword $(MAKEFILE_LIST))))
BUN_DEPS_DIR ?= $(shell pwd)/src/deps
BUN_DEPS_OUT_DIR ?= $(BUN_DEPS_DIR)
BUN_DEPS_OUT_DIR ?= $(shell pwd)/build/bun-deps
CPU_COUNT = 2
ifeq ($(OS_NAME),darwin)
CPU_COUNT = $(shell sysctl -n hw.logicalcpu)
@@ -154,7 +157,12 @@ CMAKE_FLAGS_WITHOUT_RELEASE = -DCMAKE_C_COMPILER=$(CC) \
-DCMAKE_OSX_DEPLOYMENT_TARGET=$(MIN_MACOS_VERSION) \
$(CMAKE_CXX_COMPILER_LAUNCHER_FLAG) \
-DCMAKE_AR=$(AR) \
-DCMAKE_RANLIB=$(which llvm-16-ranlib 2>/dev/null || which llvm-ranlib 2>/dev/null)
-DCMAKE_RANLIB=$(which llvm-16-ranlib 2>/dev/null || which llvm-ranlib 2>/dev/null) \
-DCMAKE_CXX_STANDARD=20 \
-DCMAKE_C_STANDARD=17 \
-DCMAKE_CXX_STANDARD_REQUIRED=ON \
-DCMAKE_C_STANDARD_REQUIRED=ON \
-DCMAKE_CXX_EXTENSIONS=ON
@@ -181,8 +189,8 @@ endif
OPTIMIZATION_LEVEL=-O3 $(MARCH_NATIVE)
DEBUG_OPTIMIZATION_LEVEL= -O1 $(MARCH_NATIVE) -gdwarf-4
CFLAGS_WITHOUT_MARCH = $(MACOS_MIN_FLAG) $(OPTIMIZATION_LEVEL) -fno-exceptions -fvisibility=hidden -fvisibility-inlines-hidden
BUN_CFLAGS = $(MACOS_MIN_FLAG) $(MARCH_NATIVE) $(OPTIMIZATION_LEVEL) -fno-exceptions -fvisibility=hidden -fvisibility-inlines-hidden
CFLAGS_WITHOUT_MARCH = $(MACOS_MIN_FLAG) $(OPTIMIZATION_LEVEL) -fno-exceptions -fvisibility=hidden -fvisibility-inlines-hidden -mno-omit-leaf-frame-pointer -fno-omit-frame-pointer -fno-asynchronous-unwind-tables -fno-unwind-tables -fno-pie -fno-pic
BUN_CFLAGS = $(MACOS_MIN_FLAG) $(MARCH_NATIVE) $(OPTIMIZATION_LEVEL) -fno-exceptions -fvisibility=hidden -fvisibility-inlines-hidden -mno-omit-leaf-frame-pointer -fno-omit-frame-pointer -fno-asynchronous-unwind-tables -fno-unwind-tables -fno-pie -fno-pic
BUN_TMP_DIR := /tmp/make-bun
CFLAGS=$(CFLAGS_WITHOUT_MARCH) $(MARCH_NATIVE)

Binary file not shown.

View File

@@ -3,6 +3,7 @@
"dependencies": {
"@babel/core": "^7.16.10",
"@babel/preset-react": "^7.16.7",
"@babel/standalone": "^7.24.7",
"@swc/core": "^1.2.133",
"benchmark": "^2.1.4",
"braces": "^3.0.2",

View File

@@ -6,6 +6,7 @@ const App = () => (
<html>
<body>
<h1>Hello World</h1>
<p>This is an example.</p>
</body>
</html>
);

View File

@@ -0,0 +1,14 @@
import { bench, run } from "mitata";
import { join } from "path";
const code = require("fs").readFileSync(
process.argv[2] || join(import.meta.dir, "../node_modules/@babel/standalone/babel.min.js"),
);
const transpiler = new Bun.Transpiler({ minify: true });
bench("transformSync", () => {
transpiler.transformSync(code);
});
await run();

build.zig
View File

@@ -33,8 +33,6 @@ comptime {
}
}
const default_reported_nodejs_version = "22.3.0";
const zero_sha = "0000000000000000000000000000000000000000";
const BunBuildOptions = struct {
@@ -48,7 +46,7 @@ const BunBuildOptions = struct {
sha: []const u8,
enable_logs: bool = false,
tracy_callstack_depth: u16,
reported_nodejs_version: []const u8 = default_reported_nodejs_version,
reported_nodejs_version: Version,
generated_code_dir: []const u8,
@@ -73,14 +71,7 @@ const BunBuildOptions = struct {
opts.addOption([:0]const u8, "sha", b.allocator.dupeZ(u8, this.sha) catch @panic("OOM"));
opts.addOption(bool, "baseline", this.isBaseline());
opts.addOption(bool, "enable_logs", this.enable_logs);
opts.addOption([:0]const u8, "reported_nodejs_version", b.allocator.dupeZ(u8, this.reported_nodejs_version) catch @panic("OOM"));
if (this.reported_nodejs_version.len > 0 and this.reported_nodejs_version[0] == 'v') {
@panic("Node.js version should not start with 'v'");
}
if (this.reported_nodejs_version.len == 0) {
@panic("Node.js version should not be empty");
}
opts.addOption([]const u8, "reported_nodejs_version", b.fmt("{}", .{this.reported_nodejs_version}));
const mod = opts.createModule();
this.cached_options_module = mod;
@@ -122,8 +113,25 @@ pub fn getOSGlibCVersion(os: OperatingSystem) ?Version {
};
}
pub fn getCpuModel(os: OperatingSystem, arch: Arch) ?Target.Query.CpuModel {
// https://github.com/oven-sh/bun/issues/12076
if (os == .linux and arch == .aarch64) {
return .{ .explicit = &Target.aarch64.cpu.cortex_a35 };
}
// Be explicit and ensure we do not accidentally target a newer M-series chip
if (os == .mac and arch == .aarch64) {
return .{ .explicit = &Target.aarch64.cpu.apple_m1 };
}
// note: x86_64 is dealt with in the CMake config and passed in.
// the reason for the explicit handling on aarch64 is due to troubles
// passing the exact target in via flags.
return null;
}
pub fn build(b: *Build) !void {
std.debug.print("zig build v{s}\n", .{builtin.zig_version_string});
std.log.info("zig compiler v{s}", .{builtin.zig_version_string});
b.zig_lib_dir = b.zig_lib_dir orelse b.path("src/deps/zig/lib");
@@ -147,6 +155,14 @@ pub fn build(b: *Build) !void {
break :brk .{ os, arch };
};
// target must be refined to support older but very popular devices on
// aarch64, this means moving the minimum supported CPU to support certain
// raspberry PIs. there are also a number of cloud hosts that use virtual
// machines with surprisingly out of date versions of glibc.
if (getCpuModel(os, arch)) |cpu_model| {
target_query.cpu_model = cpu_model;
}
target_query.os_version_min = getOSVersionMin(os);
target_query.glibc_version = getOSGlibCVersion(os);
@@ -163,6 +179,8 @@ pub fn build(b: *Build) !void {
break :ref_trace if (trace == 0) null else trace;
};
const obj_format = b.option(ObjectFormat, "obj_format", "Output file for object files") orelse .obj;
var build_options = BunBuildOptions{
.target = target,
.optimize = optimize,
@@ -178,7 +196,10 @@ pub fn build(b: *Build) !void {
break :canary if (rev == 0) null else rev;
},
.reported_nodejs_version = b.option([]const u8, "reported_nodejs_version", "Reported Node.js version") orelse default_reported_nodejs_version,
.reported_nodejs_version = try Version.parse(
b.option([]const u8, "reported_nodejs_version", "Reported Node.js version") orelse
"0.0.0-unset",
),
.sha = sha: {
const sha = b.option([]const u8, "sha", "Force the git sha") orelse
@@ -224,7 +245,7 @@ pub fn build(b: *Build) !void {
var step = b.step("obj", "Build Bun's Zig code as a .o file");
var bun_obj = addBunObject(b, &build_options);
step.dependOn(&bun_obj.step);
step.dependOn(&b.addInstallFile(bun_obj.getEmittedBin(), "bun-zig.o").step);
step.dependOn(addInstallObjectFile(b, bun_obj, "bun-zig", obj_format));
}
// zig build windows-shim
@@ -252,61 +273,59 @@ pub fn build(b: *Build) !void {
// zig build check-all
{
var step = b.step("check-all", "Check for semantic analysis errors on all supported platforms");
inline for (.{
const step = b.step("check-all", "Check for semantic analysis errors on all supported platforms");
addMultiCheck(b, step, build_options, &.{
.{ .os = .windows, .arch = .x86_64 },
.{ .os = .mac, .arch = .x86_64 },
.{ .os = .mac, .arch = .aarch64 },
.{ .os = .linux, .arch = .x86_64 },
.{ .os = .linux, .arch = .aarch64 },
}) |check| {
inline for (.{ .Debug, .ReleaseFast }) |mode| {
const check_target = b.resolveTargetQuery(.{
.os_tag = OperatingSystem.stdOSTag(check.os),
.cpu_arch = check.arch,
.os_version_min = getOSVersionMin(check.os),
.glibc_version = getOSGlibCVersion(check.os),
});
var options = BunBuildOptions{
.target = check_target,
.os = check.os,
.arch = check_target.result.cpu.arch,
.optimize = mode,
.canary_revision = build_options.canary_revision,
.sha = build_options.sha,
.tracy_callstack_depth = build_options.tracy_callstack_depth,
.version = build_options.version,
.reported_nodejs_version = build_options.reported_nodejs_version,
.generated_code_dir = build_options.generated_code_dir,
};
var obj = addBunObject(b, &options);
obj.generated_bin = null;
step.dependOn(&obj.step);
}
}
});
}
// Running `zig build` with no arguments is almost always a mistake.
// TODO: revive this error. cannot right now since ZLS runs zig build without arguments
// zig build check-windows
{
// const mistake_message = b.addSystemCommand(&.{
// "echo",
// \\
// \\To build Bun from source, please use `bun run setup` instead of `zig build`"
// \\For more info, see https://bun.sh/docs/project/contributing
// \\
// \\If you want to build the zig code in isolation, run:
// \\ 'zig build obj -Dgenerated-code=./build/codegen [...opts]'
// \\
// \\If you want to test a compile without emitting an object:
// \\ 'zig build check'
// \\ 'zig build check-all' (run linux+mac+windows)
// \\
// });
const step = b.step("check-windows", "Check for semantic analysis errors on Windows");
addMultiCheck(b, step, build_options, &.{
.{ .os = .windows, .arch = .x86_64 },
});
}
}
// b.default_step.dependOn(&mistake_message.step);
pub inline fn addMultiCheck(
b: *Build,
parent_step: *Step,
root_build_options: BunBuildOptions,
to_check: []const struct { os: OperatingSystem, arch: Arch },
) void {
inline for (to_check) |check| {
inline for (.{ .Debug, .ReleaseFast }) |mode| {
const check_target = b.resolveTargetQuery(.{
.os_tag = OperatingSystem.stdOSTag(check.os),
.cpu_arch = check.arch,
.cpu_model = getCpuModel(check.os, check.arch) orelse .determined_by_cpu_arch,
.os_version_min = getOSVersionMin(check.os),
.glibc_version = getOSGlibCVersion(check.os),
});
var options: BunBuildOptions = .{
.target = check_target,
.os = check.os,
.arch = check_target.result.cpu.arch,
.optimize = mode,
.canary_revision = root_build_options.canary_revision,
.sha = root_build_options.sha,
.tracy_callstack_depth = root_build_options.tracy_callstack_depth,
.version = root_build_options.version,
.reported_nodejs_version = root_build_options.reported_nodejs_version,
.generated_code_dir = root_build_options.generated_code_dir,
};
var obj = addBunObject(b, &options);
obj.generated_bin = null;
parent_step.dependOn(&obj.step);
}
}
}
@@ -319,10 +338,13 @@ pub fn addBunObject(b: *Build, opts: *BunBuildOptions) *Compile {
},
.target = opts.target,
.optimize = opts.optimize,
// https://github.com/ziglang/zig/issues/17430
.pic = true,
.omit_frame_pointer = false,
.strip = false, // stripped at the end
});
obj.bundle_compiler_rt = false;
obj.formatted_panics = true;
obj.root_module.omit_frame_pointer = false;
@@ -340,9 +362,10 @@ pub fn addBunObject(b: *Build, opts: *BunBuildOptions) *Compile {
}
if (opts.os == .linux) {
obj.link_emit_relocs = true;
obj.link_eh_frame_hdr = true;
obj.link_emit_relocs = false;
obj.link_eh_frame_hdr = false;
obj.link_function_sections = true;
obj.link_data_sections = true;
if (opts.optimize == .Debug) {
obj.root_module.valgrind = true;
@@ -353,6 +376,25 @@ pub fn addBunObject(b: *Build, opts: *BunBuildOptions) *Compile {
return obj;
}
const ObjectFormat = enum {
bc,
obj,
};
pub fn addInstallObjectFile(
b: *Build,
compile: *Compile,
name: []const u8,
out_mode: ObjectFormat,
) *Step {
// bin always needed to be computed or else the compilation will do nothing. zig build system bug?
const bin = compile.getEmittedBin();
return &b.addInstallFile(switch (out_mode) {
.obj => bin,
.bc => compile.getEmittedLlvmBc(),
}, b.fmt("{s}.o", .{name})).step;
}
fn exists(path: []const u8) bool {
const file = std.fs.openFileAbsolute(path, .{ .mode = .read_only }) catch return false;
file.close();
@@ -413,7 +455,11 @@ fn addInternalPackages(b: *Build, obj: *Compile, opts: *BunBuildOptions) void {
fn validateGeneratedPath(path: []const u8) void {
if (!exists(path)) {
std.debug.panic("{s} does not exist in generated code directory!", .{std.fs.path.basename(path)});
std.debug.panic(
\\Generated file '{s}' is missing!
\\
\\Make sure to use CMake and Ninja, or pass a manual codegen folder with '-Dgenerated-code=...'
, .{path});
}
}

bun.lockb

Binary file not shown.

View File

@@ -82,7 +82,7 @@ _bun_completions() {
declare -A PACKAGE_OPTIONS;
declare -A PM_OPTIONS;
local SUBCOMMANDS="dev bun create run install add remove upgrade completions discord help init pm x";
local SUBCOMMANDS="dev bun create run install add remove upgrade completions discord help init pm x test repl update link unlink build";
GLOBAL_OPTIONS[LONG_OPTIONS]="--use --cwd --bunfile --server-bunfile --config --disable-react-fast-refresh --disable-hmr --env-file --extension-order --jsx-factory --jsx-fragment --extension-order --jsx-factory --jsx-fragment --jsx-import-source --jsx-production --jsx-runtime --main-fields --no-summary --version --platform --public-dir --tsconfig-override --define --external --help --inject --loader --origin --port --dump-environment-variables --dump-limits --disable-bun-js";
GLOBAL_OPTIONS[SHORT_OPTIONS]="-c -v -d -e -h -i -l -u -p";

View File

@@ -425,6 +425,7 @@ _bun_run_completion() {
'--external[Exclude module from transpilation (can use * wildcards). ex: -e react]:external' \
'-e[Exclude module from transpilation (can use * wildcards). ex: -e react]:external' \
'--loader[Parse files with .ext:loader, e.g. --loader .js:jsx. Valid loaders: js, jsx, ts, tsx, json, toml, text, file, wasm, napi]:loader' \
'--packages[Exclude dependencies from bundle, e.g. --packages external. Valid options: bundle, external]:packages' \
'-l[Parse files with .ext:loader, e.g. --loader .js:jsx. Valid loaders: js, jsx, ts, tsx, json, toml, text, file, wasm, napi]:loader' \
'--origin[Rewrite import URLs to start with --origin. Default: ""]:origin' \
'-u[Rewrite import URLs to start with --origin. Default: ""]:origin' \

View File

@@ -61,7 +61,7 @@ To do anything interesting we need a construct known as a "view". A view is a cl
The `DataView` class is a lower-level interface for reading and manipulating the data in an `ArrayBuffer`.
Below we create a new `DataView` and set the first byte to 5.
Below we create a new `DataView` and set the first byte to 3.
```ts
const buf = new ArrayBuffer(4);
@@ -395,7 +395,7 @@ Bun implements `Buffer`, a Node.js API for working with binary data that pre-dat
```ts
const buf = Buffer.from("hello world");
// => Buffer(16) [ 116, 104, 105, 115, 32, 105, 115, 32, 97, 32, 115, 116, 114, 105, 110, 103 ]
// => Buffer(11) [ 104, 101, 108, 108, 111, 32, 119, 111, 114, 108, 100 ]
buf.length; // => 11
buf[0]; // => 104, ascii for 'h'

docs/api/fetch.md Normal file
View File

@@ -0,0 +1,308 @@
Bun implements the WHATWG `fetch` standard, with some extensions to meet the needs of server-side JavaScript.
Bun also implements `node:http`, but `fetch` is generally recommended instead.
## Sending an HTTP request
To send an HTTP request, use `fetch`
```ts
const response = await fetch("http://example.com");
console.log(response.status); // => 200
const text = await response.text(); // or response.json(), response.formData(), etc.
```
`fetch` also works with HTTPS URLs.
```ts
const response = await fetch("https://example.com");
```
You can also pass `fetch` a [`Request`](https://developer.mozilla.org/en-US/docs/Web/API/Request) object.
```ts
const request = new Request("http://example.com", {
method: "POST",
body: "Hello, world!",
});
const response = await fetch(request);
```
### Sending a POST request
To send a POST request, pass an object with the `method` property set to `"POST"`.
```ts
const response = await fetch("http://example.com", {
method: "POST",
body: "Hello, world!",
});
```
`body` can be a string, a `FormData` object, an `ArrayBuffer`, a `Blob`, and more. See the [MDN documentation](https://developer.mozilla.org/en-US/docs/Web/API/Body/body) for more information.
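For example, a minimal sketch of sending a `FormData` body (the field names and URL here are only illustrative):
```ts
const form = new FormData();
form.append("name", "Bun");
form.append("file", new Blob(["hello"]), "hello.txt");

const response = await fetch("http://example.com/upload", {
  method: "POST",
  // fetch sets the multipart/form-data Content-Type (including the boundary) automatically
  body: form,
});
```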
### Proxying requests
To proxy a request, pass an object with the `proxy` property set to a URL.
```ts
const response = await fetch("http://example.com", {
proxy: "http://proxy.com",
});
```
### Custom headers
To set custom headers, pass an object with the `headers` property set to an object.
```ts
const response = await fetch("http://example.com", {
headers: {
"X-Custom-Header": "value",
},
});
```
You can also set headers using the [Headers](https://developer.mozilla.org/en-US/docs/Web/API/Headers) object.
```ts
const headers = new Headers();
headers.append("X-Custom-Header", "value");
const response = await fetch("http://example.com", {
headers,
});
```
### Response bodies
To read the response body, use one of the following methods:
- `response.text(): Promise<string>`: Returns a promise that resolves with the response body as a string.
- `response.json(): Promise<any>`: Returns a promise that resolves with the response body as a JSON object.
- `response.formData(): Promise<FormData>`: Returns a promise that resolves with the response body as a `FormData` object.
- `response.bytes(): Promise<Uint8Array>`: Returns a promise that resolves with the response body as a `Uint8Array`.
- `response.arrayBuffer(): Promise<ArrayBuffer>`: Returns a promise that resolves with the response body as an `ArrayBuffer`.
- `response.blob(): Promise<Blob>`: Returns a promise that resolves with the response body as a `Blob`.
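For example, the common case of reading a JSON body might look like this (assuming the endpoint actually returns JSON):
```ts
const response = await fetch("https://example.com/api");
if (!response.ok) {
  throw new Error(`Request failed with status ${response.status}`);
}
const data = await response.json(); // the parsed response body
```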
#### Streaming response bodies
You can use async iterators to stream the response body.
```ts
const response = await fetch("http://example.com");
for await (const chunk of response.body) {
console.log(chunk);
}
```
You can also more directly access the `ReadableStream` object.
```ts
const response = await fetch("http://example.com");
const stream = response.body;
const reader = stream.getReader();
const { value, done } = await reader.read();
```
### Fetching a URL with a timeout
To fetch a URL with a timeout, use `AbortSignal.timeout`:
```ts
const response = await fetch("http://example.com", {
signal: AbortSignal.timeout(1000),
});
```
#### Canceling a request
To cancel a request, use an `AbortController`:
```ts
const controller = new AbortController();
const response = await fetch("http://example.com", {
signal: controller.signal,
});
controller.abort();
```
### Unix domain sockets
To fetch a URL using a Unix domain socket, use the `unix: string` option:
```ts
const response = await fetch("https://hostname/a/path", {
unix: "/var/run/path/to/unix.sock",
method: "POST",
body: JSON.stringify({ message: "Hello from Bun!" }),
headers: {
"Content-Type": "application/json",
},
});
```
### TLS
To use a client certificate, use the `tls` option:
```ts
await fetch("https://example.com", {
tls: {
key: Bun.file("/path/to/key.pem"),
cert: Bun.file("/path/to/cert.pem"),
// ca: [Bun.file("/path/to/ca.pem")],
},
});
```
#### Custom TLS Validation
To customize the TLS validation, use the `checkServerIdentity` option in `tls`
```ts
await fetch("https://example.com", {
tls: {
checkServerIdentity: (hostname, peerCertificate) => {
// Return an error if the certificate is invalid
},
},
});
```
This is similar to how it works in Node's `net` module.
## Debugging
To help with debugging, you can pass `verbose: true` to `fetch`:
```ts
const response = await fetch("http://example.com", {
verbose: true,
});
```
This will print the request and response headers to your terminal:
```sh
[fetch] > HTTP/1.1 GET http://example.com/
[fetch] > Connection: keep-alive
[fetch] > User-Agent: Bun/1.1.21
[fetch] > Accept: */*
[fetch] > Host: example.com
[fetch] > Accept-Encoding: gzip, deflate, br
[fetch] < 200 OK
[fetch] < Content-Encoding: gzip
[fetch] < Age: 201555
[fetch] < Cache-Control: max-age=604800
[fetch] < Content-Type: text/html; charset=UTF-8
[fetch] < Date: Sun, 21 Jul 2024 02:41:14 GMT
[fetch] < Etag: "3147526947+gzip"
[fetch] < Expires: Sun, 28 Jul 2024 02:41:14 GMT
[fetch] < Last-Modified: Thu, 17 Oct 2019 07:18:26 GMT
[fetch] < Server: ECAcc (sac/254F)
[fetch] < Vary: Accept-Encoding
[fetch] < X-Cache: HIT
[fetch] < Content-Length: 648
```
Note: `verbose: boolean` is not part of the Web standard `fetch` API and is specific to Bun.
## Performance
Before an HTTP request can be sent, the DNS lookup must be performed. This can take a significant amount of time, especially if the DNS server is slow or the network connection is poor.
After the DNS lookup, the TCP socket must be connected and the TLS handshake might need to be performed. This can also take a significant amount of time.
After the request completes, consuming the response body can also take a significant amount of time and memory.
At every step of the way, Bun provides APIs to help you optimize the performance of your application.
### DNS prefetching
To prefetch a DNS entry, you can use the `dns.prefetch` API. This API is useful when you know you'll need to connect to a host soon and want to avoid the initial DNS lookup.
```ts
import { dns } from "bun";
dns.prefetch("bun.sh", 443);
```
#### DNS caching
By default, Bun caches and deduplicates DNS queries in-memory for up to 30 seconds. You can see the cache stats by calling `dns.getCacheStats()`:
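A sketch of what that might look like (the exact fields on the returned stats object may vary by Bun version):
```ts
import { dns } from "bun";

dns.prefetch("bun.sh", 443);

// Inspect Bun's in-memory DNS cache
const stats = dns.getCacheStats();
console.log(stats); // e.g. counts of cache hits, cache misses, and the current cache size
```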
To learn more about DNS caching in Bun, see the [DNS caching](/docs/api/dns) documentation.
### Preconnect to a host
To preconnect to a host, you can use the `fetch.preconnect` API. This API is useful when you know you'll need to connect to a host soon and want to start the initial DNS lookup, TCP socket connection, and TLS handshake early.
```ts
import { fetch } from "bun";
fetch.preconnect("https://bun.sh");
```
Note: calling `fetch` immediately after `fetch.preconnect` will not make your request faster. Preconnecting only helps if you know you'll need to connect to a host soon, but you're not ready to make the request yet.
#### Preconnect at startup
To preconnect to a host at startup, you can pass `--fetch-preconnect`:
```sh
$ bun --fetch-preconnect https://bun.sh ./my-script.ts
```
This is sort of like `<link rel="preconnect">` in HTML.
This feature is not implemented on Windows yet. If you're interested in using this feature on Windows, please file an issue and we can implement support for it on Windows.
### Connection pooling & HTTP keep-alive
Bun automatically reuses connections to the same host. This is known as connection pooling. This can significantly reduce the time it takes to establish a connection. You don't need to do anything to enable this; it's automatic.
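As a rough illustration, in a sketch like the following the second request can reuse the keep-alive connection opened by the first:
```ts
const first = await fetch("https://example.com/a");
await first.text(); // consume the body so the connection can return to the pool

// Same host and scheme, so the pooled connection can be reused
const second = await fetch("https://example.com/b");
await second.text();
```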
#### Simultaneous connection limit
By default, Bun limits the maximum number of simultaneous `fetch` requests to 256. We do this for several reasons:
- It improves overall system stability. Operating systems have an upper limit on the number of simultaneous open TCP sockets, usually in the low thousands. Nearing this limit causes your entire computer to behave strangely. Applications hang and crash.
- It encourages HTTP Keep-Alive connection reuse. For short-lived HTTP requests, the slowest step is often the initial connection setup. Reusing connections can save a lot of time.
When the limit is exceeded, the requests are queued and sent as soon as the next request ends.
You can increase the maximum number of simultaneous connections via the `BUN_CONFIG_MAX_HTTP_REQUESTS` environment variable:
```sh
$ BUN_CONFIG_MAX_HTTP_REQUESTS=512 bun ./my-script.ts
```
The max value for this limit is currently set to 65,336. The maximum port number is 65,535, so it's quite difficult for any one computer to exceed this limit.
### Response buffering
Bun goes to great lengths to optimize the performance of reading the response body. The fastest way to read the response body is to use one of these methods:
- `response.text(): Promise<string>`
- `response.json(): Promise<any>`
- `response.formData(): Promise<FormData>`
- `response.bytes(): Promise<Uint8Array>`
- `response.arrayBuffer(): Promise<ArrayBuffer>`
- `response.blob(): Promise<Blob>`
You can also use `Bun.write` to write the response body to a file on disk:
```ts
import { write } from "bun";
await write("output.txt", response);
```

View File

@@ -756,6 +756,25 @@ $ bun build ./index.tsx --outdir ./out --external '*'
{% /codetabs %}
### `packages`
Controls whether package dependencies are included in the bundle or left external. Possible values: `bundle` (default), `external`. Bun treats any import whose path does not start with `.`, `..`, or `/` as a package.
{% codetabs group="a" %}
```ts#JavaScript
await Bun.build({
entrypoints: ['./index.ts'],
packages: 'external',
})
```
```bash#CLI
$ bun build ./index.ts --packages external
```
{% /codetabs %}
### `naming`
Customizes the generated file names. Defaults to `./[dir]/[name].[ext]`.

View File

@@ -94,8 +94,8 @@ In Bun's CLI, simple boolean flags like `--minify` do not accept an argument. Ot
---
- `--packages`
- n/a
- Not supported
- `--packages`
- No differences
---

View File

@@ -35,6 +35,10 @@ $ bun add --optional lodash
## `--exact`
{% callout %}
**Alias**`-E`
{% /callout %}
To add a package and pin to the resolved version, use `--exact`. This will resolve the version of the package and add it to your `package.json` with an exact version number instead of a version range.
```bash

View File

@@ -15,7 +15,7 @@ To _containerize_ our application, we define a `Dockerfile`. This file contains
```docker#Dockerfile
# use the official Bun image
# see all versions at https://hub.docker.com/r/oven/bun/tags
FROM oven/bun:1 as base
FROM oven/bun:1 AS base
WORKDIR /usr/src/app
# install dependencies into temp directory

View File

@@ -69,7 +69,7 @@ export const movies = sqliteTable("movies", {
We can use the `drizzle-kit` CLI to generate an initial SQL migration.
```sh
$ bunx drizzle-kit generate:sqlite --schema ./schema.ts
$ bunx drizzle-kit generate --dialect sqlite --schema ./schema.ts
```
---

View File

@@ -13,7 +13,7 @@ console.log(Bun.argv);
Running this file with arguments results in the following:
```sh
$ bun run cli.tsx --flag1 --flag2 value
$ bun run cli.ts --flag1 --flag2 value
[ '/path/to/bun', '/path/to/cli.ts', '--flag1', '--flag2', 'value' ]
```
@@ -47,7 +47,7 @@ console.log(positionals);
then it outputs
```
$ bun run cli.tsx --flag1 --flag2 value
$ bun run cli.ts --flag1 --flag2 value
{
flag1: true,
flag2: "value",

View File

@@ -16,7 +16,7 @@ await proc.exited;
The second argument accepts a configuration object.
```ts
const proc = Bun.spawn("echo", ["Hello, world!"], {
const proc = Bun.spawn(["echo", "Hello, world!"], {
cwd: "/tmp",
env: { FOO: "bar" },
onExit(proc, exitCode, signalCode, error) {

View File

@@ -13,7 +13,7 @@ jobs:
steps:
# ...
- uses: actions/checkout@v4
+ - uses: oven-sh/setup-bun@v1
+ - uses: oven-sh/setup-bun@v2
# run any `bun` or `bunx` command
+ - run: bun install
@@ -33,7 +33,7 @@ jobs:
runs-on: ubuntu-latest
steps:
# ...
- uses: oven-sh/setup-bun@v1
- uses: oven-sh/setup-bun@v2
+ with:
+ bun-version: 1.0.11 # or "latest", "canary", <sha>
```

View File

@@ -52,7 +52,7 @@ Different thresholds can be set for line-level and function-level coverage.
```toml
[test]
# to set different thresholds for lines and functions
coverageThreshold = { line = 0.5, function = 0.7 }
coverageThreshold = { lines = 0.5, functions = 0.7 }
```
---

View File

@@ -1,6 +1,6 @@
All packages downloaded from the registry are stored in a global cache at `~/.bun/install/cache`. They are stored in subdirectories named like `${name}@${version}`, so multiple versions of a package can be cached.
{% details summary="Configuring cache behavior" (bunfig.toml) %}
{% details summary="Configuring cache behavior (bunfig.toml)" %}
```toml
[install.cache]
@@ -15,8 +15,6 @@ disable = false
disableManifest = false
```
{% /details %}
## Minimizing re-downloads
Bun strives to avoid re-downloading packages multiple times. When installing a package, if the cache already contains a version in the range specified by `package.json`, Bun will use the cached package instead of downloading it again.

docs/install/npmrc.md Normal file
View File

@@ -0,0 +1,75 @@
Bun supports loading configuration options from [`.npmrc`](https://docs.npmjs.com/cli/v10/configuring-npm/npmrc) files, allowing you to reuse existing registry/scope configurations.
{% callout %}
**NOTE**: We recommend migrating your `.npmrc` file to Bun's [`bunfig.toml`](/docs/runtime/bunfig) format, as it provides more flexible options and can let you configure Bun-specific options.
{% /callout %}
# Supported options
### `registry`: Set the default registry
The default registry is used to resolve packages; its default value is `npm`'s official registry (`https://registry.npmjs.org/`).
To change it, you can set the `registry` option in `.npmrc`:
```ini
registry=http://localhost:4873/
```
The equivalent `bunfig.toml` option is [`install.registry`](/docs/runtime/bunfig#install-registry):
```toml
install.registry = "http://localhost:4873/"
```
### `@<scope>:registry`: Set the registry for a specific scope
Allows you to set the registry for a specific scope:
```ini
@myorg:registry=http://localhost:4873/
```
The equivalent `bunfig.toml` option is to add a key in [`install.scopes`](/docs/runtime/bunfig#install-registry):
```toml
[install.scopes]
myorg = "http://localhost:4873/"
```
### `//<registry_url>/:<key>=<value>`: Configure options for a specific registry
Allows you to set options for a specific registry:
```ini
# set an auth token for the registry
# ${...} is a placeholder for environment variables
//http://localhost:4873/:_authToken=${NPM_TOKEN}
# or you could set a username and password
# note that the password is base64 encoded
//http://localhost:4873/:username=myusername
//http://localhost:4873/:_password=${NPM_PASSWORD}
# or use _auth, which is your username and password
# combined into a single string, which is then base 64 encoded
//http://localhost:4873/:_auth=${NPM_AUTH}
```
The following options are supported:
- `_authToken`
- `username`
- `_password` (base64 encoded password)
- `_auth` (base64 encoded username:password, e.g. `btoa(username + ":" + password)`)
The equivalent `bunfig.toml` option is to add a key in [`install.scopes`](/docs/runtime/bunfig#install-registry):
```toml
[install.scopes]
myorg = { url = "http://localhost:4873/", username = "myusername", password = "$NPM_PASSWORD" }
```

View File

@@ -30,10 +30,6 @@ $ docker pull oven/bun
$ docker run --rm --init --ulimit memlock=-1:-1 oven/bun
```
```bash#Proto
$ proto install bun
```
{% /codetabs %}
### Windows
@@ -146,7 +142,6 @@ $ bun upgrade
**Scoop users** — To avoid conflicts with Scoop, use `scoop update bun` instead.
**proto users** - Use `proto install bun --pin` instead.
{% /callout %}
## Canary builds
@@ -291,8 +286,4 @@ $ npm uninstall -g bun
$ brew uninstall bun
```
```bash#Proto
$ proto uninstall bun
```
{% /codetabs %}

View File

@@ -197,6 +197,9 @@ export default {
description:
"Patch dependencies in your project to fix bugs or add features without vendoring the entire package.",
}),
page("install/npmrc", ".npmrc support", {
description: "Bun supports loading some configuration options from .npmrc",
}),
// page("install/utilities", "Utilities", {
// description: "Use `bun pm` to introspect your global module cache or project dependency tree.",
// }),
@@ -284,8 +287,11 @@ export default {
divider("API"),
page("api/http", "HTTP server", {
description: `Bun implements Web-standard fetch, plus a Bun-native API for building fast HTTP servers.`,
description: `Bun implements a fast HTTP server built on Request/Response objects, along with supporting node:http APIs.`,
}), // "`Bun.serve`"),
page("api/fetch", "HTTP client", {
description: `Bun implements Web-standard fetch with some Bun-native extensions.`,
}), // "fetch"),
page("api/websockets", "WebSockets", {
description: `Bun supports server-side WebSockets with on-the-fly compression, TLS support, and a Bun-native pubsub API.`,
}), // "`Bun.serve`"),

View File

@@ -60,7 +60,7 @@ Visual Studio can be installed graphically using the wizard or through WinGet:
After Visual Studio, you need the following:
- LLVM 16
- LLVM 18.1.8
- Go
- Rust
- NASM
@@ -78,14 +78,14 @@ After Visual Studio, you need the following:
```ps1#WinGet
## Select "Add LLVM to the system PATH for all users" in the LLVM installer
> winget install -i LLVM.LLVM -v 16.0.6 && winget install GoLang.Go Rustlang.Rustup NASM.NASM StrawberryPerl.StrawberryPerl RubyInstallerTeam.Ruby.3.2 OpenJS.NodeJS.LTS
> winget install -i LLVM.LLVM -v 18.1.8 && winget install GoLang.Go Rustlang.Rustup NASM.NASM StrawberryPerl.StrawberryPerl RubyInstallerTeam.Ruby.3.2 OpenJS.NodeJS.LTS
```
```ps1#Scoop
> irm https://get.scoop.sh | iex
> scoop install nodejs-lts go rust nasm ruby perl
# scoop seems to be buggy if you install llvm and the rest at the same time
> scoop install llvm@16.0.6
> scoop install llvm@18.1.8
```
{% /codetabs %}

View File

@@ -179,7 +179,7 @@ These environment variables are read by Bun and configure aspects of its behavio
---
- `BUN_CONFIG_NO_CLEAR_TERMINAL_ON_RELOAD`
- If `BUN_CONFIG_NO_CLEAR_TERMINAL_ON_RELOAD=1`, then `bun --watch` will not clear the console on reload
- If `BUN_CONFIG_NO_CLEAR_TERMINAL_ON_RELOAD=true`, then `bun --watch` will not clear the console on reload
---

View File

@@ -48,14 +48,6 @@ In this case, we are importing from `./hello`, a relative path with no extension
- `./hello/index.cjs`
- `./hello/index.json`
Import paths are case-insensitive, meaning these are all valid imports:
```ts#index.ts
import { hello } from "./hello";
import { hello } from "./HELLO";
import { hello } from "./hElLo";
```
Import paths can optionally include extensions. If an extension is present, Bun will only check for a file with that exact extension.
```ts#index.ts

View File

@@ -193,7 +193,7 @@ The table below lists all globals implemented by Node.js and Bun's current compa
### [`Buffer`](https://nodejs.org/api/buffer.html#class-buffer)
🟡 Incomplete implementation of `base64` and `base64url` encodings.
🟢 Fully implemented.
### [`ByteLengthQueuingStrategy`](https://developer.mozilla.org/en-US/docs/Web/API/ByteLengthQueuingStrategy)

View File

@@ -196,3 +196,41 @@ As of Bun v1.0.19, Bun automatically resolves the `specifier` argument to `mock.
After resolution, the mocked module is stored in the ES Module registry **and** the CommonJS require cache. This means that you can use `import` and `require` interchangeably for mocked modules.
The callback function is called lazily, only if the module is imported or required. This means that you can use `mock.module()` to mock modules that don't exist yet, and it means that you can use `mock.module()` to mock modules that are imported by other modules.
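For example, a minimal sketch of a lazy module mock (the `./greet` module path is hypothetical):
```ts
import { expect, mock, test } from "bun:test";

// The factory below only runs if "./greet" is actually imported or required somewhere.
mock.module("./greet", () => {
  return { greet: () => "mocked hello" };
});

test("greet is mocked", async () => {
  const { greet } = await import("./greet");
  expect(greet()).toBe("mocked hello");
});
```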
## Restore all function mocks to their original values with `mock.restore()`
Instead of manually restoring each mock individually with `mockFn.mockRestore()`, restore all mocks with one command by calling `mock.restore()`. Doing so does not reset the value of modules overridden with `mock.module()`.
Using `mock.restore()` can reduce the amount of code in your tests by adding it to `afterEach` blocks in each test file or even in your [test preload code](https://bun.sh/docs/runtime/bunfig#test-preload).
```ts
import { expect, mock, spyOn, test } from "bun:test";
import * as fooModule from './foo.ts';
import * as barModule from './bar.ts';
import * as bazModule from './baz.ts';
test('foo, bar, baz', () => {
const fooSpy = spyOn(fooModule, 'foo');
const barSpy = spyOn(barModule, 'bar');
const bazSpy = spyOn(bazModule, 'baz');
expect(fooSpy).toBe('foo');
expect(barSpy).toBe('bar');
expect(bazSpy).toBe('baz');
fooSpy.mockImplementation(() => 42);
barSpy.mockImplementation(() => 43);
bazSpy.mockImplementation(() => 44);
expect(fooSpy).toBe(42);
expect(barSpy).toBe(43);
expect(bazSpy).toBe(44);
mock.restore();
expect(fooSpy).toBe('foo');
expect(barSpy).toBe('bar');
expect(bazSpy).toBe('baz');
});
```

View File

@@ -182,7 +182,7 @@ pub fn main() anyerror!void {
try channel.buffer.ensureTotalCapacity(1);
try HTTPThread.init();
HTTPThread.init();
var ctx = try default_allocator.create(HTTP.HTTPChannelContext);
ctx.* = .{

View File

@@ -4,24 +4,22 @@
"workspaces": [
"./packages/bun-types"
],
"dependencies": {
"@vscode/debugadapter": "^1.61.0",
"esbuild": "^0.17.15",
"eslint": "^8.20.0",
"eslint-config-prettier": "^8.5.0",
"mitata": "^0.1.3",
"devDependencies": {
"@vscode/debugadapter": "^1.65.0",
"esbuild": "^0.21.4",
"eslint": "^9.4.0",
"eslint-config-prettier": "^9.1.0",
"mitata": "^0.1.11",
"peechy": "0.4.34",
"prettier": "^3.2.5",
"react": "^18.2.0",
"react-dom": "^18.2.0",
"source-map-js": "^1.0.2",
"typescript": "^5.0.2"
},
"devDependencies": {
"@types/bun": "^1.1.2",
"@types/react": "^18.0.25",
"@typescript-eslint/eslint-plugin": "^5.31.0",
"@typescript-eslint/parser": "^5.31.0"
"react": "^18.3.1",
"react-dom": "^18.3.1",
"source-map-js": "^1.2.0",
"typescript": "^5.4.5",
"@types/bun": "^1.1.3",
"@types/react": "^18.3.3",
"@typescript-eslint/eslint-plugin": "^7.11.0",
"@typescript-eslint/parser": "^7.11.0"
},
"resolutions": {
"bun-types": "workspace:packages/bun-types"
@@ -34,6 +32,7 @@
"build:tidy": "BUN_SILENT=1 cmake --log-level=WARNING . -DZIG_OPTIMIZE=Debug -DUSE_DEBUG_JSC=ON -DBUN_TIDY_ONLY=ON -DCMAKE_BUILD_TYPE=Debug -GNinja -Bbuild-tidy >> ${GITHUB_STEP_SUMMARY:-/dev/stdout} && BUN_SILENT=1 ninja -Cbuild-tidy >> ${GITHUB_STEP_SUMMARY:-/dev/stdout}",
"build:tidy-extra": "cmake . -DZIG_OPTIMIZE=Debug -DUSE_DEBUG_JSC=ON -DBUN_TIDY_ONLY_EXTRA=ON -DCMAKE_BUILD_TYPE=Debug -GNinja -Bbuild-tidy && ninja -Cbuild-tidy",
"build:release": "cmake . -DCMAKE_BUILD_TYPE=Release -GNinja -Bbuild-release && ninja -Cbuild-release",
"build:release:local": "cmake . -DCMAKE_BUILD_TYPE=Release -DWEBKIT_DIR=$(pwd)/src/bun.js/WebKit/WebKitBuild/Release -GNinja -Bbuild-release-local && ninja -Cbuild-release-local",
"build:release:with_logs": "cmake . -DCMAKE_BUILD_TYPE=Release -DENABLE_LOGS=true -GNinja -Bbuild-release && ninja -Cbuild-release",
"build:debug-zig-release": "cmake . -DCMAKE_BUILD_TYPE=Release -DZIG_OPTIMIZE=Debug -GNinja -Bbuild-debug-zig-release && ninja -Cbuild-debug-zig-release",
"build:safe": "cmake . -DZIG_OPTIMIZE=ReleaseSafe -DUSE_DEBUG_JSC=ON -DCMAKE_BUILD_TYPE=Release -GNinja -Bbuild-safe && ninja -Cbuild-safe",
@@ -43,10 +42,12 @@
"fmt:zig": "zig fmt src/*.zig src/*/*.zig src/*/*/*.zig src/*/*/*/*.zig",
"lint": "eslint './**/*.d.ts' --cache",
"lint:fix": "eslint './**/*.d.ts' --cache --fix",
"test": "node packages/bun-internal-test/src/runner.node.mjs ./build/bun-debug",
"test:release": "node packages/bun-internal-test/src/runner.node.mjs ./build-release/bun",
"test": "node scripts/runner.node.mjs ./build/bun-debug",
"test:release": "node scripts/runner.node.mjs ./build-release/bun",
"banned": "bun packages/bun-internal-test/src/linter.ts",
"zig-check": ".cache/zig/zig.exe build check --summary new",
"zig-check-all": ".cache/zig/zig.exe build check-all --summary new",
"zig-check-windows": ".cache/zig/zig.exe build check-windows --summary new",
"zig": ".cache/zig/zig.exe "
}
}

View File

@@ -5,9 +5,13 @@
"std.debug.assert": "Use bun.assert instead",
"std.debug.dumpStackTrace": "Use bun.handleErrorReturnTrace or bun.crash_handler.dumpStackTrace instead",
"std.debug.print": "Don't let this be committed",
"std.mem.indexOfAny": "Use bun.strings.indexAny or bun.strings.indexAnyComptime",
"std.mem.indexOfAny(u8": "Use bun.strings.indexOfAny",
"undefined != ": "This is by definition Undefined Behavior.",
"undefined == ": "This is by definition Undefined Behavior.",
"bun.toFD(std.fs.cwd().fd)": "Use bun.FD.cwd()",
"std.StringArrayHashMapUnmanaged(": "bun.StringArrayHashMapUnmanaged has a faster `eql`",
"std.StringArrayHashMap(": "bun.StringArrayHashMap has a faster `eql`",
"std.StringHashMapUnmanaged(": "bun.StringHashMapUnmanaged has a faster `eql`",
"std.StringHashMap(": "bun.StringHashMaphas a faster `eql`",
"": ""
}

View File

@@ -19,9 +19,7 @@ for (const [banned, suggestion] of Object.entries(BANNED)) {
if (banned.length === 0) continue;
// Run git grep to find occurrences of std.debug.assert in .zig files
// .nothrow() is here since git will exit with non-zero if no matches are found.
let stdout = await $`git grep -n -F "${banned}" "src/**/**.zig" | grep -v -F '//' | grep -v -F bench`
.nothrow()
.text();
let stdout = await $`git grep -n -F "${banned}" "src/**.zig" | grep -v -F '//' | grep -v -F bench`.nothrow().text();
stdout = stdout.trim();
if (stdout.length === 0) continue;

View File

@@ -290,7 +290,7 @@ function formatBody(body?: string, isBase64Encoded?: boolean): string | null {
if (!isBase64Encoded) {
return body;
}
return Buffer.from(body).toString("base64");
return Buffer.from(body, "base64").toString("utf8");
}
type HttpEventV1 = {

View File

@@ -22,10 +22,10 @@ bun upgrade
- [Linux, arm64](https://www.npmjs.com/package/@oven/bun-linux-aarch64)
- [Linux, x64](https://www.npmjs.com/package/@oven/bun-linux-x64)
- [Linux, x64 (without AVX2 instructions)](https://www.npmjs.com/package/@oven/bun-linux-x64-baseline)
- [Windows (using Windows Subsystem for Linux, aka. "WSL")](https://relatablecode.com/how-to-set-up-bun-on-a-windows-machine)
- [Windows](https://www.npmjs.com/package/@oven/bun-windows-x64)
- [Windows (without AVX2 instructions)](https://www.npmjs.com/package/@oven/bun-windows-x64-baseline)
### Future Platforms
- [Windows](https://github.com/oven-sh/bun/issues/43)
- Unix-like variants such as FreeBSD, OpenBSD, etc.
- Android and iOS

View File

@@ -1455,7 +1455,7 @@ declare module "bun" {
* ```js
* const {imports, exports} = transpiler.scan(`
* import {foo} from "baz";
* const hello = "hi!";
* export const hello = "hi!";
* `);
*
* console.log(imports); // ["baz"]
@@ -1516,6 +1516,7 @@ declare module "bun" {
plugins?: BunPlugin[];
// manifest?: boolean; // whether to return manifest
external?: string[];
packages?: "bundle" | "external";
publicPath?: string;
define?: Record<string, string>;
// origin?: string; // e.g. http://mydomain.com
@@ -2968,7 +2969,7 @@ declare module "bun" {
* Returns 0 if the versions are equal, 1 if `v1` is greater, or -1 if `v2` is greater.
* Throws an error if either version is invalid.
*/
order(v1: StringLike, v2: StringLike): -1 | 0 | 1;
order(this: void, v1: StringLike, v2: StringLike): -1 | 0 | 1;
}
var semver: Semver;
@@ -3099,6 +3100,10 @@ declare module "bun" {
*/
function openInEditor(path: string, options?: EditorOptions): void;
const fetch: typeof globalThis.fetch & {
preconnect(url: string): void;
};
interface EditorOptions {
editor?: "vscode" | "subl";
line?: number;

View File

@@ -907,26 +907,42 @@ declare global {
new (): ShadowRealm;
};
/**
* Send a HTTP(s) request
*
* @param request Request object
* @param init A structured value that contains settings for the fetch() request.
*
* @returns A promise that resolves to {@link Response} object.
*/
interface Fetch {
/**
* Send a HTTP(s) request
*
* @param request Request object
* @param init A structured value that contains settings for the fetch() request.
*
* @returns A promise that resolves to {@link Response} object.
*/
(request: Request, init?: RequestInit): Promise<Response>;
// tslint:disable-next-line:unified-signatures
function fetch(request: Request, init?: RequestInit): Promise<Response>;
/**
* Send a HTTP(s) request
*
* @param url URL string
* @param init A structured value that contains settings for the fetch() request.
*
* @returns A promise that resolves to {@link Response} object.
*/
function fetch(url: string | URL | Request, init?: FetchRequestInit): Promise<Response>;
/**
* Send a HTTP(s) request
*
* @param url URL string
* @param init A structured value that contains settings for the fetch() request.
*
* @returns A promise that resolves to {@link Response} object.
*/
(url: string | URL | Request, init?: FetchRequestInit): Promise<Response>;
(input: string | URL | globalThis.Request, init?: RequestInit): Promise<Response>;
/**
* Start the DNS resolution, TCP connection, and TLS handshake for a request
* before the request is actually sent.
*
* This can reduce the latency of a request when you know there's some
* long-running task that will delay the request starting.
*
* This is a bun-specific API and is not part of the Fetch API specification.
*/
preconnect(url: string | URL): void;
}
var fetch: Fetch;
function queueMicrotask(callback: (...args: any[]) => void): void;
/**

View File

@@ -78,21 +78,7 @@ declare module "bun:jsc" {
*/
function setTimeZone(timeZone: string): string;
/**
* Run JavaScriptCore's sampling profiler for a particular function
*
* This is pretty low-level.
*
* Things to know:
* - LLint means "Low Level Interpreter", which is the interpreter that runs before any JIT compilation
* - Baseline is the first JIT compilation tier. It's the least optimized, but the fastest to compile
* - DFG means "Data Flow Graph", which is the second JIT compilation tier. It has some optimizations, but is slower to compile
* - FTL means "Faster Than Light", which is the third JIT compilation tier. It has the most optimizations, but is the slowest to compile
*/
function profile(
callback: CallableFunction,
sampleInterval?: number,
): {
interface SamplingProfile {
/**
* A formatted summary of the top functions
*
@@ -183,7 +169,24 @@ declare module "bun:jsc" {
* Stack traces of the top functions
*/
stackTraces: string[];
};
}
/**
* Run JavaScriptCore's sampling profiler for a particular function
*
* This is pretty low-level.
*
* Things to know:
* - LLint means "Low Level Interpreter", which is the interpreter that runs before any JIT compilation
* - Baseline is the first JIT compilation tier. It's the least optimized, but the fastest to compile
* - DFG means "Data Flow Graph", which is the second JIT compilation tier. It has some optimizations, but is slower to compile
* - FTL means "Faster Than Light", which is the third JIT compilation tier. It has the most optimizations, but is the slowest to compile
*/
function profile<T extends (...args: any[]) => any>(
callback: T,
sampleInterval?: number,
...args: Parameters<T>
): ReturnType<T> extends Promise<infer U> ? Promise<SamplingProfile> : SamplingProfile;
/**
* This returns objects which native code has explicitly protected from being

View File

@@ -36,7 +36,7 @@ declare module "bun:sqlite" {
* ```ts
* const db = new Database("mydb.sqlite");
* db.run("CREATE TABLE foo (bar TEXT)");
* db.run("INSERT INTO foo VALUES (?)", "baz");
* db.run("INSERT INTO foo VALUES (?)", ["baz"]);
* console.log(db.query("SELECT * FROM foo").all());
* ```
*
@@ -47,7 +47,7 @@ declare module "bun:sqlite" {
* ```ts
* const db = new Database(":memory:");
* db.run("CREATE TABLE foo (bar TEXT)");
* db.run("INSERT INTO foo VALUES (?)", "hiiiiii");
* db.run("INSERT INTO foo VALUES (?)", ["hiiiiii"]);
* console.log(db.query("SELECT * FROM foo").all());
* ```
*
@@ -158,7 +158,7 @@ declare module "bun:sqlite" {
* @example
* ```ts
* db.run("CREATE TABLE foo (bar TEXT)");
* db.run("INSERT INTO foo VALUES (?)", "baz");
* db.run("INSERT INTO foo VALUES (?)", ["baz"]);
* ```
*
* Useful for queries like:
@@ -268,9 +268,9 @@ declare module "bun:sqlite" {
* @example
* ```ts
* db.run("CREATE TABLE foo (bar TEXT)");
* db.run("INSERT INTO foo VALUES (?)", "baz");
* db.run("INSERT INTO foo VALUES (?)", ["baz"]);
* db.run("BEGIN");
* db.run("INSERT INTO foo VALUES (?)", "qux");
* db.run("INSERT INTO foo VALUES (?)", ["qux"]);
* console.log(db.inTransaction());
* ```
*/

View File

@@ -42,6 +42,7 @@
#define HAS_MSGX
#endif
/* We need to emulate sendmmsg, recvmmsg on platform who don't have it */
int bsd_sendmmsg(LIBUS_SOCKET_DESCRIPTOR fd, struct udp_sendbuf* sendbuf, int flags) {
#if defined(_WIN32)// || defined(__APPLE__)
@@ -397,7 +398,9 @@ int bsd_addr_get_port(struct bsd_addr_t *addr) {
// called by dispatch_ready_poll
LIBUS_SOCKET_DESCRIPTOR bsd_accept_socket(LIBUS_SOCKET_DESCRIPTOR fd, struct bsd_addr_t *addr) {
LIBUS_SOCKET_DESCRIPTOR accepted_fd;
addr->len = sizeof(addr->mem);
while (1) {
addr->len = sizeof(addr->mem);
#if defined(SOCK_CLOEXEC) && defined(SOCK_NONBLOCK)
// Linux, FreeBSD
@@ -405,12 +408,18 @@ LIBUS_SOCKET_DESCRIPTOR bsd_accept_socket(LIBUS_SOCKET_DESCRIPTOR fd, struct bsd
#else
// Windows, OS X
accepted_fd = accept(fd, (struct sockaddr *) addr, &addr->len);
#endif
/* We cannot rely on addr since it is not initialized if failed */
if (accepted_fd == LIBUS_SOCKET_ERROR) {
return LIBUS_SOCKET_ERROR;
if (UNLIKELY(IS_EINTR(accepted_fd))) {
continue;
}
/* We cannot rely on addr since it is not initialized if failed */
if (accepted_fd == LIBUS_SOCKET_ERROR) {
return LIBUS_SOCKET_ERROR;
}
break;
}
internal_finalize_bsd_addr(addr);
@@ -423,14 +432,22 @@ LIBUS_SOCKET_DESCRIPTOR bsd_accept_socket(LIBUS_SOCKET_DESCRIPTOR fd, struct bsd
#endif
}
int bsd_recv(LIBUS_SOCKET_DESCRIPTOR fd, void *buf, int length, int flags) {
return recv(fd, buf, length, flags);
ssize_t bsd_recv(LIBUS_SOCKET_DESCRIPTOR fd, void *buf, int length, int flags) {
while (1) {
ssize_t ret = recv(fd, buf, length, flags);
if (UNLIKELY(IS_EINTR(ret))) {
continue;
}
return ret;
}
}
#if !defined(_WIN32)
#include <sys/uio.h>
int bsd_write2(LIBUS_SOCKET_DESCRIPTOR fd, const char *header, int header_length, const char *payload, int payload_length) {
ssize_t bsd_write2(LIBUS_SOCKET_DESCRIPTOR fd, const char *header, int header_length, const char *payload, int payload_length) {
struct iovec chunks[2];
chunks[0].iov_base = (char *)header;
@@ -438,13 +455,21 @@ int bsd_write2(LIBUS_SOCKET_DESCRIPTOR fd, const char *header, int header_length
chunks[1].iov_base = (char *)payload;
chunks[1].iov_len = payload_length;
return writev(fd, chunks, 2);
while (1) {
ssize_t written = writev(fd, chunks, 2);
if (UNLIKELY(IS_EINTR(written))) {
continue;
}
return written;
}
}
#else
int bsd_write2(LIBUS_SOCKET_DESCRIPTOR fd, const char *header, int header_length, const char *payload, int payload_length) {
int written = bsd_send(fd, header, header_length, 0);
ssize_t bsd_write2(LIBUS_SOCKET_DESCRIPTOR fd, const char *header, int header_length, const char *payload, int payload_length) {
ssize_t written = bsd_send(fd, header, header_length, 0);
if (written == header_length) {
int second_write = bsd_send(fd, payload, payload_length, 0);
ssize_t second_write = bsd_send(fd, payload, payload_length, 0);
if (second_write > 0) {
written += second_write;
}
@@ -453,26 +478,28 @@ int bsd_write2(LIBUS_SOCKET_DESCRIPTOR fd, const char *header, int header_length
}
#endif
int bsd_send(LIBUS_SOCKET_DESCRIPTOR fd, const char *buf, int length, int msg_more) {
ssize_t bsd_send(LIBUS_SOCKET_DESCRIPTOR fd, const char *buf, int length, int msg_more) {
while (1) {
// MSG_MORE (Linux), MSG_PARTIAL (Windows), TCP_NOPUSH (BSD)
#ifndef MSG_NOSIGNAL
#define MSG_NOSIGNAL 0
#endif
#ifdef MSG_MORE
#ifdef MSG_MORE
// for Linux we do not want signals
ssize_t rc = send(fd, buf, length, ((msg_more != 0) * MSG_MORE) | MSG_NOSIGNAL | MSG_DONTWAIT);
#else
// use TCP_NOPUSH
ssize_t rc = send(fd, buf, length, MSG_NOSIGNAL | MSG_DONTWAIT);
#endif
// for Linux we do not want signals
return send(fd, buf, length, ((msg_more != 0) * MSG_MORE) | MSG_NOSIGNAL | MSG_DONTWAIT);
if (UNLIKELY(IS_EINTR(rc))) {
continue;
}
#else
// use TCP_NOPUSH
return send(fd, buf, length, MSG_NOSIGNAL | MSG_DONTWAIT);
#endif
return rc;
}
}
int bsd_would_block() {
@@ -483,6 +510,23 @@ int bsd_would_block() {
#endif
}
static int us_internal_bind_and_listen(LIBUS_SOCKET_DESCRIPTOR listenFd, struct sockaddr *listenAddr, socklen_t listenAddrLength, int backlog) {
int result;
do
result = bind(listenFd, listenAddr, listenAddrLength);
while (IS_EINTR(result));
if (result == -1) {
return -1;
}
do
result = listen(listenFd, backlog);
while (IS_EINTR(result));
return result;
}
inline __attribute__((always_inline)) LIBUS_SOCKET_DESCRIPTOR bsd_bind_listen_fd(
LIBUS_SOCKET_DESCRIPTOR listenFd,
struct addrinfo *listenAddr,
@@ -512,7 +556,7 @@ inline __attribute__((always_inline)) LIBUS_SOCKET_DESCRIPTOR bsd_bind_listen_fd
setsockopt(listenFd, IPPROTO_IPV6, IPV6_V6ONLY, (void *) &disabled, sizeof(disabled));
#endif
if (bind(listenFd, listenAddr->ai_addr, (socklen_t) listenAddr->ai_addrlen) || listen(listenFd, 512)) {
if (us_internal_bind_and_listen(listenFd, listenAddr->ai_addr, (socklen_t) listenAddr->ai_addrlen, 512)) {
return LIBUS_SOCKET_ERROR;
}
@@ -690,7 +734,7 @@ static LIBUS_SOCKET_DESCRIPTOR internal_bsd_create_listen_socket_unix(const char
unlink(path);
#endif
if (bind(listenFd, (struct sockaddr *)server_address, addrlen) || listen(listenFd, 512)) {
if (us_internal_bind_and_listen(listenFd, (struct sockaddr *) server_address, (socklen_t) addrlen, 512)) {
#if defined(_WIN32)
int shouldSimulateENOENT = WSAGetLastError() == WSAENETDOWN;
#endif
@@ -838,7 +882,7 @@ int bsd_connect_udp_socket(LIBUS_SOCKET_DESCRIPTOR fd, const char *host, int por
}
freeaddrinfo(result);
return LIBUS_SOCKET_ERROR;
return (int)LIBUS_SOCKET_ERROR;
}
int bsd_disconnect_udp_socket(LIBUS_SOCKET_DESCRIPTOR fd) {
@@ -925,7 +969,7 @@ static int bsd_do_connect_raw(LIBUS_SOCKET_DESCRIPTOR fd, struct sockaddr *addr,
do {
errno = 0;
r = connect(fd, (struct sockaddr *)addr, namelen);
} while (r == -1 && errno == EINTR);
} while (IS_EINTR(r));
// connect() can return -1 with an errno of 0.
// the errno is the correct one in that case.


@@ -1740,15 +1740,20 @@ void us_internal_ssl_socket_shutdown(struct us_internal_ssl_socket_t *s) {
loop_ssl_data->ssl_socket = &s->s;
loop_ssl_data->msg_more = 0;
// sets SSL_SENT_SHUTDOWN no matter what (not actually true if error!)
int ret = SSL_shutdown(s->ssl);
if (ret == 0) {
ret = SSL_shutdown(s->ssl);
}
if (ret < 0) {
if (SSL_in_init(s->ssl) || SSL_get_quiet_shutdown(s->ssl)) {
// when SSL_in_init or quiet shutdown is set in BoringSSL, we call shutdown
// directly
us_socket_shutdown(0, &s->s);
return;
}
if (ret < 0) {
int err = SSL_get_error(s->ssl, ret);
if (err == SSL_ERROR_SSL || err == SSL_ERROR_SYSCALL) {
// clear


@@ -109,6 +109,51 @@ struct us_loop_t *us_timer_loop(struct us_timer_t *t) {
return internal_cb->loop;
}
#if defined(LIBUS_USE_EPOLL)
#include <sys/syscall.h>
static int has_epoll_pwait2 = -1;
#ifndef SYS_epoll_pwait2
// The syscall number is consistent across architectures
// https://github.com/torvalds/linux/blob/9d1ddab261f3e2af7c384dc02238784ce0cf9f98/include/uapi/asm-generic/unistd.h#L795
// https://github.com/google/gvisor/blob/master/test/syscalls/linux/epoll.cc#L48C1-L50C7
#define SYS_epoll_pwait2 441
#endif
static ssize_t sys_epoll_pwait2(int epfd, struct epoll_event *events, int maxevents, const struct timespec *timeout, const sigset_t *sigmask, size_t sigsetsize) {
return syscall(SYS_epoll_pwait2, epfd, events, maxevents, timeout, sigmask, sigsetsize);
}
static int bun_epoll_pwait2(int epfd, struct epoll_event *events, int maxevents, const struct timespec *timeout) {
int ret;
if (has_epoll_pwait2 != 0) {
do {
ret = sys_epoll_pwait2(epfd, events, maxevents, timeout, NULL, 0);
} while (IS_EINTR(ret));
if (LIKELY(ret != -1 || errno != ENOSYS)) {
return ret;
}
has_epoll_pwait2 = 0;
}
int timeoutMs = -1;
if (timeout) {
timeoutMs = timeout->tv_sec * 1000 + timeout->tv_nsec / 1000000;
}
do {
ret = epoll_wait(epfd, events, maxevents, timeoutMs);
} while (IS_EINTR(ret));
return ret;
}
#endif
/* Loop */
struct us_loop_t *us_create_loop(void *hint, void (*wakeup_cb)(struct us_loop_t *loop), void (*pre_cb)(struct us_loop_t *loop), void (*post_cb)(struct us_loop_t *loop), unsigned int ext_size) {
struct us_loop_t *loop = (struct us_loop_t *) us_calloc(1, sizeof(struct us_loop_t) + ext_size);
@@ -139,9 +184,11 @@ void us_loop_run(struct us_loop_t *loop) {
/* Fetch ready polls */
#ifdef LIBUS_USE_EPOLL
loop->num_ready_polls = epoll_wait(loop->fd, loop->ready_polls, 1024, -1);
loop->num_ready_polls = bun_epoll_pwait2(loop->fd, loop->ready_polls, 1024, NULL);
#else
loop->num_ready_polls = kevent64(loop->fd, NULL, 0, loop->ready_polls, 1024, 0, NULL);
do {
loop->num_ready_polls = kevent64(loop->fd, NULL, 0, loop->ready_polls, 1024, 0, NULL);
} while (IS_EINTR(loop->num_ready_polls));
#endif
/* Iterate ready polls, dispatching them by type */
@@ -183,12 +230,6 @@ void us_loop_run(struct us_loop_t *loop) {
}
}
#if defined(LIBUS_USE_EPOLL)
// static int has_epoll_pwait2 = 0;
// TODO:
#endif
void us_loop_run_bun_tick(struct us_loop_t *loop, const struct timespec* timeout) {
if (loop->num_polls == 0)
@@ -207,13 +248,12 @@ void us_loop_run_bun_tick(struct us_loop_t *loop, const struct timespec* timeout
/* Fetch ready polls */
#ifdef LIBUS_USE_EPOLL
int timeoutMs = -1;
if (timeout) {
timeoutMs = timeout->tv_sec * 1000 + timeout->tv_nsec / 1000000;
}
loop->num_ready_polls = epoll_wait(loop->fd, loop->ready_polls, 1024, timeoutMs);
loop->num_ready_polls = bun_epoll_pwait2(loop->fd, loop->ready_polls, 1024, timeout);
#else
loop->num_ready_polls = kevent64(loop->fd, NULL, 0, loop->ready_polls, 1024, 0, timeout);
do {
loop->num_ready_polls = kevent64(loop->fd, NULL, 0, loop->ready_polls, 1024, 0, timeout);
} while (IS_EINTR(loop->num_ready_polls));
#endif
/* Iterate ready polls, dispatching them by type */
@@ -296,7 +336,10 @@ int kqueue_change(int kqfd, int fd, int old_events, int new_events, void *user_d
EV_SET64(&change_list[change_length++], fd, EVFILT_WRITE, (new_events & LIBUS_SOCKET_WRITABLE) ? EV_ADD : EV_DELETE, 0, 0, (uint64_t)(void*)user_data, 0, 0);
}
int ret = kevent64(kqfd, change_list, change_length, change_list, change_length, KEVENT_FLAG_ERROR_EVENTS, NULL);
int ret;
do {
ret = kevent64(kqfd, change_list, change_length, change_list, change_length, KEVENT_FLAG_ERROR_EVENTS, NULL);
} while (IS_EINTR(ret));
// ret should be 0 in most cases (not guaranteed when removing async)
@@ -332,7 +375,10 @@ void us_poll_start(struct us_poll_t *p, struct us_loop_t *loop, int events) {
struct epoll_event event;
event.events = events;
event.data.ptr = p;
epoll_ctl(loop->fd, EPOLL_CTL_ADD, p->state.fd, &event);
int ret;
do {
ret = epoll_ctl(loop->fd, EPOLL_CTL_ADD, p->state.fd, &event);
} while (IS_EINTR(ret));
#else
kqueue_change(loop->fd, p->state.fd, 0, events, p);
#endif
@@ -348,7 +394,10 @@ void us_poll_change(struct us_poll_t *p, struct us_loop_t *loop, int events) {
struct epoll_event event;
event.events = events;
event.data.ptr = p;
epoll_ctl(loop->fd, EPOLL_CTL_MOD, p->state.fd, &event);
int rc;
do {
rc = epoll_ctl(loop->fd, EPOLL_CTL_MOD, p->state.fd, &event);
} while (IS_EINTR(rc));
#else
kqueue_change(loop->fd, p->state.fd, old_events, events, p);
#endif
@@ -362,7 +411,10 @@ void us_poll_stop(struct us_poll_t *p, struct us_loop_t *loop) {
int new_events = 0;
#ifdef LIBUS_USE_EPOLL
struct epoll_event event;
epoll_ctl(loop->fd, EPOLL_CTL_DEL, p->state.fd, &event);
int rc;
do {
rc = epoll_ctl(loop->fd, EPOLL_CTL_DEL, p->state.fd, &event);
} while (IS_EINTR(rc));
#else
if (old_events) {
kqueue_change(loop->fd, p->state.fd, old_events, new_events, NULL);
@@ -373,12 +425,14 @@ void us_poll_stop(struct us_poll_t *p, struct us_loop_t *loop) {
us_internal_loop_update_pending_ready_polls(loop, p, 0, old_events, new_events);
}
unsigned int us_internal_accept_poll_event(struct us_poll_t *p) {
size_t us_internal_accept_poll_event(struct us_poll_t *p) {
#ifdef LIBUS_USE_EPOLL
int fd = us_poll_fd(p);
uint64_t buf;
int read_length = read(fd, &buf, 8);
(void)read_length;
ssize_t read_length = 0;
do {
read_length = read(fd, &buf, 8);
} while (IS_EINTR(read_length));
return buf;
#else
/* Kqueue has no underlying FD for timers or user events */
@@ -467,7 +521,11 @@ void us_timer_close(struct us_timer_t *timer, int fallthrough) {
struct kevent64_s event;
EV_SET64(&event, (uint64_t) (void*) internal_cb, EVFILT_TIMER, EV_DELETE, 0, 0, (uint64_t)internal_cb, 0, 0);
kevent64(internal_cb->loop->fd, &event, 1, &event, 1, KEVENT_FLAG_ERROR_EVENTS, NULL);
int ret;
do {
ret = kevent64(internal_cb->loop->fd, &event, 1, &event, 1, KEVENT_FLAG_ERROR_EVENTS, NULL);
} while (IS_EINTR(ret));
/* (regular) sockets are the only polls which are not freed immediately */
if(fallthrough){
@@ -486,7 +544,11 @@ void us_timer_set(struct us_timer_t *t, void (*cb)(struct us_timer_t *t), int ms
struct kevent64_s event;
uint64_t ptr = (uint64_t)(void*)internal_cb;
EV_SET64(&event, ptr, EVFILT_TIMER, EV_ADD | (repeat_ms ? 0 : EV_ONESHOT), 0, ms, (uint64_t)internal_cb, 0, 0);
kevent64(internal_cb->loop->fd, &event, 1, &event, 1, KEVENT_FLAG_ERROR_EVENTS, NULL);
int ret;
do {
ret = kevent64(internal_cb->loop->fd, &event, 1, &event, 1, KEVENT_FLAG_ERROR_EVENTS, NULL);
} while (IS_EINTR(ret));
}
#endif
@@ -581,7 +643,11 @@ void us_internal_async_close(struct us_internal_async *a) {
struct kevent64_s event;
uint64_t ptr = (uint64_t)(void*)internal_cb;
EV_SET64(&event, ptr, EVFILT_MACHPORT, EV_DELETE, 0, 0, (uint64_t)(void*)internal_cb, 0,0);
kevent64(internal_cb->loop->fd, &event, 1, &event, 1, KEVENT_FLAG_ERROR_EVENTS, NULL);
int ret;
do {
ret = kevent64(internal_cb->loop->fd, &event, 1, &event, 1, KEVENT_FLAG_ERROR_EVENTS, NULL);
} while (IS_EINTR(ret));
mach_port_deallocate(mach_task_self(), internal_cb->port);
us_free(internal_cb->machport_buf);
@@ -609,7 +675,10 @@ void us_internal_async_set(struct us_internal_async *a, void (*cb)(struct us_int
event.ext[1] = MACHPORT_BUF_LEN;
event.udata = (uint64_t)(void*)internal_cb;
int ret = kevent64(internal_cb->loop->fd, &event, 1, &event, 1, KEVENT_FLAG_ERROR_EVENTS, NULL);
int ret;
do {
ret = kevent64(internal_cb->loop->fd, &event, 1, &event, 1, KEVENT_FLAG_ERROR_EVENTS, NULL);
} while (IS_EINTR(ret));
if (UNLIKELY(ret == -1)) {
abort();


@@ -125,7 +125,7 @@ int us_poll_events(struct us_poll_t *p) {
((p->poll_type & POLL_TYPE_POLLING_OUT) ? LIBUS_SOCKET_WRITABLE : 0);
}
unsigned int us_internal_accept_poll_event(struct us_poll_t *p) { return 0; }
size_t us_internal_accept_poll_event(struct us_poll_t *p) { return 0; }
int us_internal_poll_type(struct us_poll_t *p) { return p->poll_type & POLL_TYPE_KIND_MASK; }


@@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#ifndef INTERNAL_H
#define INTERNAL_H
@@ -22,6 +22,10 @@
#ifndef __cplusplus
#define alignas(x) __declspec(align(x))
#endif
#include <BaseTsd.h>
typedef SSIZE_T ssize_t;
#else
#include <stdalign.h>
#endif
@@ -52,6 +56,17 @@ void us_internal_loop_update_pending_ready_polls(struct us_loop_t *loop,
#include "internal/eventing/libuv.h"
#endif
#ifndef LIKELY
#define LIKELY(cond) __builtin_expect((_Bool)(cond), 1)
#define UNLIKELY(cond) __builtin_expect((_Bool)(cond), 0)
#endif
#ifdef _WIN32
#define IS_EINTR(rc) (rc == SOCKET_ERROR && WSAGetLastError() == WSAEINTR)
#else
#define IS_EINTR(rc) (rc == -1 && errno == EINTR)
#endif
/* Poll type and what it polls for */
enum {
/* Three first bits */
@@ -118,7 +133,7 @@ void us_internal_async_set(struct us_internal_async *a,
void us_internal_async_wakeup(struct us_internal_async *a);
/* Eventing related */
unsigned int us_internal_accept_poll_event(struct us_poll_t *p);
size_t us_internal_accept_poll_event(struct us_poll_t *p);
int us_internal_poll_type(struct us_poll_t *p);
void us_internal_poll_set_type(struct us_poll_t *p, int poll_type);


@@ -134,9 +134,9 @@ int bsd_addr_get_port(struct bsd_addr_t *addr);
// called by dispatch_ready_poll
LIBUS_SOCKET_DESCRIPTOR bsd_accept_socket(LIBUS_SOCKET_DESCRIPTOR fd, struct bsd_addr_t *addr);
int bsd_recv(LIBUS_SOCKET_DESCRIPTOR fd, void *buf, int length, int flags);
int bsd_send(LIBUS_SOCKET_DESCRIPTOR fd, const char *buf, int length, int msg_more);
int bsd_write2(LIBUS_SOCKET_DESCRIPTOR fd, const char *header, int header_length, const char *payload, int payload_length);
ssize_t bsd_recv(LIBUS_SOCKET_DESCRIPTOR fd, void *buf, int length, int flags);
ssize_t bsd_send(LIBUS_SOCKET_DESCRIPTOR fd, const char *buf, int length, int msg_more);
ssize_t bsd_write2(LIBUS_SOCKET_DESCRIPTOR fd, const char *header, int header_length, const char *payload, int payload_length);
int bsd_would_block();
// return LIBUS_SOCKET_ERROR or the fd that represents listen socket


@@ -75,14 +75,6 @@ public:
void writeMark() {
/* Date is always written */
writeHeader("Date", std::string_view(((LoopData *) us_loop_ext(us_socket_context_loop(SSL, (us_socket_context(SSL, (us_socket_t *) this)))))->date, 29));
/* You can disable this altogether */
// #ifndef UWS_HTTPRESPONSE_NO_WRITEMARK
// if (!Super::getLoopData()->noMark) {
// /* We only expose major version */
// writeHeader("uWebSockets", "20");
// }
// #endif
}
/* Returns true on success, indicating that it might be feasible to write more data.
@@ -588,7 +580,19 @@ public:
httpResponseData->onAborted = std::move(handler);
return this;
}
HttpResponse* clearOnWritableAndAborted() {
HttpResponseData<SSL> *httpResponseData = getHttpResponseData();
httpResponseData->onWritable = nullptr;
httpResponseData->onAborted = nullptr;
return this;
}
HttpResponse* clearOnAborted() {
HttpResponseData<SSL> *httpResponseData = getHttpResponseData();
httpResponseData->onAborted = nullptr;
return this;
}
/* Attach a read handler for data sent. Will be called with FIN set true if last segment. */
void onData(MoveOnlyFunction<void(std::string_view, bool)> &&handler) {
HttpResponseData<SSL> *data = getHttpResponseData();


@@ -95,13 +95,14 @@ private:
// This is both a performance optimization and a way to avoid freeing objects that must not be freed,
// such as uv_tty_t
if(loop && cleanMe && !bun_is_exiting()) {
cleanMe = false;
loop->free();
}
}
Loop *loop = nullptr;
bool cleanMe = false;
};
static LoopCleaner &getLazyLoop() {
static thread_local LoopCleaner lazyLoop;
return lazyLoop;
@@ -126,6 +127,12 @@ public:
return getLazyLoop().loop;
}
static void clearLoopAtThreadExit() {
if (getLazyLoop().cleanMe) {
getLazyLoop().loop->free();
}
}
/* Freeing the default loop should be done once */
void free() {
LoopData *loopData = (LoopData *) us_loop_ext((us_loop_t *) this);

scripts/all-dependencies.ps1 Normal file → Executable file

@@ -3,13 +3,14 @@ param(
)
$ErrorActionPreference = 'Stop'
. (Join-Path $PSScriptRoot "env.ps1")
if ($env:CI) {
& (Join-Path $PSScriptRoot "update-submodules.ps1")
}
$DidAnything = $false;
$BUN_BASE_DIR = if ($env:BUN_BASE_DIR) { $env:BUN_BASE_DIR } else { Join-Path $PSScriptRoot '..' }
$BUN_DEPS_DIR = if ($env:BUN_DEPS_DIR) { $env:BUN_DEPS_DIR } else { Join-Path $BUN_BASE_DIR 'src\deps' }
$BUN_DEPS_OUT_DIR = if ($env:BUN_DEPS_OUT_DIR) { $env:BUN_DEPS_OUT_DIR } else { $BUN_DEPS_DIR }
function Build-Dependency {
param(
$Script,


@@ -1,6 +1,11 @@
#!/usr/bin/env bash
set -euo pipefail
set -eo pipefail
source "$(dirname -- "${BASH_SOURCE[0]}")/env.sh"
if [[ "$CI" ]]; then
$(dirname -- "${BASH_SOURCE[0]}")/update-submodules.sh
fi
FORCE=
while getopts "f" opt; do
@@ -19,16 +24,35 @@ while getopts "f" opt; do
done
BUILT_ANY=0
SUBMODULES=
CACHE_DIR=
CACHE=0
if [ -n "$BUN_DEPS_CACHE_DIR" ]; then
CACHE_DIR="$BUN_DEPS_CACHE_DIR"
CACHE=1
SUBMODULES="$(git submodule status)"
fi
dep() {
local script="$1"
local submodule="$1"
local script="$2"
CACHE_KEY=
if [ "$CACHE" == "1" ]; then
CACHE_KEY="$submodule/$(echo "$SUBMODULES" | grep "$submodule" | git hash-object --stdin)"
fi
if [ -z "$FORCE" ]; then
HAS_ALL_DEPS=1
shift
for lib in "$@"; do
for lib in "${@:2}"; do
if [ ! -f "$BUN_DEPS_OUT_DIR/$lib" ]; then
HAS_ALL_DEPS=0
break
if [[ "$CACHE" == "1" && -f "$CACHE_DIR/$CACHE_KEY/$lib" ]]; then
mkdir -p "$BUN_DEPS_OUT_DIR"
cp "$CACHE_DIR/$CACHE_KEY/$lib" "$BUN_DEPS_OUT_DIR/$lib"
printf "%s %s - already cached\n" "$script" "$lib"
else
HAS_ALL_DEPS=0
break
fi
fi
done
if [ "$HAS_ALL_DEPS" == "1" ]; then
@@ -41,27 +65,34 @@ dep() {
set +e
bash "$SCRIPT_DIR/build-$script.sh"
EXIT=$?
set -e
if [ "$EXIT" -ne 0 ]; then
printf "Failed to build %s\n" "$script"
exit "$EXIT"
fi
set -e
if [ "$CACHE" == "1" ]; then
mkdir -p "$CACHE_DIR/$CACHE_KEY"
for lib in "${@:2}"; do
cp "$BUN_DEPS_OUT_DIR/$lib" "$CACHE_DIR/$CACHE_KEY/$lib"
printf "%s %s - cached\n" "$script" "$lib"
done
fi
BUILT_ANY=1
}
dep boringssl libcrypto.a libssl.a libdecrepit.a
dep cares libcares.a
dep libarchive libarchive.a
dep lolhtml liblolhtml.a
dep mimalloc-debug libmimalloc-debug.a libmimalloc-debug.o
dep mimalloc libmimalloc.a libmimalloc.o
dep tinycc libtcc.a
dep zlib libz.a
dep zstd libzstd.a
dep lshpack liblshpack.a
dep boringssl boringssl libcrypto.a libssl.a libdecrepit.a
dep c-ares cares libcares.a
dep libarchive libarchive libarchive.a
dep lol-html lolhtml liblolhtml.a
dep mimalloc mimalloc-debug libmimalloc-debug.a libmimalloc-debug.o
dep mimalloc mimalloc libmimalloc.a libmimalloc.o
dep tinycc tinycc libtcc.a
dep zlib zlib libz.a
dep zstd zstd libzstd.a
dep ls-hpack lshpack liblshpack.a
if [ "$BUILT_ANY" -eq 0 ]; then
printf "(run with -f to rebuild)\n"

scripts/build-boringssl.ps1 Normal file → Executable file


@@ -1,12 +1,12 @@
#!/usr/bin/env bash
set -euxo pipefail
set -exo pipefail
source $(dirname -- "${BASH_SOURCE[0]}")/env.sh
cd $BUN_DEPS_DIR/boringssl
mkdir -p build
cd build
cmake "${CMAKE_FLAGS[@]}" -DCMAKE_EXE_LINKER_FLAGS="-fuse-ld=lld" -GNinja ..
cmake "${CMAKE_FLAGS[@]}" -GNinja ..
ninja libcrypto.a libssl.a libdecrepit.a
cp **/libcrypto.a $BUN_DEPS_OUT_DIR/libcrypto.a

scripts/build-bun-cpp.ps1 Executable file

@@ -0,0 +1,29 @@
param (
[switch] $Baseline = $False,
[switch] $Fast = $False
)
$ErrorActionPreference = 'Stop' # Setting strict mode, similar to 'set -euo pipefail' in bash
$Tag = If ($Baseline) { "-Baseline" } Else { "" }
$UseBaselineBuild = If ($Baseline) { "ON" } Else { "OFF" }
$UseLto = If ($Fast) { "OFF" } Else { "ON" }
# $CANARY_REVISION = if (Test-Path build/.canary_revision) { Get-Content build/.canary_revision } else { "0" }
$CANARY_REVISION = 0
.\scripts\env.ps1 $Tag
.\scripts\update-submodules.ps1
.\scripts\build-libuv.ps1 -CloneOnly $True
cd build
cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Release `
-DNO_CODEGEN=0 `
-DNO_CONFIGURE_DEPENDS=1 `
"-DUSE_BASELINE_BUILD=${UseBaselineBuild}" `
"-DUSE_LTO=${UseLto}" `
"-DCANARY=${CANARY_REVISION}" `
-DBUN_CPP_ONLY=1 $Flags
if ($LASTEXITCODE -ne 0) { throw "CMake configuration failed" }
.\compile-cpp-only.ps1 -v
if ($LASTEXITCODE -ne 0) { throw "C++ compilation failed" }

scripts/build-bun-cpp.sh Executable file

@@ -0,0 +1,48 @@
#!/usr/bin/env bash
set -exo pipefail
source $(dirname -- "${BASH_SOURCE[0]}")/env.sh
export USE_LTO="${USE_LTO:-ON}"
case "$(uname -m)" in
aarch64|arm64)
export CPU_TARGET="${CPU_TARGET:-native}"
;;
*)
export CPU_TARGET="${CPU_TARGET:-haswell}"
;;
esac
while [[ $# -gt 0 ]]; do
case "$1" in
--fast|--no-lto)
export USE_LTO="OFF"
shift
;;
--baseline)
export CPU_TARGET="nehalem"
shift
;;
--cpu)
export CPU_TARGET="$2"
shift
shift
;;
*|-*|--*)
echo "Unknown option $1"
exit 1
;;
esac
done
mkdir -p build
cd build
mkdir -p tmp_modules tmp_functions js codegen
cmake .. \
-GNinja \
-DCMAKE_BUILD_TYPE=Release \
-DUSE_LTO=${USE_LTO} \
-DCPU_TARGET=${CPU_TARGET} \
-DBUN_CPP_ONLY=1 \
-DNO_CONFIGURE_DEPENDS=1
chmod +x ./compile-cpp-only.sh
bash ./compile-cpp-only.sh -v

scripts/build-bun-zig.sh Executable file

@@ -0,0 +1,95 @@
#!/usr/bin/env bash
set -exo pipefail
source $(dirname -- "${BASH_SOURCE[0]}")/env.sh
cwd=$(pwd)
zig=
if [[ "$CI" ]]; then
# Since the zig build depends on files from the zig submodule,
# make sure to update the submodule before building.
git submodule update --init --recursive --progress --depth=1 --checkout src/deps/zig
# Also update the correct version of zig in the submodule.
$(dirname -- "${BASH_SOURCE[0]}")/download-zig.sh
fi
if [ -f "$cwd/.cache/zig/zig" ]; then
zig="$cwd/.cache/zig/zig"
else
zig=$(which zig)
fi
ZIG_OPTIMIZE="${ZIG_OPTIMIZE:-ReleaseFast}"
CANARY="${CANARY:-0}"
GIT_SHA="${GIT_SHA:-$(git rev-parse HEAD)}"
BUILD_MACHINE_ARCH="${BUILD_MACHINE_ARCH:-$(uname -m)}"
DOCKER_MACHINE_ARCH=""
if [[ "$BUILD_MACHINE_ARCH" == "x86_64" || "$BUILD_MACHINE_ARCH" == "amd64" ]]; then
BUILD_MACHINE_ARCH="x86_64"
DOCKER_MACHINE_ARCH="amd64"
elif [[ "$BUILD_MACHINE_ARCH" == "aarch64" || "$BUILD_MACHINE_ARCH" == "arm64" ]]; then
BUILD_MACHINE_ARCH="aarch64"
DOCKER_MACHINE_ARCH="arm64"
fi
TARGET_OS="${1:-linux}"
TARGET_ARCH="${2:-x64}"
TARGET_CPU="${3:-${CPU_TARGET:-native}}"
BUILDARCH=""
if [[ "$TARGET_ARCH" == "x64" || "$TARGET_ARCH" == "x86_64" || "$TARGET_ARCH" == "amd64" ]]; then
TARGET_ARCH="x86_64"
BUILDARCH="amd64"
elif [[ "$TARGET_ARCH" == "aarch64" || "$TARGET_ARCH" == "arm64" ]]; then
TARGET_ARCH="aarch64"
BUILDARCH="arm64"
fi
TRIPLET=""
if [[ "$TARGET_OS" == "linux" ]]; then
TRIPLET="$TARGET_ARCH-linux-gnu"
elif [[ "$TARGET_OS" == "darwin" ]]; then
TRIPLET="$TARGET_ARCH-macos-none"
elif [[ "$TARGET_OS" == "windows" ]]; then
TRIPLET="$TARGET_ARCH-windows-msvc"
fi
echo "--- Building identifier-cache"
$zig run src/js_lexer/identifier_data.zig
echo "--- Building node-fallbacks"
cd src/node-fallbacks
bun install --frozen-lockfile
bun run build
cd "$cwd"
echo "--- Building codegen"
bun install --frozen-lockfile
make runtime_js fallback_decoder bun_error
echo "--- Building modules"
mkdir -p build
bun run src/codegen/bundle-modules.ts --debug=OFF build
echo "--- Building zig"
cd build
cmake .. \
-GNinja \
-DCMAKE_BUILD_TYPE=Release \
-DUSE_LTO=ON \
-DZIG_OPTIMIZE="${ZIG_OPTIMIZE}" \
-DGIT_SHA="${GIT_SHA}" \
-DARCH="${TARGET_ARCH}" \
-DBUILDARCH="${BUILDARCH}" \
-DCPU_TARGET="${TARGET_CPU}" \
-DZIG_TARGET="${TRIPLET}" \
-DASSERTIONS="OFF" \
-DWEBKIT_DIR="omit" \
-DNO_CONFIGURE_DEPENDS=1 \
-DNO_CODEGEN=1 \
-DBUN_ZIG_OBJ_DIR="$cwd/build" \
-DCANARY="$CANARY" \
-DZIG_LIB_DIR=src/deps/zig/lib
ONLY_ZIG=1 ninja "$cwd/build/bun-zig.o" -v

scripts/build-cares.ps1 Normal file → Executable file


@@ -1,10 +1,12 @@
#!/usr/bin/env bash
set -euxo pipefail
set -exo pipefail
export FORCE_PIC=1
source $(dirname -- "${BASH_SOURCE[0]}")/env.sh
cd $BUN_DEPS_DIR/c-ares
rm -rf build
rm -rf build CMakeCache.txt CMakeFiles
mkdir -p build
cd build
@@ -12,8 +14,9 @@ cd build
cmake "${CMAKE_FLAGS[@]}" .. \
-DCMAKE_INSTALL_LIBDIR=lib \
-DCARES_STATIC=ON \
-DCARES_STATIC_PIC=ON \
-DCARES_STATIC_PIC=OFF \
-DCARES_SHARED=OFF \
-DCARES_BUILD_TOOLS=ON \
-G "Ninja"
ninja

scripts/build-libarchive.ps1 Normal file → Executable file


@@ -1,5 +1,6 @@
#!/usr/bin/env bash
set -euxo pipefail
set -exo pipefail
export FORCE_PIC=1
source $(dirname -- "${BASH_SOURCE[0]}")/env.sh
mkdir -p $BUN_DEPS_OUT_DIR

scripts/build-libuv.ps1 Normal file → Executable file

scripts/build-lolhtml.ps1 Normal file → Executable file


@@ -1,5 +1,5 @@
#!/usr/bin/env bash
set -euxo pipefail
set -exo pipefail
source $(dirname -- "${BASH_SOURCE[0]}")/env.sh
cd $BUN_DEPS_DIR/lol-html/c-api

scripts/build-lshpack.ps1 Normal file → Executable file

scripts/build-lshpack.sh Normal file → Executable file

@@ -1,12 +1,12 @@
#!/usr/bin/env bash
set -euxo pipefail
set -exo pipefail
source $(dirname -- "${BASH_SOURCE[0]}")/env.sh
rm -rf CMakeFiles CMakeCache build.ninja
mkdir -p $BUN_DEPS_OUT_DIR
cd $BUN_DEPS_DIR/ls-hpack
rm -rf CMakeCache* CMakeFiles
cmake "${CMAKE_FLAGS[@]}" . \
@@ -15,6 +15,6 @@ cmake "${CMAKE_FLAGS[@]}" . \
-DSHARED=0 \
-GNinja
ninja
ninja libls-hpack.a
cp ./libls-hpack.a $BUN_DEPS_OUT_DIR/liblshpack.a


@@ -1,5 +1,5 @@
#!/usr/bin/env bash
set -euxo pipefail
set -exo pipefail
source "$(dirname -- "${BASH_SOURCE[0]}")/env.sh"
MIMALLOC_OVERRIDE_FLAG=${MIMALLOC_OVERRIDE_FLAG:-}

scripts/build-mimalloc.ps1 Normal file → Executable file


@@ -1,5 +1,5 @@
#!/usr/bin/env bash
set -euxo pipefail
set -exo pipefail
source "$(dirname -- "${BASH_SOURCE[0]}")/env.sh"
MIMALLOC_OVERRIDE_FLAG=${MIMALLOC_OVERRIDE_FLAG:-}

scripts/build-tinycc.ps1 Normal file → Executable file

@@ -22,9 +22,8 @@ try {
$Baseline = $env:BUN_DEV_ENV_SET -eq "Baseline=True"
# TODO: -MT
Run clang-cl @($env:CFLAGS -split ' ') libtcc.c -o tcc.obj "-DTCC_TARGET_PE" "-DTCC_TARGET_X86_64" "-O2" "-W2" "-Zi" "-MD" "-GS-" "-c"
Run lib "tcc.obj" "-OUT:tcc.lib"
Run clang-cl @($env:CFLAGS -split ' ') libtcc.c -o tcc.obj "-DTCC_TARGET_PE" "-DTCC_TARGET_X86_64" "-O2" "-W2" "-Zi" "-MD" "-GS-" "-c" "-MT"
Run llvm-lib "tcc.obj" "-OUT:tcc.lib"
Copy-Item tcc.obj $BUN_DEPS_OUT_DIR/tcc.lib


@@ -1,5 +1,5 @@
#!/usr/bin/env bash
set -euxo pipefail
set -exo pipefail
source $(dirname -- "${BASH_SOURCE[0]}")/env.sh
mkdir -p $BUN_DEPS_OUT_DIR

scripts/build-zlib.ps1 Normal file → Executable file


@@ -1,13 +1,12 @@
#!/usr/bin/env bash
set -euxo pipefail
set -exo pipefail
source $(dirname -- "${BASH_SOURCE[0]}")/env.sh
mkdir -p $BUN_DEPS_OUT_DIR
cd $BUN_DEPS_DIR/zlib
export CFLAGS="-O3"
if [[ $(uname -s) == 'Darwin' ]]; then
export CFLAGS="$CFLAGS -mmacosx-version-min=${CMAKE_OSX_DEPLOYMENT_TARGET}"
fi
CFLAGS="${CFLAGS}" ./configure --static
make -j${CPUS}
rm -rf build
mkdir build
cd build
cmake $CMAKE_FLAGS -G Ninja -DCMAKE_BUILD_TYPE=Release ..
ninja
cp ./libz.a $BUN_DEPS_OUT_DIR/libz.a

scripts/build-zstd.ps1 Normal file → Executable file

@@ -3,9 +3,10 @@ $ErrorActionPreference = 'Stop' # Setting strict mode, similar to 'set -euo pip
Push-Location (Join-Path $BUN_DEPS_DIR 'zstd')
try {
Remove-Item CMakeCache.txt -ErrorAction SilentlyContinue
Remove-Item CMakeCache.txt, CMakeFiles -Recurse -ErrorAction SilentlyContinue
Run cmake -S "build/cmake" @CMAKE_FLAGS -DZSTD_BUILD_STATIC=ON
# CL_SHOWINCLUDES_PREFIX is a workaround for a CMake bug in 3.28; .ninja_deps still needs to be deleted. The bug is fixed in 3.30.
Run cmake -S "build/cmake" @CMAKE_FLAGS -DZSTD_BUILD_STATIC=ON -DCMAKE_CL_SHOWINCLUDES_PREFIX="Note: including file:"
Run cmake --build . --clean-first --config Release
Copy-Item lib/zstd_static.lib $BUN_DEPS_OUT_DIR/zstd.lib


@@ -1,5 +1,5 @@
#!/usr/bin/env bash
set -euxo pipefail
set -exo pipefail
source $(dirname -- "${BASH_SOURCE[0]}")/env.sh
mkdir -p $BUN_DEPS_OUT_DIR
@@ -7,5 +7,5 @@ mkdir -p $BUN_DEPS_OUT_DIR
cd $BUN_DEPS_DIR/zstd
rm -rf Release CMakeCache.txt CMakeFiles
cmake "${CMAKE_FLAGS[@]}" -DZSTD_BUILD_STATIC=ON -B Release -S build/cmake -G Ninja
ninja -C Release
ninja libzstd_static -C Release
cp Release/lib/libzstd.a $BUN_DEPS_OUT_DIR/libzstd.a

scripts/build.ps1 Normal file → Executable file

scripts/build.sh Executable file

File diff suppressed because it is too large

scripts/buildkite-link-bun.ps1 Executable file

@@ -0,0 +1,59 @@
param (
[switch] $Baseline = $False,
[switch] $Fast = $False
)
$ErrorActionPreference = 'Stop' # Setting strict mode, similar to 'set -euo pipefail' in bash
$Target = If ($Baseline) { "windows-x64-baseline" } Else { "windows-x64" }
$Tag = "bun-$Target"
$TagSuffix = If ($Baseline) { "-Baseline" } Else { "" }
$UseBaselineBuild = If ($Baseline) { "ON" } Else { "OFF" }
$UseLto = If ($Fast) { "OFF" } Else { "ON" }
.\scripts\env.ps1 $TagSuffix
mkdir -Force build
buildkite-agent artifact download "**" build --step "${Target}-build-zig"
buildkite-agent artifact download "**" build --step "${Target}-build-cpp"
buildkite-agent artifact download "**" build --step "${Target}-build-deps"
mv -Force -ErrorAction SilentlyContinue build\build\bun-deps\* build\bun-deps
mv -Force -ErrorAction SilentlyContinue build\build\* build
Set-Location build
$CANARY_REVISION = 0
cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Release `
-DNO_CODEGEN=1 `
-DNO_CONFIGURE_DEPENDS=1 `
"-DCPU_TARGET=${CPU_TARGET}" `
"-DCANARY=${CANARY_REVISION}" `
-DBUN_LINK_ONLY=1 `
"-DUSE_BASELINE_BUILD=${UseBaselineBuild}" `
"-DUSE_LTO=${UseLto}" `
"-DBUN_DEPS_OUT_DIR=$(Resolve-Path bun-deps)" `
"-DBUN_CPP_ARCHIVE=$(Resolve-Path bun-cpp-objects.a)" `
"-DBUN_ZIG_OBJ_DIR=$(Resolve-Path .)" `
"$Flags"
if ($LASTEXITCODE -ne 0) { throw "CMake configuration failed" }
ninja -v
if ($LASTEXITCODE -ne 0) { throw "Link failed!" }
ls
if ($Fast) {
$Tag = "$Tag-nolto"
}
Set-Location ..
$Dist = mkdir -Force "${Tag}"
cp -r build\bun.exe "$Dist\bun.exe"
Compress-Archive -Force "$Dist" "${Dist}.zip"
$Dist = "$Dist-profile"
MkDir -Force "$Dist"
cp -r build\bun.exe "$Dist\bun.exe"
cp -r build\bun.pdb "$Dist\bun.pdb"
Compress-Archive -Force "$Dist" "$Dist.zip"
$env:BUN_GARBAGE_COLLECTOR_LEVEL = "1"
$env:BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING = "1"
.\build\bun.exe --print "JSON.stringify(require('bun:internal-for-testing').crash_handler.getFeatureData())" > .\features.json

scripts/buildkite-link-bun.sh Executable file

@@ -0,0 +1,80 @@
#!/usr/bin/env bash
set -exo pipefail
source $(dirname -- "${BASH_SOURCE[0]}")/env.sh
export USE_LTO="${USE_LTO:-ON}"
case "$(uname -m)" in
aarch64|arm64)
export CPU_TARGET="${CPU_TARGET:-native}"
;;
*)
export CPU_TARGET="${CPU_TARGET:-haswell}"
;;
esac
export TAG=""
while [[ $# -gt 0 ]]; do
case "$1" in
--tag)
export TAG="$2"
shift
shift
;;
--fast|--no-lto)
export USE_LTO="OFF"
shift
;;
--baseline)
export CPU_TARGET="nehalem"
shift
;;
--cpu)
export CPU_TARGET="$2"
shift
shift
;;
*|-*|--*)
echo "Unknown option $1"
exit 1
;;
esac
done
if [[ -z "$TAG" ]]; then
echo "--tag <name> is required"
exit 1
fi
rm -rf release
mkdir -p release
buildkite-agent artifact download '**' release --step $TAG-build-deps
buildkite-agent artifact download '**' release --step $TAG-build-zig
buildkite-agent artifact download '**' release --step $TAG-build-cpp
cd release
cmake .. \
-GNinja \
-DCMAKE_BUILD_TYPE=Release \
-DCPU_TARGET=${CPU_TARGET} \
-DUSE_LTO=${USE_LTO} \
-DBUN_LINK_ONLY=1 \
-DBUN_ZIG_OBJ_DIR="$(pwd)/build" \
-DBUN_CPP_ARCHIVE="$(pwd)/build/bun-cpp-objects.a" \
-DBUN_DEPS_OUT_DIR="$(pwd)/build/bun-deps" \
-DNO_CONFIGURE_DEPENDS=1
ninja -v
if [[ "${USE_LTO}" == "OFF" ]]; then
TAG="${TAG}-nolto"
fi
chmod +x bun-profile bun
mkdir -p bun-$TAG-profile/ bun-$TAG/
mv bun-profile bun-$TAG-profile/bun-profile
mv bun bun-$TAG/bun
zip -r bun-$TAG-profile.zip bun-$TAG-profile
zip -r bun-$TAG.zip bun-$TAG
cd ..
mv release/bun-$TAG.zip bun-$TAG.zip
mv release/bun-$TAG-profile.zip bun-$TAG-profile.zip

scripts/clean-dependencies.ps1 Normal file → Executable file

scripts/download-webkit.ps1 Normal file → Executable file

Some files were not shown because too many files have changed in this diff