Compare commits

..

190 Commits

Author SHA1 Message Date
Cursor Agent
fb516e7264 Implement IPC socket handling and file descriptor passing in Bun 2025-06-05 23:48:28 +00:00
Jarred Sumner
f62940bbda A couple small zig cleanup things (#20196) 2025-06-05 05:11:28 -07:00
Meghan Denny
c82345c0a0 zig: fix debug crash in uws.Response (#20202) 2025-06-04 23:43:27 -07:00
Kai Tamkun
817d0464f6 Add support for node:vm.SyntheticModule (#19878) 2025-06-04 19:41:26 -07:00
Jarred Sumner
a5bb525614 Ensure we set the socket flag in LifecycleScriptSubprocess (#20179) 2025-06-04 19:38:47 -07:00
190n
4cb7910e32 remove unnecessary explicit backing integer (#20188) 2025-06-04 16:44:55 -07:00
190n
d7970946eb Delete flaky tests from #20065 (#20189) 2025-06-04 16:44:31 -07:00
pfg
014fb6be8f test-tls-check-server-identity (#20170) 2025-06-04 16:44:15 -07:00
Meghan Denny
5c7991b707 cpp: fix compile errors caught from disallowing implicit conversion from JSValue to EncodedJSValue (#20175) 2025-06-04 14:48:35 -07:00
Jarred Sumner
da5fc817d1 Change copy to a .len += 2025-06-04 00:25:57 -07:00
Jarred Sumner
407c4e800a Revert "add support for "workspaces.nohoist" and "workspaces.hoistingLimits" (#20124)"
This reverts commit 11070b8e16.
2025-06-03 23:51:03 -07:00
Dylan Conway
11070b8e16 add support for "workspaces.nohoist" and "workspaces.hoistingLimits" (#20124) 2025-06-03 23:44:09 -07:00
Kai Tamkun
adfdaab4fd Add test for #20144 (#20171) 2025-06-03 23:41:37 -07:00
Meghan Denny
bfd7fc06c7 fix test-net-server-max-connections.js (#20034)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
Co-authored-by: nektro <5464072+nektro@users.noreply.github.com>
Co-authored-by: Ciro Spaciari <ciro.spaciari@gmail.com>
2025-06-03 22:50:08 -07:00
pfg
bd3abc5a2a Fix calc bug (#20174) 2025-06-03 22:08:51 -07:00
Ali
193193024f Fixed exit signals hanging in loops (#20164) 2025-06-03 15:07:46 -07:00
nobkd
6edc3a9900 remove audit from bun pm help (#20167) 2025-06-03 14:37:03 -07:00
Jarred Sumner
1bd44e9ce7 Fixes #18239 (#20152) 2025-06-03 13:23:12 -07:00
Jarred Sumner
c7327d62c2 bun run prettier 2025-06-03 04:17:42 -07:00
Ali
90dda8219f Fixed: radians rewritten to degrees without converting (#19848) 2025-06-03 04:06:05 -07:00
Ali
885979644d Fixed : bun:ffi new CString() ignores byteOffset argument if byteLength is not provided (#19819)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2025-06-03 04:05:04 -07:00
Joel Shepherd
13c5b0d9cb Added rapidhash algorithm (#20163) 2025-06-03 03:34:35 -07:00
Kuba Ellwart
d6e45afef9 Support Optional Message Argument in RedisClient.ping() (#20161) 2025-06-03 02:33:10 -07:00
Jarred Sumner
300aedd9cc Bump WebKit, libpas on Windows edition (#20068)
Co-authored-by: Jarred-Sumner <709451+Jarred-Sumner@users.noreply.github.com>
2025-06-03 02:32:15 -07:00
Jarred Sumner
d9cf836b67 Split server.zig into more files (#20139)
Co-authored-by: Claude <noreply@anthropic.com>
Co-authored-by: Jarred-Sumner <709451+Jarred-Sumner@users.noreply.github.com>
2025-06-03 01:38:26 -07:00
Justin Yao Du
293215778f Fix a typo in bunfig docs page (extra if) (#20160) 2025-06-03 01:21:40 -07:00
Ali
95346bd919 fixed SharedArrayBuffer crashing on transfer (#20130) 2025-06-02 23:05:52 -07:00
Meghan Denny
ceaaed4848 node: more now passing tests (#20065) 2025-06-02 23:03:47 -07:00
Meghan Denny
abaa69183b write test in server.spec.ts better (#20150) 2025-06-02 23:03:05 -07:00
Meghan Denny
3e1075410b Delete test-net-allow-half-open.js
turned out to be flaky
2025-06-02 19:01:49 -07:00
190n
7a88bb0e1c add gamble.ts (#20153) 2025-06-02 18:17:05 -07:00
Meghan Denny
7a790581e0 Revert "fix test-net-bytes-stats.js" (#20154) 2025-06-02 18:15:39 -07:00
190n
d5cc530024 State CPU requirements in README (#20146) 2025-06-02 14:06:39 -07:00
Ben Grant
d7548325b1 delete event_loop/README.md 2025-06-02 11:49:09 -07:00
Jarred Sumner
d11fd94cdb Add readme to src/bun.js/event_loop 2025-06-02 03:51:46 -07:00
github-actions[bot]
4cbd040485 deps: update sqlite to 3.50.0 (#20122)
Co-authored-by: Jarred-Sumner <Jarred-Sumner@users.noreply.github.com>
2025-06-02 03:27:02 -07:00
Jarred Sumner
773484a628 Split uSockets/uWS <> Zig bindings into many different files (#20138)
Co-authored-by: Jarred-Sumner <709451+Jarred-Sumner@users.noreply.github.com>
2025-06-02 02:10:57 -07:00
Jarred Sumner
71c14fac7b Split EventLoop into many more files (#20134)
Co-authored-by: Jarred-Sumner <709451+Jarred-Sumner@users.noreply.github.com>
2025-06-01 20:43:30 -07:00
Stanislaw Wozniak
b2a728e45d bug: Do not duplicate Transfer-Encoding header (#20116)
Co-authored-by: Ciro Spaciari <ciro.spaciari@gmail.com>
2025-06-01 15:54:04 -07:00
Jarred Sumner
390798c172 Fix memory leak in Bun.spawn (#20095)
Co-authored-by: Dylan Conway <dylan.conway567@gmail.com>
2025-05-31 20:06:22 -07:00
Meghan Denny
284de53f26 safety: audit and add missing exception checks to JSC::constructArray+constructEmptyArray (#20119) 2025-05-31 20:05:02 -07:00
Jarred Sumner
5a025abddf Address clang warnings on newer clang (#20054) 2025-05-31 19:44:36 -07:00
Dylan Conway
4ab4b1b131 Clean up help text for package manager commands (#20117) 2025-05-31 19:21:37 -07:00
Roman A
13ea970852 A couple grammar fixes (#20096) 2025-05-31 19:14:51 -07:00
Meghan Denny
ba78d5b2c3 ci: pass the src directory to 'zig fmt' (#20114)
Co-authored-by: nektro <5464072+nektro@users.noreply.github.com>
2025-05-31 18:52:18 -07:00
Dylan Conway
ce8767cdc8 add HTTPParser (#20049)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2025-05-31 16:21:08 -07:00
familyboat
082a9cb59c fix: missing "default" export in module "data.yml" (#20094) 2025-05-31 14:55:15 -07:00
Jarred Sumner
3c37f25b65 Avoid allocating 16 KB to read a potentially empty buffer (#20101) 2025-05-31 14:00:51 -07:00
Ali
a079743a02 fix NapiHandleScopeImpl memory leak (#20108) 2025-05-31 13:57:46 -07:00
Meghan Denny
e0852fd651 fix memory leak when pipe Bun.spawn stdio is never read repeatedly (#20102)
Co-authored-by: nektro <5464072+nektro@users.noreply.github.com>
2025-05-31 11:16:49 -07:00
Grigory
6bbd1e0685 fix(NodeValidator): make object check less strict (#19047)
Co-authored-by: graphite-app[bot] <96075541+graphite-app[bot]@users.noreply.github.com>
2025-05-30 22:10:29 -07:00
Ali
4534f6e635 fix NapiHandleScopeImpl race condition (#20093) 2025-05-30 20:33:50 -07:00
Meghan Denny
c62a7a77a3 fix assertion in JSSocketAddressDTO__create (#20063) 2025-05-30 19:33:58 -07:00
190n
ecf5ea389f Move LLDB initialization commands to make attach configuration work (#20085) 2025-05-30 19:33:03 -07:00
Jarred Sumner
010ef4d119 More comments in the socket type definition (#19410)
Co-authored-by: Alistair Smith <hi@alistair.sh>
2025-05-30 19:12:30 -07:00
Julie Saia
4d77cd53f1 Add support for catalogs in bun outdated (#20090)
Co-authored-by: Dylan Conway <dylan.conway567@gmail.com>
2025-05-30 17:23:59 -07:00
Leander Paul
3cf353b755 feat: additional jest types in bun test (#19255)
Co-authored-by: Alistair Smith <hi@alistair.sh>
2025-05-30 15:28:11 -07:00
Jarred Sumner
fd894f5a65 Fix test-child-process-server-close (#20062) 2025-05-30 00:15:26 -07:00
Jarred Sumner
a9969b7db2 Update codex-test-sync.yml 2025-05-30 00:04:39 -07:00
Jarred Sumner
27a08fca84 Update codex-test-sync.yml 2025-05-29 23:40:57 -07:00
Jarred Sumner
a398bd62a3 Add passing node tests (#20052) 2025-05-29 22:53:28 -07:00
Jarred Sumner
2aa7c59727 Delete environment.json 2025-05-29 21:57:06 -07:00
Jarred Sumner
7765b61038 Update environment.json 2025-05-29 21:46:24 -07:00
Jarred Sumner
8a06ddb1fb Update environment.json 2025-05-29 21:11:53 -07:00
Jarred Sumner
2e76e69939 Update environment.json 2025-05-29 21:08:55 -07:00
Jarred Sumner
aa404b14c4 Fix http Expect header handling (#20026)
Co-authored-by: Jarred-Sumner <709451+Jarred-Sumner@users.noreply.github.com>
Co-authored-by: Ciro Spaciari <ciro.spaciari@gmail.com>
2025-05-29 20:03:32 -07:00
Jarred Sumner
a4819b41e9 Fix setSourceMapsEnabled node test (#20039)
Co-authored-by: Jarred-Sumner <709451+Jarred-Sumner@users.noreply.github.com>
2025-05-29 18:04:31 -07:00
Jarred Sumner
f5bfda9699 Fix http socket encoding check (#20031)
Co-authored-by: Jarred-Sumner <709451+Jarred-Sumner@users.noreply.github.com>
2025-05-29 17:05:51 -07:00
Jarred Sumner
9f5adfefe3 Add action that ensures the node test is downloaded from node's repo 2025-05-29 17:02:11 -07:00
Jarred Sumner
316c8d6c48 Add node:test:cp package.json script 2025-05-29 16:48:37 -07:00
Ashcon Partovi
da87890532 ci: Fix build image step with zig (#20023) 2025-05-29 16:07:35 -07:00
Meghan Denny
576f66c149 fix test-net-server-drop-connections.js (#19995) 2025-05-29 13:55:25 -07:00
Ashcon Partovi
cd0756c95c Revert "ci: Fix build image step with cross-compiled zig"
This reverts commit c92f3f7b72.
2025-05-29 12:44:43 -07:00
Ashcon Partovi
c92f3f7b72 ci: Fix build image step with cross-compiled zig 2025-05-29 12:43:29 -07:00
190n
f1226c9767 ci: fix machine.mjs for Windows instances (#20021)
Co-authored-by: 190n <7763597+190n@users.noreply.github.com>
2025-05-29 12:04:10 -07:00
Meghan Denny
b111e6db02 fix test-net-connect-handle-econnrefused.js (#19993) 2025-05-29 11:32:54 -07:00
Meghan Denny
ffffb634c6 fix test-net-bytes-stats.js (#20003) 2025-05-29 11:32:13 -07:00
Meghan Denny
d109183d3e fix test-net-better-error-messages-port.js (#20008) 2025-05-29 11:31:53 -07:00
Meghan Denny
14c9165d6f fix test-net-socket-local-address.js (#20010) 2025-05-29 11:31:26 -07:00
wldfngrs
c42539b0bf Fix parse segfault #18888 (#19817)
Co-authored-by: Dylan Conway <dylan.conway567@gmail.com>
2025-05-29 08:44:19 -07:00
Meghan Denny
022a567af0 tidy from 19970 (#20002) 2025-05-29 00:31:44 -07:00
Jarred Sumner
cfb8956ac5 Cursor config 2025-05-28 23:09:16 -07:00
190n
2bb36ca6b4 Fix crash initializing process stdio streams while process is overridden (#19978) 2025-05-28 22:57:59 -07:00
Jarred Sumner
24b3de1bc3 Fix net close event and add reconnect test (#19975)
Co-authored-by: Meghan Denny <meghan@bun.sh>
2025-05-28 22:27:52 -07:00
Jarred Sumner
b01ffe6da8 Fix pauseOnConnect semantics for node:net server (#19987) 2025-05-28 22:23:57 -07:00
Kai Tamkun
579f2ecd51 Add node:vm leak tests (#19947)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2025-05-28 22:23:30 -07:00
Jarred Sumner
627b0010e0 Fix Node net bytesWritten with pending strings (#19962)
Co-authored-by: Meghan Denny <meghan@bun.sh>
2025-05-28 22:21:28 -07:00
Jarred Sumner
3369e25a70 Update environment.json 2025-05-28 22:04:38 -07:00
Jarred Sumner
06a40f0b29 Configure cursor 2025-05-28 21:55:08 -07:00
Jarred Sumner
7989352b39 Add node server close test (#19972)
Co-authored-by: Meghan Denny <meghan@bun.sh>
2025-05-28 21:38:52 -07:00
Jarred Sumner
e1ab6fe36b Add net autoselectfamily default test (#19970)
Co-authored-by: Jarred-Sumner <709451+Jarred-Sumner@users.noreply.github.com>
2025-05-28 21:30:22 -07:00
Jarred Sumner
14f59568cc Fix net.listen backlog arg & add Node test (#19966)
Co-authored-by: Meghan Denny <meghan@bun.sh>
2025-05-28 21:23:35 -07:00
github-actions[bot]
1855836259 deps: update c-ares to v1.34.5 (#19897)
Co-authored-by: Jarred-Sumner <Jarred-Sumner@users.noreply.github.com>
2025-05-28 19:50:29 -07:00
Meghan Denny
c85cf136a5 test-http-get-pipeline-problem.js passes on windows (#19980) 2025-05-28 19:28:02 -07:00
Meghan Denny
4da85ac9c1 test-http2-compat-serverrequest-pipe.js passes on windows (#19981) 2025-05-28 19:27:41 -07:00
Meghan Denny
9248d81871 test-http2-trailers-after-session-close.js passes on windows (#19983) 2025-05-28 19:27:12 -07:00
Meghan Denny
ba21d6d54b test-require-long-path.js passes on windows (#19984) 2025-05-28 19:26:44 -07:00
Meghan Denny
32985591eb test-http2-pipe-named-pipe.js passes on windows (#19982) 2025-05-28 19:26:20 -07:00
Jarred Sumner
544d399980 Start splitting install.zig into a few more files (#19959)
Co-authored-by: Jarred-Sumner <709451+Jarred-Sumner@users.noreply.github.com>
2025-05-28 19:25:59 -07:00
Meghan Denny
809992229f node:net rework (#18962)
Co-authored-by: nektro <5464072+nektro@users.noreply.github.com>
2025-05-28 17:04:37 -07:00
190n
9a0624bd99 Delete files used by issue triage agent (#19955) 2025-05-28 12:07:47 -07:00
Dylan Conway
ec2c2281cf bump 2025-05-28 11:51:39 -07:00
Jarred Sumner
df017990aa Implement automatic workspace folders support for Chrome DevTools (#19949) 2025-05-28 00:25:30 -07:00
pfg
bf02d04479 Don't validate cookie strings passed in the CookieMap constructor (#19945)
Co-authored-by: pfgithub <6010774+pfgithub@users.noreply.github.com>
2025-05-27 20:14:21 -07:00
Dylan Conway
5910504aeb bun pm audit -> bun audit (#19944) 2025-05-27 19:52:18 -07:00
Meghan Denny
8759527feb zsh: fix syntax error in bun audit completion 2025-05-27 19:51:18 -07:00
Jarred Sumner
7b4b299be0 Move this up even more 2025-05-27 18:22:30 -07:00
Jarred Sumner
ff8c2dcbc4 Bump WebKit again (#19943)
Co-authored-by: Jarred-Sumner <709451+Jarred-Sumner@users.noreply.github.com>
2025-05-27 17:55:43 -07:00
Jarred Sumner
a275ed654b Move this code up 2025-05-27 16:57:43 -07:00
Jarred Sumner
7b164ee9de Fix async explicit resource management in browser builds (#19896)
Co-authored-by: Jarred-Sumner <709451+Jarred-Sumner@users.noreply.github.com>
Co-authored-by: Dylan Conway <dylan.conway567@gmail.com>
2025-05-27 16:45:54 -07:00
Varun Narravula
fc92921a4a fix: parse JSX namespace identifiers that have numbers in them (#19912)
Co-authored-by: Michael H <git@riskymh.dev>
Co-authored-by: Dylan Conway <dylan.conway567@gmail.com>
2025-05-27 15:45:11 -07:00
Jarred Sumner
44d04968cd Add a cursor rule (#19926)
Co-authored-by: Andrew Jefferson <8148776+eastlondoner@users.noreply.github.com>
2025-05-27 15:45:01 -07:00
Ben Grant
e6ab636313 disallow bash tool in issue triage agent 2025-05-27 11:24:44 -07:00
Yechao LI
325d0b1ed6 fix: correct function type for spyon an optional function (#19240)
Co-authored-by: Alistair Smith <hi@alistair.sh>
2025-05-26 21:58:10 -07:00
Jarred Sumner
a8e4489e10 Add docs for bun pm audit (#19885)
Co-authored-by: Alistair Smith <hi@alistair.sh>
Co-authored-by: alii <25351731+alii@users.noreply.github.com>
2025-05-26 21:56:32 -07:00
Alistair Smith
31980bc151 perf_hooks.Histogram (#19920) 2025-05-26 21:18:22 -07:00
Jarred Sumner
e58df65a75 Bump WebKit (#19882) 2025-05-26 18:56:32 -07:00
Pierre
6317d6498f fix: Add missing CryptoKeyPair global type (#19921)
Co-authored-by: Alistair Smith <hi@alistair.sh>
2025-05-26 18:17:29 -07:00
Alistair Smith
9e61b70535 test-net-socket-constructor.js (#19804) 2025-05-26 13:14:42 -07:00
Alistair Smith
58c1372b50 Implements Node.js behaviour for parallel/test-tls-set-ciphers-error.js (#19443) 2025-05-26 13:13:59 -07:00
familyboat
88840dcafa doc: remove redundant word "page" (#19915) 2025-05-26 12:45:41 -07:00
Jarred Sumner
793a9752c9 Update react.md 2025-05-25 13:16:09 -07:00
Jarred Sumner
8f08e84c1e Update react.md 2025-05-25 12:56:46 -07:00
Jarred Sumner
3605531e34 Remove empty page 2025-05-25 12:09:25 -07:00
Jarred Sumner
7dc58e0ce4 Add BUN_OPTIONS env var (#19766)
Co-authored-by: claude[bot] <209825114+claude[bot]@users.noreply.github.com>
Co-authored-by: Jarred-Sumner <Jarred-Sumner@users.noreply.github.com>
2025-05-24 13:36:51 -07:00
Sculas
15a58cca1c fix(windows): respect NO_COLOR in filter_run (#19888) 2025-05-24 13:33:16 -07:00
Jarred Sumner
a3fdfeb924 Add more to this list 2025-05-24 00:22:15 -07:00
Seth Flynn
c024e73e6a fix(BunRequest): make clone() return a BunRequest (#19813) 2025-05-23 23:37:47 -07:00
Kai Tamkun
392212b090 node:vm compatibility (#19703) 2025-05-23 22:59:58 -07:00
Jarred Sumner
3ea6133c46 CI: Remove unused top-level decls in formatter in zig (#19879)
Co-authored-by: Jarred-Sumner <709451+Jarred-Sumner@users.noreply.github.com>
Co-authored-by: graphite-app[bot] <96075541+graphite-app[bot]@users.noreply.github.com>
2025-05-23 22:49:48 -07:00
190n
5d84f8a102 delete flaky test-worker-uncaught-exception.js (#19857) 2025-05-23 22:49:09 -07:00
Alistair Smith
9e329ee605 bun pm audit (#19855) 2025-05-23 22:31:12 -07:00
Kai Tamkun
76f6574729 Fix memory leak in c_ares.zig Error.Deferred.rejectLater (#19871) 2025-05-23 20:54:50 -07:00
Jarred Sumner
50b938561a Normalize versions in bun pm view <pkg> versions like npm view does (#19870)
Co-authored-by: Dylan Conway <35280289+dylan-conway@users.noreply.github.com>
2025-05-23 18:10:56 -07:00
190n
3b75095f0c Use long timeout for bun test launch configurations (#19869) 2025-05-23 18:03:09 -07:00
Jarred Sumner
7b127c946d Fix regression from #19783 (#19837) 2025-05-23 17:49:36 -07:00
Ashcon Partovi
b9a63893fe ci: Fix permissions with gh CLI 2025-05-23 17:23:34 -07:00
Ashcon Partovi
ff1a35668f ci: Fix claude tool permissions 2025-05-23 17:17:04 -07:00
Ashcon Partovi
b36b4b2888 ci: Fix triage permissions 2025-05-23 17:13:55 -07:00
Ashcon Partovi
e7e5528632 ci: Fix triage workflow 2025-05-23 17:10:30 -07:00
Ashcon Partovi
9a5ff02420 ci: Fix anthropic auth in CI 2025-05-23 17:09:52 -07:00
Ashcon Partovi
4e9ee08a4a ci: Fix running claude in CI 2025-05-23 17:08:12 -07:00
Ashcon Partovi
e11ac9d1b8 ci: fix install claude in workflow 2025-05-23 17:06:05 -07:00
Ashcon Partovi
e9414966ca ci: Install claude for agent workflow 2025-05-23 17:01:38 -07:00
Ashcon Partovi
b2ae98865b ci: fix triage on linux 2025-05-23 16:58:30 -07:00
Ashcon Partovi
e8ed50cd9a ci: tweak how triage automation works 2025-05-23 16:56:14 -07:00
190n
9dd799d2e6 Implement Worker.getHeapSnapshot (#19706)
Co-authored-by: 190n <7763597+190n@users.noreply.github.com>
2025-05-23 16:50:13 -07:00
Meghan Denny
ba28eeece6 ci: add update-zstd.yml (#19812)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2025-05-23 16:49:37 -07:00
Meghan Denny
e9f908fcbf cmake: move SetupWebkit early-return so that WEBKIT_NAME is always printed (#19716) 2025-05-23 15:49:57 -07:00
Ashcon Partovi
654472f217 ci: fix triage automation trigger, again 2025-05-23 15:46:39 -07:00
Ashcon Partovi
5dcf99424c ci: fix triage automation trigger 2025-05-23 15:45:34 -07:00
Ashcon Partovi
ae91711010 ci: Fix triage workflow 2025-05-23 15:42:26 -07:00
Ashcon Partovi
ca6ba0fa2d ci: add triage automation (#19873) 2025-05-23 15:38:15 -07:00
Jarred Sumner
3195df8796 Remove leading period 2025-05-22 23:55:57 -07:00
Jarred Sumner
9d1eace981 Add bun pm view command (#19841)
Co-authored-by: Jarred-Sumner <709451+Jarred-Sumner@users.noreply.github.com>
2025-05-22 23:51:31 -07:00
Dylan Conway
8e80afbce1 Replace string runtime flags with enum (#19827) 2025-05-22 22:36:46 -07:00
190n
efb6b823c9 Strongly type GlobalObject::processObject() (#19826) 2025-05-22 21:48:48 -07:00
Jarred Sumner
6d348fa759 Add glob sources workflow (#19860) 2025-05-22 21:48:22 -07:00
Jarred Sumner
69be630aea WebKit Upgrade (#19839)
Co-authored-by: Jarred-Sumner <709451+Jarred-Sumner@users.noreply.github.com>
Co-authored-by: Ben Grant <ben@bun.sh>
Co-authored-by: 190n <7763597+190n@users.noreply.github.com>
2025-05-22 21:12:43 -07:00
Jarred Sumner
bca833ad59 Split lockfile.zig into a more logical directory structure (#19858) 2025-05-22 21:11:54 -07:00
Ciro Spaciari
ef9ea8ae1c fix(fetch) ignore trailers and add trailer tests (#19854) 2025-05-22 20:17:21 -07:00
Dylan Conway
a844957eb3 Use operationMathPow for parser constant folding (#19853) 2025-05-22 20:16:37 -07:00
Jarred Sumner
573927c4bf Add a cursor rule 2025-05-22 12:04:10 -07:00
190n
3e97c1caf3 restore bun bd and make it quiet (#19831) 2025-05-22 00:40:48 -07:00
Meghan Denny
b4450db807 Bump 2025-05-21 16:58:52 -07:00
Alistair Smith
6a363a38da node:net compat: Invalid port test for .listen (#19768) 2025-05-21 11:56:17 -07:00
Jarred Sumner
ffa286ef70 Update docs on workspaces and catalogs (#19815) 2025-05-21 11:38:37 -07:00
Seokho Song, dave@modusign
2fc8785868 Add x25519 elliptic curve cryptography to webcrypto (#19674) 2025-05-21 11:23:23 -07:00
Dylan Conway
8ddb92085b update bun.lock types for catalog(s) (#19814)
Co-authored-by: RiskyMH <git@riskymh.dev>
2025-05-21 00:22:18 -07:00
Jarred Sumner
4ca83be84f Add Zstd decompression to HTTP client (#19800)
Co-authored-by: Jarred-Sumner <709451+Jarred-Sumner@users.noreply.github.com>
Co-authored-by: Dylan Conway <35280289+dylan-conway@users.noreply.github.com>
2025-05-20 23:26:47 -07:00
Jarred Sumner
8aae534270 Fix Node browser fallbacks to have util.inherit and other size improvements (#19783)
Co-authored-by: Jarred-Sumner <709451+Jarred-Sumner@users.noreply.github.com>
2025-05-20 23:25:52 -07:00
Dylan Conway
98ee30eccf Implement catalogs in bun install (#19809)
Co-authored-by: graphite-app[bot] <96075541+graphite-app[bot]@users.noreply.github.com>
Co-authored-by: claude[bot] <209825114+claude[bot]@users.noreply.github.com>
Co-authored-by: Jarred-Sumner <Jarred-Sumner@users.noreply.github.com>
2025-05-20 23:03:21 -07:00
Jarred Sumner
562a65037d Bump zstd version (#19801)
Co-authored-by: Jarred-Sumner <709451+Jarred-Sumner@users.noreply.github.com>
2025-05-20 21:32:18 -07:00
pfg
beb1db967b Fix numeric header in node http server (#19811) 2025-05-20 21:32:07 -07:00
Jarred Sumner
0efbb29581 Do not use TCP cork in proxied HTTPS (#19794)
Co-authored-by: Jarred-Sumner <709451+Jarred-Sumner@users.noreply.github.com>
2025-05-20 21:27:17 -07:00
Ciro Spaciari
0e883c935c fix(install/fetch) proper handle proxy (#19771)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
Co-authored-by: Jarred-Sumner <709451+Jarred-Sumner@users.noreply.github.com>
Co-authored-by: cirospaciari <6379399+cirospaciari@users.noreply.github.com>
Co-authored-by: Meghan Denny <meghan@bun.sh>
2025-05-20 21:11:22 -07:00
Jarred Sumner
497360d543 Fix BroadcastChannel.unref() return value (#19810)
Co-authored-by: Jarred-Sumner <709451+Jarred-Sumner@users.noreply.github.com>
Co-authored-by: claude[bot] <209825114+claude[bot]@users.noreply.github.com>
2025-05-20 21:09:16 -07:00
190n
e23491391b bun run prettier (#19807)
Co-authored-by: 190n <7763597+190n@users.noreply.github.com>
2025-05-20 20:01:38 -07:00
190n
259bf47abd Add sourceMap to launch.json so lldb can find WebKit code (#19263) 2025-05-20 16:50:47 -07:00
190n
d1ac52da2c ci: use ARM EC2 instances for build-zig (#19781) 2025-05-20 12:41:06 -07:00
Ben Grant
1ebec90d6e Revert "Add test from #18287 (#19775)"
This reverts commit f1504c4265.
2025-05-20 12:22:01 -07:00
190n
f1504c4265 Add test from #18287 (#19775) 2025-05-20 11:56:30 -07:00
Ashcon Partovi
21f238a827 cmake: Move sources to their own folder (#19776) 2025-05-20 10:53:57 -07:00
Dylan Conway
33be08bde8 Fix RuntimeError.from return value (#19777)
Co-authored-by: claude[bot] <209825114+claude[bot]@users.noreply.github.com>
Co-authored-by: dylan-conway <dylan-conway@users.noreply.github.com>
2025-05-19 17:05:10 -07:00
Braden Everson
67b64c3334 Update TextDecoder's constructor to Handle Undefined (#19708)
Co-authored-by: Dylan Conway <35280289+dylan-conway@users.noreply.github.com>
2025-05-19 16:44:57 -07:00
Jarred Sumner
bfd12eeeba [bun install] Do not prefetch dns for npm registry if proxy (#19749) 2025-05-19 12:35:16 -07:00
Jarred Sumner
004ee11bed [internal builtins] Small typescript fix 2025-05-19 12:18:50 -07:00
Jarred Sumner
457c15e424 [bun install] Fix race condition when error occurs while extracting tarballs (#19751) 2025-05-19 11:55:34 -07:00
Jarred Sumner
815182799e [bun install] Don't save manifest cache when --no-cache is passed (#19752) 2025-05-19 11:39:39 -07:00
Jarred Sumner
a5cb42c407 Add Claude Code GitHub Workflow (#19769) 2025-05-19 11:28:20 -07:00
1100 changed files with 73159 additions and 30611 deletions

.agent/agent.mjs (new file, 78 lines)

@@ -0,0 +1,78 @@
import { spawnSync } from "node:child_process";
import { readFileSync, existsSync } from "node:fs";
import { parseArgs } from "node:util";
const { positionals, values } = parseArgs({
allowPositionals: true,
options: {
help: {
type: "boolean",
short: "h",
default: false,
},
interactive: {
type: "boolean",
short: "i",
default: false,
},
},
});
if (values.help || positionals.length === 0) {
console.log("Usage: node agent.mjs <prompt_name> [extra_args...]");
console.log("Example: node agent.mjs triage fix bug in authentication");
console.log("Options:");
console.log(" -h, --help Show this help message");
console.log(" -i, --interactive Run in interactive mode");
process.exit(0);
}
const promptName = positionals[0].toUpperCase();
const promptFile = `.agent/${promptName}.md`;
const extraArgs = positionals.slice(1);
if (!existsSync(promptFile)) {
console.error(`Error: Prompt file "${promptFile}" not found`);
console.error(`Available prompts should be named like: .agent/triage.md, .agent/debug.md, etc.`);
process.exit(1);
}
try {
let prompt = readFileSync(promptFile, "utf-8");
const githubEnvs = Object.entries(process.env)
.filter(([key]) => key.startsWith("GITHUB_"))
.sort(([a], [b]) => a.localeCompare(b));
if (githubEnvs.length > 0) {
const githubContext = `## GitHub Environment\n\n${githubEnvs
.map(([key, value]) => `**${key}**: \`${value}\``)
.join("\n")}\n\n---\n\n`;
prompt = githubContext + prompt;
}
if (extraArgs.length > 0) {
const extraArgsContext = `\n\n## Additional Arguments\n\n${extraArgs.join(" ")}\n\n---\n\n`;
prompt = prompt + extraArgsContext;
}
const claudeArgs = [prompt, "--allowedTools=Edit,Write,Replace,Search", "--output-format=json"];
if (!values.interactive) {
claudeArgs.unshift("--print");
}
const { status, error } = spawnSync("claude", claudeArgs, {
stdio: "inherit",
encoding: "utf-8",
});
if (error) {
console.error("Error running claude:", error);
process.exit(1);
}
process.exit(status || 0);
} catch (error) {
console.error(`Error reading prompt file "${promptFile}":`, error);
process.exit(1);
}
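
A quick usage sketch for the script above, inferred from its own help text and path handling (the prompt name is uppercased, so `triage` resolves to a hypothetical `.agent/TRIAGE.md`):

```sh
# Run the "triage" prompt non-interactively (the default adds --print to the claude CLI),
# passing extra words that get appended under an "Additional Arguments" heading.
node .agent/agent.mjs triage fix bug in authentication

# Run the same prompt interactively (-i skips --print so claude stays attached).
node .agent/agent.mjs triage -i
```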


@@ -228,13 +228,7 @@ function getRetry(limit = 0) {
manual: {
permit_on_passed: true,
},
automatic: [
{ exit_status: 1, limit },
{ exit_status: -1, limit: 1 },
{ exit_status: 255, limit: 1 },
{ signal_reason: "cancel", limit: 1 },
{ signal_reason: "agent_stop", limit: 1 },
],
automatic: false,
};
}
@@ -315,6 +309,19 @@ function getCppAgent(platform, options) {
});
}
/**
* @returns {Platform}
*/
function getZigPlatform() {
return {
os: "linux",
arch: "aarch64",
abi: "musl",
distro: "alpine",
release: "3.21",
};
}
/**
* @param {Platform} platform
* @param {PipelineOptions} options
@@ -328,21 +335,9 @@ function getZigAgent(platform, options) {
// queue: "build-zig",
// };
return getEc2Agent(
{
os: "linux",
arch: "x64",
abi: "musl",
distro: "alpine",
release: "3.21",
},
options,
{
instanceType: "c7i.2xlarge",
cpuCount: 4,
threadsPerCore: 1,
},
);
return getEc2Agent(getZigPlatform(), options, {
instanceType: "r8g.large",
});
}
/**
@@ -455,7 +450,7 @@ function getBuildCppStep(platform, options) {
BUN_CPP_ONLY: "ON",
...getBuildEnv(platform, options),
},
// We used to build the C++ dependencies and bun in seperate steps.
// We used to build the C++ dependencies and bun in separate steps.
// However, as long as the zig build takes longer than both sequentially,
// it's cheaper to run them in the same step. Can be revisited in the future.
command: [`${command} --target bun`, `${command} --target dependencies`],
@@ -574,7 +569,7 @@ function getTestBunStep(platform, options, testOptions = {}) {
retry: getRetry(),
cancel_on_build_failing: isMergeQueue(),
parallelism: unifiedTests ? undefined : os === "darwin" ? 2 : 10,
timeout_in_minutes: profile === "asan" ? 90 : 30,
timeout_in_minutes: profile === "asan" ? 45 : 30,
command:
os === "windows"
? `node .\\scripts\\runner.node.mjs ${args.join(" ")}`
@@ -927,7 +922,7 @@ function getOptionsStep() {
{
key: "unified-builds",
select: "Do you want to build each platform in a single step?",
hint: "If true, builds will not be split into seperate steps (this will likely slow down the build)",
hint: "If true, builds will not be split into separate steps (this will likely slow down the build)",
required: false,
default: "false",
options: booleanOptions,
@@ -935,7 +930,7 @@ function getOptionsStep() {
{
key: "unified-tests",
select: "Do you want to run tests in a single step?",
hint: "If true, tests will not be split into seperate steps (this will be very slow)",
hint: "If true, tests will not be split into separate steps (this will be very slow)",
required: false,
default: "false",
options: booleanOptions,
@@ -1113,6 +1108,11 @@ async function getPipeline(options = {}) {
steps.push(
...relevantBuildPlatforms.map(target => {
const imageKey = getImageKey(target);
const zigImageKey = getImageKey(getZigPlatform());
const dependsOn = imagePlatforms.has(zigImageKey) ? [`${zigImageKey}-build-image`] : [];
if (imagePlatforms.has(imageKey)) {
dependsOn.push(`${imageKey}-build-image`);
}
return getStepWithDependsOn(
{
@@ -1122,7 +1122,7 @@ async function getPipeline(options = {}) {
? [getBuildBunStep(target, options)]
: [getBuildCppStep(target, options), getBuildZigStep(target, options), getLinkBunStep(target, options)],
},
imagePlatforms.has(imageKey) ? `${imageKey}-build-image` : undefined,
...dependsOn,
);
}),
);


@@ -1,27 +1,13 @@
---
description: How to build Bun
globs:
globs:
alwaysApply: false
---
# How to build Bun
## CMake
Run:
Bun is built using CMake, which you can find in `CMakeLists.txt` and in the `cmake/` directory.
* `CMakeLists.txt`
* `cmake/`
* `Globals.cmake` - macros and functions used by all the other files
* `Options.cmake` - build options for configuring the build (e.g. debug/release mode)
* `CompilerFlags.cmake` - compiler and linker flags used by all the targets
* `tools/` - setup scripts for various build tools (e.g. llvm, zig, webkit, rust, etc.)
* `targets/` - targets for bun and its dependencies (e.g. brotli, boringssl, libuv, etc.)
## How to
There are `package.json` scripts that make it easy to build Bun without calling CMake directly, for example:
```sh
bun run build # builds a debug build: `build/debug/bun-debug`
bun run build:release # builds a release build: `build/release/bun`
bun run build:assert # builds a release build with debug assertions: `build/assert/bun`
```bash
bun bd
```


@@ -91,7 +91,7 @@ devTest("html file is watched", {
`files` holds the initial state, and the callback runs with the server running. `dev.fetch()` runs HTTP requests, while `dev.client()` opens a browser instance to the code.
Functions `dev.write` and `dev.patch` and `dev.delete` mutate the filesystem. Do not use `node:fs` APIs, as the dev server ones are hooked to wait for hot-reload, and all connected clients to recieve changes.
Functions `dev.write` and `dev.patch` and `dev.delete` mutate the filesystem. Do not use `node:fs` APIs, as the dev server ones are hooked to wait for hot-reload, and all connected clients to receive changes.
When a change performs a hard-reload, that must be explicitly annotated with `expectReload`. This tells `client-fixture.mjs` that the test is meant to reload the page once; All other hard reloads automatically fail the test.


@@ -0,0 +1,203 @@
# Registering Functions, Objects, and Modules in Bun
This guide documents the process of adding new functionality to the Bun global object and runtime.
## Overview
Bun's architecture exposes functionality to JavaScript through a set of carefully registered functions, objects, and modules. Most core functionality is implemented in Zig, with JavaScript bindings that make these features accessible to users.
There are several key ways to expose functionality in Bun:
1. **Global Functions**: Direct methods on the `Bun` object (e.g., `Bun.serve()`)
2. **Getter Properties**: Lazily initialized properties on the `Bun` object (e.g., `Bun.sqlite`)
3. **Constructor Classes**: Classes available through the `Bun` object (e.g., `Bun.ValkeyClient`)
4. **Global Modules**: Modules that can be imported directly (e.g., `import {X} from "bun:*"`)
## The Registration Process
Adding new functionality to Bun involves several coordinated steps across multiple files:
### 1. Implement the Core Functionality in Zig
First, implement your feature in Zig, typically in its own directory in `src/`. Examples:
- `src/valkey/` for Redis/Valkey client
- `src/semver/` for SemVer functionality
- `src/smtp/` for SMTP client
### 2. Create JavaScript Bindings
Create bindings that expose your Zig functionality to JavaScript:
- Create a class definition file (e.g., `js_bindings.classes.ts`) to define the JavaScript interface
- Implement `JSYourFeature` struct in a file like `js_your_feature.zig`
Example from a class definition file:
```typescript
// Example from a .classes.ts file
import { define } from "../../codegen/class-definitions";
export default [
define({
name: "YourFeature",
construct: true,
finalize: true,
hasPendingActivity: true,
memoryCost: true,
klass: {},
JSType: "0b11101110",
proto: {
yourMethod: {
fn: "yourZigMethod",
length: 1,
},
property: {
getter: "getProperty",
},
},
values: ["cachedValues"],
}),
];
```
### 3. Register with BunObject in `src/bun.js/bindings/BunObject+exports.h`
Add an entry to the `FOR_EACH_GETTER` macro:
```c
// In BunObject+exports.h
#define FOR_EACH_GETTER(macro) \
macro(CSRF) \
macro(CryptoHasher) \
... \
macro(YourFeature) \
```
### 4. Create a Getter Function in `src/bun.js/api/BunObject.zig`
Implement a getter function in `BunObject.zig` that returns your feature:
```zig
// In BunObject.zig
pub const YourFeature = toJSGetter(Bun.getYourFeatureConstructor);
// In the exportAll() function:
@export(&BunObject.YourFeature, .{ .name = getterName("YourFeature") });
```
### 5. Implement the Getter Function in a Relevant Zig File
Implement the function that creates your object:
```zig
// In your main module file (e.g., src/your_feature/your_feature.zig)
pub fn getYourFeatureConstructor(globalThis: *JSC.JSGlobalObject, _: *JSC.JSObject) JSC.JSValue {
return JSC.API.YourFeature.getConstructor(globalThis);
}
```
### 6. Add to Build System
Ensure your files are included in the build system by adding them to the appropriate targets.
## Example: Adding a New Module
Here's a comprehensive example of adding a hypothetical SMTP module:
1. Create implementation files in `src/smtp/`:
- `index.zig`: Main entry point that exports everything
- `SmtpClient.zig`: Core SMTP client implementation
- `js_smtp.zig`: JavaScript bindings
- `js_bindings.classes.ts`: Class definition
2. Define your JS class in `js_bindings.classes.ts`:
```typescript
import { define } from "../../codegen/class-definitions";
export default [
define({
name: "EmailClient",
construct: true,
finalize: true,
hasPendingActivity: true,
configurable: false,
memoryCost: true,
klass: {},
JSType: "0b11101110",
proto: {
send: {
fn: "send",
length: 1,
},
verify: {
fn: "verify",
length: 0,
},
close: {
fn: "close",
length: 0,
},
},
values: ["connectionPromise"],
}),
];
```
3. Add getter to `BunObject+exports.h`:
```c
#define FOR_EACH_GETTER(macro) \
macro(CSRF) \
... \
macro(SMTP) \
```
4. Add getter function to `BunObject.zig`:
```zig
pub const SMTP = toJSGetter(Bun.getSmtpConstructor);
// In exportAll:
@export(&BunObject.SMTP, .{ .name = getterName("SMTP") });
```
5. Implement getter in your module:
```zig
pub fn getSmtpConstructor(globalThis: *JSC.JSGlobalObject, _: *JSC.JSObject) JSC.JSValue {
return JSC.API.JSEmailClient.getConstructor(globalThis);
}
```
## Best Practices
1. **Follow Naming Conventions**: Align your naming with existing patterns
2. **Reference Existing Modules**: Study similar modules like Valkey or S3Client for guidance
3. **Memory Management**: Be careful with memory management and reference counting
4. **Error Handling**: Use `bun.JSError!JSValue` for proper error propagation
5. **Documentation**: Add JSDoc comments to your JavaScript bindings
6. **Testing**: Add tests for your new functionality
## Common Gotchas
- Be sure to handle reference counting properly with `ref()`/`deref()`
- Always implement proper cleanup in `deinit()` and `finalize()`
- For network operations, manage socket lifetimes correctly
- Use `JSC.Codegen` correctly to generate necessary binding code
## Related Files
- `src/bun.js/bindings/BunObject+exports.h`: Registration of getters and functions
- `src/bun.js/api/BunObject.zig`: Implementation of getters and object creation
- `src/bun.js/api/BunObject.classes.ts`: Class definitions
- `.cursor/rules/zig-javascriptcore-classes.mdc`: More details on class bindings
## Additional Resources
For more detailed information on specific topics:
- See `zig-javascriptcore-classes.mdc` for details on creating JS class bindings
- Review existing modules like `valkey`, `sqlite`, or `s3` for real-world examples


@@ -11,10 +11,10 @@ You'll find all of Bun's tests in the `test/` directory.
* `test/`
* `cli/` - CLI command tests, like `bun install` or `bun init`
* `js/` - JavaScript & TypeScript tests
* `bun/` - `Bun` APIs tests, seperated by category, for example: `glob/` for `Bun.Glob` tests
* `node/` - Node.js module tests, seperated by module, for example: `assert/` for `node:assert` tests
* `bun/` - `Bun` APIs tests, separated by category, for example: `glob/` for `Bun.Glob` tests
* `node/` - Node.js module tests, separated by module, for example: `assert/` for `node:assert` tests
* `test/` - Vendored Node.js tests, taken from the Node.js repository (does not conform to Bun's test style)
* `web/` - Web API tests, seperated by category, for example: `fetch/` for `Request` and `Response` tests
* `web/` - Web API tests, separated by category, for example: `fetch/` for `Request` and `Response` tests
* `third_party/` - npm package tests, to validate that basic usage works in Bun
* `napi/` - N-API tests
* `v8/` - V8 C++ API tests

.github/CODEOWNERS (vendored, 18 changed lines)

@@ -1,18 +1,18 @@
# Project
.github/CODEOWNERS @Jarred-Sumner
/.github/CODEOWNERS @Jarred-Sumner
# Build system
CMakeLists.txt @Electroid
cmake/ @Electroid
scripts/ @Electroid
/CMakeLists.txt @Electroid
/cmake/*.cmake @Electroid
/scripts/ @Electroid
# CI
.buildkite/ @Electroid
.github/workflows/ @Electroid
/.buildkite/ @Electroid
/.github/workflows/ @Electroid
# Debugger protocol
packages/bun-inspector-protocol/ @Electroid
packages/bun-debug-adapter-protocol/ @Electroid
/packages/bun-inspector-protocol/ @Electroid
/packages/bun-debug-adapter-protocol/ @Electroid
# Tests
test/expectations.txt @Jarred-Sumner
/test/expectations.txt @Jarred-Sumner

.github/workflows/claude.yml (new file, vendored, 35 lines)

@@ -0,0 +1,35 @@
name: Claude Code
on:
issue_comment:
types: [created]
pull_request_review_comment:
types: [created]
issues:
types: [opened, assigned]
pull_request_review:
types: [submitted]
jobs:
claude:
if: |
(github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude')) ||
(github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude')) ||
(github.event_name == 'pull_request_review' && contains(github.event.review.body, '@claude')) ||
(github.event_name == 'issues' && (contains(github.event.issue.body, '@claude') || contains(github.event.issue.title, '@claude')))
runs-on: ubuntu-latest
permissions:
contents: read
id-token: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Run Claude Code
id: claude
uses: anthropics/claude-code-action@beta
with:
anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}

.github/workflows/codex-test-sync.yml (new file, vendored, 58 lines)

@@ -0,0 +1,58 @@
name: Codex Test Sync
on:
pull_request:
types: [labeled, opened]
env:
BUN_VERSION: "1.2.15"
jobs:
sync-node-tests:
runs-on: ubuntu-latest
if: |
(github.event.action == 'labeled' && github.event.label.name == 'codex') ||
(github.event.action == 'opened' && contains(github.event.pull_request.labels.*.name, 'codex')) ||
contains(github.head_ref, 'codex')
permissions:
contents: write
pull-requests: write
steps:
- name: Checkout
uses: actions/checkout@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
fetch-depth: 0
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: ${{ env.BUN_VERSION }}
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@v44
with:
files: |
test/js/node/test/parallel/**/*.{js,mjs,ts}
test/js/node/test/sequential/**/*.{js,mjs,ts}
- name: Sync tests
if: steps.changed-files.outputs.any_changed == 'true'
shell: bash
run: |
echo "Changed test files:"
echo "${{ steps.changed-files.outputs.all_changed_files }}"
# Process each changed test file
for file in ${{ steps.changed-files.outputs.all_changed_files }}; do
# Extract test name from file path
test_name=$(basename "$file" | sed 's/\.[^.]*$//')
echo "Syncing test: $test_name"
bun node:test:cp "$test_name"
done
- name: Commit changes
uses: stefanzweifel/git-auto-commit-action@v5
with:
commit_message: "Sync Node.js tests with upstream"
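
To make the `basename | sed` step in the sync loop above concrete: for a changed path matched by the `files:` globs it strips the directory and the final extension, and the resulting name is what gets passed to `bun node:test:cp`. A small illustration (the path is a made-up example):

```sh
file="test/js/node/test/parallel/test-net-socket-write-error.js"
basename "$file" | sed 's/\.[^.]*$//'
# prints: test-net-socket-write-error
```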


@@ -44,7 +44,8 @@ jobs:
version: 0.14.0
- name: Zig Format
run: |
zig fmt src/**.zig
bun scripts/zig-remove-unreferenced-top-level-decls.ts src/
zig fmt src
- name: Commit
uses: stefanzweifel/git-auto-commit-action@v5
with:

.github/workflows/glob-sources.yml (new file, vendored, 41 lines)

@@ -0,0 +1,41 @@
name: Glob Sources
permissions:
contents: write
on:
workflow_call:
workflow_dispatch:
pull_request:
env:
BUN_VERSION: "1.2.11"
jobs:
glob-sources:
name: Glob Sources
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Configure Git
run: |
git config --global core.autocrlf true
git config --global core.ignorecase true
git config --global core.precomposeUnicode true
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: ${{ env.BUN_VERSION }}
- name: Setup Dependencies
run: |
bun install
- name: Glob sources
run: bun scripts/glob-sources.mjs
- name: Commit
uses: stefanzweifel/git-auto-commit-action@v5
with:
commit_message: "`bun scripts/glob-sources.mjs`"


@@ -50,12 +50,12 @@ jobs:
exit 1
fi
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/c-ares/c-ares/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/c-ares/c-ares/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1
fi
LATEST_SHA=$(curl -sL "https://api.github.com/repos/c-ares/c-ares/git/ref/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
LATEST_SHA=$(curl -sL "https://api.github.com/repos/c-ares/c-ares/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
exit 1
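
The two curl changes above switch the first lookup to `git/refs/tags/...` and the second to `git/tags/<sha>` (the libarchive, libdeflate, lol-html, and ls-hpack workflows below get the same `refs/tags` fix). The reason for the two hops appears to be annotated tags: the first call resolves the tag ref to the SHA of a tag object, and the second dereferences that tag object to the commit it points at. A hedged sketch of that flow, mirroring the workflow's own calls:

```sh
# Hop 1: tag ref -> SHA of the (annotated) tag object
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/c-ares/c-ares/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
# Hop 2: tag object -> SHA of the commit it tags
LATEST_SHA=$(curl -sL "https://api.github.com/repos/c-ares/c-ares/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
echo "$LATEST_SHA"
```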


@@ -50,7 +50,7 @@ jobs:
exit 1
fi
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/libarchive/libarchive/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/libarchive/libarchive/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1


@@ -50,7 +50,7 @@ jobs:
exit 1
fi
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/ebiggers/libdeflate/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/ebiggers/libdeflate/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1


@@ -50,7 +50,7 @@ jobs:
exit 1
fi
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1


@@ -50,7 +50,7 @@ jobs:
exit 1
fi
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1

.github/workflows/update-zstd.yml (new file, vendored, 99 lines)

@@ -0,0 +1,99 @@
name: Update zstd
on:
schedule:
- cron: "0 1 * * 0"
workflow_dispatch:
jobs:
check-update:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- uses: actions/checkout@v4
- name: Check zstd version
id: check-version
run: |
set -euo pipefail
# Extract the commit hash from the line after COMMIT
CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildZstd.cmake)
if [ -z "$CURRENT_VERSION" ]; then
echo "Error: Could not find COMMIT line in BuildZstd.cmake"
exit 1
fi
# Validate that it looks like a git hash
if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
echo "Error: Invalid git hash format in BuildZstd.cmake"
echo "Found: $CURRENT_VERSION"
echo "Expected: 40 character hexadecimal string"
exit 1
fi
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/facebook/zstd/releases/latest)
if [ -z "$LATEST_RELEASE" ]; then
echo "Error: Failed to fetch latest release from GitHub API"
exit 1
fi
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
echo "Error: Could not extract tag name from GitHub API response"
exit 1
fi
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/facebook/zstd/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1
fi
LATEST_SHA=$(curl -sL "https://api.github.com/repos/facebook/zstd/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
exit 1
fi
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
echo "Error: Invalid SHA format received from GitHub"
echo "Found: $LATEST_SHA"
echo "Expected: 40 character hexadecimal string"
exit 1
fi
echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT
- name: Update version if needed
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
run: |
set -euo pipefail
# Handle multi-line format where COMMIT and its value are on separate lines
sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildZstd.cmake
- name: Create Pull Request
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
uses: peter-evans/create-pull-request@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
add-paths: |
cmake/targets/BuildZstd.cmake
commit-message: "deps: update zstd to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update zstd to ${{ steps.check-version.outputs.tag }}"
delete-branch: true
branch: deps/update-zstd-${{ github.run_number }}
body: |
## What does this PR do?
Updates zstd to version ${{ steps.check-version.outputs.tag }}
Compare: https://github.com/facebook/zstd/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-zstd.yml)
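
The `awk` one-liner in the check step above pulls the pinned hash out of `cmake/targets/BuildZstd.cmake` by matching a line containing only `COMMIT` and printing the next line with surrounding whitespace trimmed; the later `sed` update relies on the same two-line shape. A self-contained illustration (the sample input is invented; only the `COMMIT`-then-hash layout matters):

```sh
printf '  COMMIT\n    0123456789abcdef0123456789abcdef01234567\n' \
  | awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}'
# prints: 0123456789abcdef0123456789abcdef01234567
```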


@@ -1,16 +1 @@
# Tell LLDB what to do when the debugged process receives SIGPWR: pass it through to the process
# (-p), but do not stop the process (-s) or notify the user (-n).
#
# JSC's garbage collector sends this signal (as configured by Bun WebKit in
# Thread::initializePlatformThreading() in ThreadingPOSIX.cpp) to the JS thread to suspend or resume
# it. So stopping the process would just create noise when debugging any long-running script.
process handle -p true -s false -n false SIGPWR
command script import misctools/lldb/lldb_pretty_printers.py
type category enable zig.lang
type category enable zig.std
command script import misctools/lldb/lldb_webkit.py
command script delete btjs
command alias btjs p {printf("gathering btjs trace...\n");printf("%s\n", (char*)dumpBtjsTrace())}
command source -C -s true -e true misctools/lldb/init.lldb

.vscode/launch.json (generated, vendored, 327 changed lines)

@@ -5,6 +5,9 @@
// - FORCE_COLOR=1 forces colors in the terminal
// - "${workspaceFolder}/test" is the cwd for `bun test` so it matches CI, we should fix this later
// - "cppvsdbg" is used instead of "lldb" on Windows, because "lldb" is too slow
// - Seeing WebKit files requires `vendor/WebKit` to exist and have code from the right commit.
// Run `bun sync-webkit-source` to ensure that folder is at the right commit. If you haven't
// cloned it at all, that script will suggest how.
"version": "0.2.0",
"configurations": [
// bun test [file]
@@ -13,7 +16,7 @@
"request": "launch",
"name": "bun test [file]",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${file}"],
"args": ["test", "--timeout=3600000", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -21,14 +24,21 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [file] --only",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "--only", "${file}"],
"args": ["test", "--timeout=3600000", "--only", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -36,20 +46,35 @@
"BUN_DEBUG_jest": "1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
},
{
"type": "lldb",
"name": "Attach",
"request": "attach",
"pid": "${command:pickMyProcess}",
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [file] (fast)",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${file}"],
"args": ["test", "--timeout=3600000", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -57,14 +82,21 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [file] (verbose)",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${file}"],
"args": ["test", "--timeout=3600000", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"BUN_DEBUG_QUIET_LOGS": "0",
@@ -72,14 +104,21 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [file] --watch",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "--watch", "${file}"],
"args": ["test", "--timeout=3600000", "--watch", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -87,14 +126,21 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [file] --hot",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "--hot", "${file}"],
"args": ["test", "--timeout=3600000", "--hot", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -102,14 +148,21 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [file] --inspect",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${file}"],
"args": ["test", "--timeout=3600000", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -118,7 +171,14 @@
"BUN_INSPECT": "ws://localhost:0/?wait=1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -130,7 +190,7 @@
"request": "launch",
"name": "bun test [file] --inspect-brk",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${file}"],
"args": ["test", "--timeout=3600000", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -139,7 +199,14 @@
"BUN_INSPECT": "ws://localhost:0/?break=1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -160,7 +227,14 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
},
{
"type": "lldb",
@@ -178,7 +252,14 @@
"GOMAXPROCS": "1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
},
{
"type": "lldb",
@@ -192,7 +273,14 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
},
{
"type": "lldb",
@@ -202,14 +290,18 @@
"args": ["run", "--watch", "${file}"],
"cwd": "${fileDirname}",
"env": {
// "BUN_DEBUG_DEBUGGER": "1",
// "BUN_DEBUG_INTERNAL_DEBUGGER": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
// "BUN_INSPECT": "ws+unix:///var/folders/jk/8fzl9l5119598vsqrmphsw7m0000gn/T/tl15npi7qtf.sock?report=1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
},
{
"type": "lldb",
@@ -223,7 +315,14 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
},
{
"type": "lldb",
@@ -239,7 +338,14 @@
"BUN_INSPECT": "ws://localhost:0/?wait=1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -260,7 +366,14 @@
"BUN_INSPECT": "ws://localhost:0/?break=1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -273,7 +386,7 @@
"request": "launch",
"name": "bun test [...]",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${input:testName}"],
"args": ["test", "--timeout=3600000", "${input:testName}"],
"cwd": "${workspaceFolder}",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -281,14 +394,21 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [...] (fast)",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${input:testName}"],
"args": ["test", "--timeout=3600000", "${input:testName}"],
"cwd": "${workspaceFolder}",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -296,14 +416,21 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [...] (verbose)",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${input:testName}"],
"args": ["test", "--timeout=3600000", "${input:testName}"],
"cwd": "${workspaceFolder}",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -311,14 +438,21 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [...] --watch",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "--watch", "${input:testName}"],
"args": ["test", "--timeout=3600000", "--watch", "${input:testName}"],
"cwd": "${workspaceFolder}",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -326,14 +460,21 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [...] --hot",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "--hot", "${input:testName}"],
"args": ["test", "--timeout=3600000", "--hot", "${input:testName}"],
"cwd": "${workspaceFolder}",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -341,14 +482,21 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [...] --inspect",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${input:testName}"],
"args": ["test", "--timeout=3600000", "${input:testName}"],
"cwd": "${workspaceFolder}",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -357,7 +505,14 @@
"BUN_INSPECT": "ws://localhost:0/?wait=1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -369,7 +524,7 @@
"request": "launch",
"name": "bun test [...] --inspect-brk",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${input:testName}"],
"args": ["test", "--timeout=3600000", "${input:testName}"],
"cwd": "${workspaceFolder}",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -378,7 +533,14 @@
"BUN_INSPECT": "ws://localhost:0/?break=1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -398,7 +560,14 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
},
// bun test [*]
{
@@ -413,7 +582,14 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
},
{
"type": "lldb",
@@ -427,7 +603,14 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
},
{
"type": "lldb",
@@ -442,7 +625,14 @@
"BUN_INSPECT": "ws://localhost:0/",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -461,7 +651,14 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
},
{
"type": "lldb",
@@ -475,7 +672,14 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
},
// Windows: bun test [file]
{
@@ -486,7 +690,7 @@
"request": "launch",
"name": "Windows: bun test [file]",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${file}"],
"args": ["test", "--timeout=3600000", "${file}"],
"cwd": "${workspaceFolder}",
"environment": [
{
@@ -511,7 +715,7 @@
"request": "launch",
"name": "Windows: bun test --only [file]",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "--only", "${file}"],
"args": ["test", "--timeout=3600000", "--only", "${file}"],
"cwd": "${workspaceFolder}",
"environment": [
{
@@ -536,7 +740,7 @@
"request": "launch",
"name": "Windows: bun test [file] (fast)",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${file}"],
"args": ["test", "--timeout=3600000", "${file}"],
"cwd": "${workspaceFolder}",
"environment": [
{
@@ -561,7 +765,7 @@
"request": "launch",
"name": "Windows: bun test [file] (verbose)",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${file}"],
"args": ["test", "--timeout=3600000", "${file}"],
"cwd": "${workspaceFolder}",
"environment": [
{
@@ -586,7 +790,7 @@
"request": "launch",
"name": "Windows: bun test [file] --inspect",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${file}"],
"args": ["test", "--timeout=3600000", "${file}"],
"cwd": "${workspaceFolder}",
"environment": [
{
@@ -620,7 +824,7 @@
"request": "launch",
"name": "Windows: bun test [file] --inspect-brk",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${file}"],
"args": ["test", "--timeout=3600000", "${file}"],
"cwd": "${workspaceFolder}",
"environment": [
{
@@ -787,7 +991,7 @@
"request": "launch",
"name": "Windows: bun test [...]",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${input:testName}"],
"args": ["test", "--timeout=3600000", "${input:testName}"],
"cwd": "${workspaceFolder}",
"environment": [
{
@@ -812,7 +1016,7 @@
"request": "launch",
"name": "Windows: bun test [...] (fast)",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${input:testName}"],
"args": ["test", "--timeout=3600000", "${input:testName}"],
"cwd": "${workspaceFolder}",
"environment": [
{
@@ -837,7 +1041,7 @@
"request": "launch",
"name": "Windows: bun test [...] (verbose)",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${input:testName}"],
"args": ["test", "--timeout=3600000", "${input:testName}"],
"cwd": "${workspaceFolder}",
"environment": [
{
@@ -862,7 +1066,7 @@
"request": "launch",
"name": "Windows: bun test [...] --watch",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "--watch", "${input:testName}"],
"args": ["test", "--timeout=3600000", "--watch", "${input:testName}"],
"cwd": "${workspaceFolder}",
"environment": [
{
@@ -887,7 +1091,7 @@
"request": "launch",
"name": "Windows: bun test [...] --hot",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "--hot", "${input:testName}"],
"args": ["test", "--timeout=3600000", "--hot", "${input:testName}"],
"cwd": "${workspaceFolder}",
"environment": [
{
@@ -912,7 +1116,7 @@
"request": "launch",
"name": "Windows: bun test [...] --inspect",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${input:testName}"],
"args": ["test", "--timeout=3600000", "${input:testName}"],
"cwd": "${workspaceFolder}",
"environment": [
{
@@ -946,7 +1150,7 @@
"request": "launch",
"name": "Windows: bun test [...] --inspect-brk",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${input:testName}"],
"args": ["test", "--timeout=3600000", "${input:testName}"],
"cwd": "${workspaceFolder}",
"environment": [
{
@@ -1133,6 +1337,11 @@
"handle SIGPWR nostop noprint pass",
"source ${workspaceFolder}/misctools/gdb/std_gdb_pretty_printers.py",
"source ${workspaceFolder}/misctools/gdb/zig_gdb_pretty_printers.py",
"set substitute-path /webkitbuild/vendor/WebKit ${workspaceFolder}/vendor/WebKit",
"set substitute-path /webkitbuild/.WTF/Headers ${workspaceFolder}/vendor/WebKit/Source/WTF",
// uncomment if you like
// "set disassembly-flavor intel",
"set print asm-demangle",
],
},
],


@@ -1,36 +1,36 @@
## bun tests
**IMPORTANT**: use the `bun agent` command instead of the `bun` command. For example:
**IMPORTANT**: use the `bun bd` command instead of the `bun` command. For example:
✅ Good
```sh
bun agent test internal/ban-words.test.ts
bun agent ./foo.ts
bun bd test internal/ban-words.test.ts
bun bd ./foo.ts
```
The `bun agent` command runs the DEBUG build. If you forget to run the debug build, your changes will not be reflected.
The `bun bd` command runs the DEBUG build. If you forget to run the debug build, your changes will not be reflected.
### Run a file
To run a file, you can use the `bun agent <file-path>` command.
To run a file, you can use the `bun bd <file-path>` command.
```sh
bun agent ./foo.ts
bun bd ./foo.ts
```
### Run tests
To run a single test, you need to use the `bun agent test <test-name>` command.
To run a single test, you need to use the `bun bd test <test-name>` command.
```sh
bun agent test internal/ban-words.test.ts
bun bd test internal/ban-words.test.ts
```
You must ALWAYS make sure to pass a file path to the `bun agent test <file-path>` command. DO NOT try to run ALL the tests at once unless you're in a specific subdirectory.
You must ALWAYS make sure to pass a file path to the `bun bd test <file-path>` command. DO NOT try to run ALL the tests at once unless you're in a specific subdirectory.
### Run a Node.js test
```sh
bun agent --silent node:test test-fs-link
bun bd --silent node:test test-fs-link
```

LATEST

@@ -1 +1 @@
1.2.13
1.2.15


@@ -482,7 +482,7 @@ STATIC_MUSL_FLAG ?=
WRAP_SYMBOLS_ON_LINUX =
ifeq ($(OS_NAME), linux)
WRAP_SYMBOLS_ON_LINUX = -Wl,--wrap=fcntl -Wl,--wrap=fcntl64 -Wl,--wrap=stat64 -Wl,--wrap=pow -Wl,--wrap=exp -Wl,--wrap=log -Wl,--wrap=log2 \
WRAP_SYMBOLS_ON_LINUX = -Wl,--wrap=fcntl -Wl,--wrap=fcntl64 -Wl,--wrap=stat64 -Wl,--wrap=pow -Wl,--wrap=exp -Wl,--wrap=exp2 -Wl,--wrap=log -Wl,--wrap=log2 \
-Wl,--wrap=lstat \
-Wl,--wrap=stat \
-Wl,--wrap=fstat \


@@ -47,6 +47,8 @@ Bun supports Linux (x64 & arm64), macOS (x64 & Apple Silicon) and Windows (x64).
> **Linux users** — Kernel version 5.6 or higher is strongly recommended, but the minimum is 5.1.
> **x64 users** — if you see "illegal instruction" or similar errors, check our [CPU requirements](https://bun.sh/docs/installation#cpu-requirements-and-baseline-builds)
```sh
# with install script (recommended)
curl -fsSL https://bun.sh/install | bash


@@ -50,6 +50,10 @@ bench("murmur64v2 (short)", () => {
Bun.hash.murmur64v2(shortStr);
});
bench("rapidhash (short)", () => {
Bun.hash.rapidhash(shortStr);
});
bench("wyhash (128 KB)", () => {
Bun.hash.wyhash(longStr);
});
@@ -94,4 +98,8 @@ bench("murmur64v2 (128 KB)", () => {
Bun.hash.murmur64v2(longStr);
});
bench("rapidhash (128 KB)", () => {
Bun.hash.rapidhash(longStr);
});
run();


@@ -44,10 +44,12 @@
"src/bun.js/bindings/webcrypto/*/*.cpp",
"src/bun.js/bindings/node/*.cpp",
"src/bun.js/bindings/node/crypto/*.cpp",
"src/bun.js/bindings/node/http/*.cpp",
"src/bun.js/bindings/v8/*.cpp",
"src/bun.js/bindings/v8/shim/*.cpp",
"src/bake/*.cpp",
"src/deps/*.cpp",
"src/vm/*.cpp",
"packages/bun-usockets/src/crypto/*.cpp"
]
},
@@ -59,7 +61,9 @@
"packages/bun-usockets/src/internal/*.c",
"packages/bun-usockets/src/crypto/*.c",
"src/bun.js/bindings/uv-posix-polyfills.c",
"src/bun.js/bindings/uv-posix-stubs.c"
"src/bun.js/bindings/uv-posix-stubs.c",
"src/*.c",
"src/bun.js/bindings/node/http/llhttp/*.c"
]
}
]


@@ -20,4 +20,4 @@ src/bake/hmr-runtime-client.ts
src/bake/hmr-runtime-error.ts
src/bake/hmr-runtime-server.ts
src/bake/server/stack-trace-stub.ts
src/bake/shared.ts
src/bake/shared.ts


@@ -4,4 +4,4 @@ src/bun.js/api/BunObject.bind.ts
src/bun.js/bindgen_test.bind.ts
src/bun.js/bindings/NodeModuleModule.bind.ts
src/bun.js/node/node_os.bind.ts
src/fmt.bind.ts
src/fmt.bind.ts


@@ -9,4 +9,4 @@ packages/bun-error/package.json
packages/bun-error/runtime-error.ts
packages/bun-error/sourcemap.ts
packages/bun-error/stack-trace-parser.ts
packages/bun-error/tsconfig.json
packages/bun-error/tsconfig.json


@@ -7,6 +7,9 @@ packages/bun-usockets/src/loop.c
packages/bun-usockets/src/quic.c
packages/bun-usockets/src/socket.c
packages/bun-usockets/src/udp.c
src/asan-config.c
src/bun.js/bindings/node/http/llhttp/api.c
src/bun.js/bindings/node/http/llhttp/http.c
src/bun.js/bindings/node/http/llhttp/llhttp.c
src/bun.js/bindings/uv-posix-polyfills.c
src/bun.js/bindings/uv-posix-stubs.c
src/asan-config.c


@@ -80,6 +80,9 @@ src/bun.js/bindings/JSEnvironmentVariableMap.cpp
src/bun.js/bindings/JSFFIFunction.cpp
src/bun.js/bindings/JSMockFunction.cpp
src/bun.js/bindings/JSNextTickQueue.cpp
src/bun.js/bindings/JSNodePerformanceHooksHistogram.cpp
src/bun.js/bindings/JSNodePerformanceHooksHistogramConstructor.cpp
src/bun.js/bindings/JSNodePerformanceHooksHistogramPrototype.cpp
src/bun.js/bindings/JSPropertyIterator.cpp
src/bun.js/bindings/JSS3File.cpp
src/bun.js/bindings/JSSocketAddressDTO.cpp
@@ -141,6 +144,13 @@ src/bun.js/bindings/node/crypto/JSSign.cpp
src/bun.js/bindings/node/crypto/JSVerify.cpp
src/bun.js/bindings/node/crypto/KeyObject.cpp
src/bun.js/bindings/node/crypto/node_crypto_binding.cpp
src/bun.js/bindings/node/http/JSConnectionsList.cpp
src/bun.js/bindings/node/http/JSConnectionsListConstructor.cpp
src/bun.js/bindings/node/http/JSConnectionsListPrototype.cpp
src/bun.js/bindings/node/http/JSHTTPParser.cpp
src/bun.js/bindings/node/http/JSHTTPParserConstructor.cpp
src/bun.js/bindings/node/http/JSHTTPParserPrototype.cpp
src/bun.js/bindings/node/http/NodeHTTPParser.cpp
src/bun.js/bindings/node/NodeTimers.cpp
src/bun.js/bindings/NodeAsyncHooks.cpp
src/bun.js/bindings/NodeDirent.cpp
@@ -153,6 +163,10 @@ src/bun.js/bindings/NodeTLS.cpp
src/bun.js/bindings/NodeURL.cpp
src/bun.js/bindings/NodeValidator.cpp
src/bun.js/bindings/NodeVM.cpp
src/bun.js/bindings/NodeVMModule.cpp
src/bun.js/bindings/NodeVMScript.cpp
src/bun.js/bindings/NodeVMSourceTextModule.cpp
src/bun.js/bindings/NodeVMSyntheticModule.cpp
src/bun.js/bindings/NoOpForTesting.cpp
src/bun.js/bindings/ObjectBindings.cpp
src/bun.js/bindings/objects.cpp
@@ -161,6 +175,7 @@ src/bun.js/bindings/Path.cpp
src/bun.js/bindings/ProcessBindingBuffer.cpp
src/bun.js/bindings/ProcessBindingConstants.cpp
src/bun.js/bindings/ProcessBindingFs.cpp
src/bun.js/bindings/ProcessBindingHTTPParser.cpp
src/bun.js/bindings/ProcessBindingNatives.cpp
src/bun.js/bindings/ProcessBindingTTYWrap.cpp
src/bun.js/bindings/ProcessBindingUV.cpp
@@ -409,6 +424,7 @@ src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA224.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA256.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA384.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA512.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmX25519.cpp
src/bun.js/bindings/webcrypto/CryptoDigest.cpp
src/bun.js/bindings/webcrypto/CryptoKey.cpp
src/bun.js/bindings/webcrypto/CryptoKeyAES.cpp
@@ -449,6 +465,7 @@ src/bun.js/bindings/webcrypto/JSRsaOaepParams.cpp
src/bun.js/bindings/webcrypto/JSRsaOtherPrimesInfo.cpp
src/bun.js/bindings/webcrypto/JSRsaPssParams.cpp
src/bun.js/bindings/webcrypto/JSSubtleCrypto.cpp
src/bun.js/bindings/webcrypto/JSX25519Params.cpp
src/bun.js/bindings/webcrypto/OpenSSLUtilities.cpp
src/bun.js/bindings/webcrypto/PhonyWorkQueue.cpp
src/bun.js/bindings/webcrypto/SerializedCryptoKeyWrapOpenSSL.cpp
@@ -463,4 +480,6 @@ src/bun.js/modules/NodeTTYModule.cpp
src/bun.js/modules/NodeUtilTypesModule.cpp
src/bun.js/modules/ObjectModule.cpp
src/deps/libuwsockets.cpp
src/io/io_darwin.cpp
src/io/io_darwin.cpp
src/vm/Semaphore.cpp
src/vm/SigintWatcher.cpp


@@ -15,4 +15,4 @@ src/codegen/generate-jssink.ts
src/codegen/generate-node-errors.ts
src/codegen/helpers.ts
src/codegen/internal-module-registry-scanner.ts
src/codegen/replacements.ts
src/codegen/replacements.ts


@@ -52,6 +52,7 @@ src/js/internal/debugger.ts
src/js/internal/errors.ts
src/js/internal/fifo.ts
src/js/internal/fixed_queue.ts
src/js/internal/freelist.ts
src/js/internal/fs/cp-sync.ts
src/js/internal/fs/cp.ts
src/js/internal/fs/glob.ts
@@ -160,4 +161,4 @@ src/js/thirdparty/node-fetch.ts
src/js/thirdparty/undici.js
src/js/thirdparty/vercel_fetch.js
src/js/thirdparty/ws.js
src/js/wasi-runner.js
src/js/wasi-runner.js


@@ -17,7 +17,8 @@ src/node-fallbacks/stream.js
src/node-fallbacks/string_decoder.js
src/node-fallbacks/sys.js
src/node-fallbacks/timers.js
src/node-fallbacks/timers.promises.js
src/node-fallbacks/tty.js
src/node-fallbacks/url.js
src/node-fallbacks/util.js
src/node-fallbacks/zlib.js
src/node-fallbacks/zlib.js


@@ -20,4 +20,4 @@ src/bun.js/node/node.classes.ts
src/bun.js/resolve_message.classes.ts
src/bun.js/test/jest.classes.ts
src/bun.js/webcore/encoding.classes.ts
src/bun.js/webcore/response.classes.ts
src/bun.js/webcore/response.classes.ts


@@ -55,11 +55,17 @@ src/bun.js/api/html_rewriter.zig
src/bun.js/api/JSBundler.zig
src/bun.js/api/JSTranspiler.zig
src/bun.js/api/server.zig
src/bun.js/api/server/AnyRequestContext.zig
src/bun.js/api/server/HTMLBundle.zig
src/bun.js/api/server/HTTPStatusText.zig
src/bun.js/api/server/InspectorBunFrontendDevServerAgent.zig
src/bun.js/api/server/NodeHTTPResponse.zig
src/bun.js/api/server/RequestContext.zig
src/bun.js/api/server/ServerConfig.zig
src/bun.js/api/server/ServerWebSocket.zig
src/bun.js/api/server/SSLConfig.zig
src/bun.js/api/server/StaticRoute.zig
src/bun.js/api/server/WebSocketServerContext.zig
src/bun.js/api/streams.classes.zig
src/bun.js/api/Timer.zig
src/bun.js/api/TOMLObject.zig
@@ -86,8 +92,6 @@ src/bun.js/bindings/Exception.zig
src/bun.js/bindings/FetchHeaders.zig
src/bun.js/bindings/FFI.zig
src/bun.js/bindings/generated_classes_list.zig
src/bun.js/bindings/GeneratedBindings.zig
src/bun.js/bindings/GeneratedJS2Native.zig
src/bun.js/bindings/GetterSetter.zig
src/bun.js/bindings/HTTPServerAgent.zig
src/bun.js/bindings/JSArray.zig
@@ -107,6 +111,7 @@ src/bun.js/bindings/JSPropertyIterator.zig
src/bun.js/bindings/JSRef.zig
src/bun.js/bindings/JSRuntimeType.zig
src/bun.js/bindings/JSString.zig
src/bun.js/bindings/JSType.zig
src/bun.js/bindings/JSUint8Array.zig
src/bun.js/bindings/JSValue.zig
src/bun.js/bindings/NodeModuleModule.zig
@@ -135,6 +140,21 @@ src/bun.js/ConsoleObject.zig
src/bun.js/Counters.zig
src/bun.js/Debugger.zig
src/bun.js/event_loop.zig
src/bun.js/event_loop/AnyEventLoop.zig
src/bun.js/event_loop/AnyTask.zig
src/bun.js/event_loop/AnyTaskWithExtraContext.zig
src/bun.js/event_loop/ConcurrentPromiseTask.zig
src/bun.js/event_loop/ConcurrentTask.zig
src/bun.js/event_loop/CppTask.zig
src/bun.js/event_loop/DeferredTaskQueue.zig
src/bun.js/event_loop/EventLoopHandle.zig
src/bun.js/event_loop/GarbageCollectionController.zig
src/bun.js/event_loop/JSCScheduler.zig
src/bun.js/event_loop/ManagedTask.zig
src/bun.js/event_loop/MiniEventLoop.zig
src/bun.js/event_loop/PosixSignalHandle.zig
src/bun.js/event_loop/Task.zig
src/bun.js/event_loop/WorkTask.zig
src/bun.js/hot_reloader.zig
src/bun.js/ipc.zig
src/bun.js/javascript_core_c_api.zig
@@ -222,6 +242,7 @@ src/bun.js/webcore/Response.zig
src/bun.js/webcore/S3Client.zig
src/bun.js/webcore/S3File.zig
src/bun.js/webcore/S3Stat.zig
src/bun.js/webcore/ScriptExecutionContext.zig
src/bun.js/webcore/Sink.zig
src/bun.js/webcore/streams.zig
src/bun.js/webcore/TextDecoder.zig
@@ -237,6 +258,7 @@ src/ci_info.zig
src/cli.zig
src/cli/add_command.zig
src/cli/add_completions.zig
src/cli/audit_command.zig
src/cli/build_command.zig
src/cli/bunx_command.zig
src/cli/colon_list_type.zig
@@ -256,6 +278,7 @@ src/cli/package_manager_command.zig
src/cli/patch_command.zig
src/cli/patch_commit_command.zig
src/cli/pm_trusted_command.zig
src/cli/pm_view_command.zig
src/cli/publish_command.zig
src/cli/remove_command.zig
src/cli/run_command.zig
@@ -383,7 +406,22 @@ src/deps/picohttp.zig
src/deps/picohttpparser.zig
src/deps/tcc.zig
src/deps/uws.zig
src/deps/uws/App.zig
src/deps/uws/BodyReaderMixin.zig
src/deps/uws/ConnectingSocket.zig
src/deps/uws/InternalLoopData.zig
src/deps/uws/ListenSocket.zig
src/deps/uws/Loop.zig
src/deps/uws/Request.zig
src/deps/uws/Response.zig
src/deps/uws/socket.zig
src/deps/uws/SocketContext.zig
src/deps/uws/Timer.zig
src/deps/uws/udp.zig
src/deps/uws/UpgradedDuplex.zig
src/deps/uws/us_socket_t.zig
src/deps/uws/WebSocket.zig
src/deps/uws/WindowsNamedPipe.zig
src/deps/zig-clap/clap.zig
src/deps/zig-clap/clap/args.zig
src/deps/zig-clap/clap/comptime.zig
@@ -432,15 +470,30 @@ src/identity_context.zig
src/import_record.zig
src/ini.zig
src/install/bin.zig
src/install/bun.lock.zig
src/install/dependency.zig
src/install/extract_tarball.zig
src/install/install.zig
src/install/integrity.zig
src/install/lifecycle_script_runner.zig
src/install/lockfile.zig
src/install/lockfile/Buffers.zig
src/install/lockfile/bun.lock.zig
src/install/lockfile/bun.lockb.zig
src/install/lockfile/CatalogMap.zig
src/install/lockfile/lockfile_json_stringify_for_debugging.zig
src/install/lockfile/OverrideMap.zig
src/install/lockfile/Package.zig
src/install/lockfile/Package/Meta.zig
src/install/lockfile/Package/Scripts.zig
src/install/lockfile/Package/WorkspaceMap.zig
src/install/lockfile/printer/tree_printer.zig
src/install/lockfile/printer/Yarn.zig
src/install/lockfile/Tree.zig
src/install/migration.zig
src/install/npm.zig
src/install/PackageManager/CommandLineArguments.zig
src/install/PackageManager/PackageJSONEditor.zig
src/install/PackageManager/PackageManagerOptions.zig
src/install/padding_checker.zig
src/install/patch_install.zig
src/install/repository.zig
@@ -456,7 +509,6 @@ src/io/PipeReader.zig
src/io/pipes.zig
src/io/PipeWriter.zig
src/io/source.zig
src/io/time.zig
src/js_ast.zig
src/js_lexer_tables.zig
src/js_lexer.zig
@@ -607,4 +659,4 @@ src/windows.zig
src/work_pool.zig
src/workaround_missing_symbols.zig
src/wyhash.zig
src/zlib.zig
src/zlib.zig


@@ -46,7 +46,7 @@ endif()
set(BUN_ERROR_SOURCE ${CWD}/packages/bun-error)
absolute_sources(BUN_ERROR_SOURCES ${CWD}/cmake/BunErrorSources.txt)
absolute_sources(BUN_ERROR_SOURCES ${CWD}/cmake/sources/BunErrorSources.txt)
set(BUN_ERROR_OUTPUT ${CODEGEN_PATH}/bun-error)
set(BUN_ERROR_OUTPUTS
@@ -135,7 +135,7 @@ register_command(
set(BUN_NODE_FALLBACKS_SOURCE ${CWD}/src/node-fallbacks)
absolute_sources(BUN_NODE_FALLBACKS_SOURCES ${CWD}/cmake/NodeFallbacksSources.txt)
absolute_sources(BUN_NODE_FALLBACKS_SOURCES ${CWD}/cmake/sources/NodeFallbacksSources.txt)
set(BUN_NODE_FALLBACKS_OUTPUT ${CODEGEN_PATH}/node-fallbacks)
set(BUN_NODE_FALLBACKS_OUTPUTS)
@@ -161,14 +161,9 @@ register_command(
CWD
${BUN_NODE_FALLBACKS_SOURCE}
COMMAND
${BUN_EXECUTABLE} x
esbuild ${ESBUILD_ARGS}
${BUN_EXECUTABLE} run build-fallbacks
${BUN_NODE_FALLBACKS_OUTPUT}
${BUN_NODE_FALLBACKS_SOURCES}
--outdir=${BUN_NODE_FALLBACKS_OUTPUT}
--format=esm
--minify
--bundle
--platform=browser
SOURCES
${BUN_NODE_FALLBACKS_SOURCES}
${BUN_NODE_FALLBACKS_NODE_MODULES}
@@ -235,7 +230,7 @@ register_command(
set(BUN_ZIG_GENERATED_CLASSES_SCRIPT ${CWD}/src/codegen/generate-classes.ts)
absolute_sources(BUN_ZIG_GENERATED_CLASSES_SOURCES ${CWD}/cmake/ZigGeneratedClassesSources.txt)
absolute_sources(BUN_ZIG_GENERATED_CLASSES_SOURCES ${CWD}/cmake/sources/ZigGeneratedClassesSources.txt)
set(BUN_ZIG_GENERATED_CLASSES_OUTPUTS
${CODEGEN_PATH}/ZigGeneratedClasses.h
@@ -268,8 +263,8 @@ register_command(
set(BUN_JAVASCRIPT_CODEGEN_SCRIPT ${CWD}/src/codegen/bundle-modules.ts)
absolute_sources(BUN_JAVASCRIPT_SOURCES ${CWD}/cmake/JavaScriptSources.txt)
absolute_sources(BUN_JAVASCRIPT_CODEGEN_SOURCES ${CWD}/cmake/JavaScriptCodegenSources.txt)
absolute_sources(BUN_JAVASCRIPT_SOURCES ${CWD}/cmake/sources/JavaScriptSources.txt)
absolute_sources(BUN_JAVASCRIPT_CODEGEN_SOURCES ${CWD}/cmake/sources/JavaScriptCodegenSources.txt)
list(APPEND BUN_JAVASCRIPT_CODEGEN_SOURCES
${CWD}/src/bun.js/bindings/InternalModuleRegistry.cpp
@@ -311,7 +306,7 @@ register_command(
set(BUN_BAKE_RUNTIME_CODEGEN_SCRIPT ${CWD}/src/codegen/bake-codegen.ts)
absolute_sources(BUN_BAKE_RUNTIME_SOURCES ${CWD}/cmake/BakeRuntimeSources.txt)
absolute_sources(BUN_BAKE_RUNTIME_SOURCES ${CWD}/cmake/sources/BakeRuntimeSources.txt)
list(APPEND BUN_BAKE_RUNTIME_CODEGEN_SOURCES
${CWD}/src/bun.js/bindings/InternalModuleRegistry.cpp
@@ -344,7 +339,7 @@ register_command(
set(BUN_BINDGEN_SCRIPT ${CWD}/src/codegen/bindgen.ts)
absolute_sources(BUN_BINDGEN_SOURCES ${CWD}/cmake/BindgenSources.txt)
absolute_sources(BUN_BINDGEN_SOURCES ${CWD}/cmake/sources/BindgenSources.txt)
set(BUN_BINDGEN_CPP_OUTPUTS
${CODEGEN_PATH}/GeneratedBindings.cpp
@@ -413,6 +408,7 @@ set(BUN_OBJECT_LUT_SOURCES
${CWD}/src/bun.js/bindings/ProcessBindingConstants.cpp
${CWD}/src/bun.js/bindings/ProcessBindingFs.cpp
${CWD}/src/bun.js/bindings/ProcessBindingNatives.cpp
${CWD}/src/bun.js/bindings/ProcessBindingHTTPParser.cpp
${CWD}/src/bun.js/modules/NodeModuleModule.cpp
${CODEGEN_PATH}/ZigGeneratedClasses.lut.txt
)
@@ -426,6 +422,7 @@ set(BUN_OBJECT_LUT_OUTPUTS
${CODEGEN_PATH}/ProcessBindingConstants.lut.h
${CODEGEN_PATH}/ProcessBindingFs.lut.h
${CODEGEN_PATH}/ProcessBindingNatives.lut.h
${CODEGEN_PATH}/ProcessBindingHTTPParser.lut.h
${CODEGEN_PATH}/NodeModuleModule.lut.h
${CODEGEN_PATH}/ZigGeneratedClasses.lut.h
)
@@ -501,7 +498,7 @@ WEBKIT_ADD_SOURCE_DEPENDENCIES(
# --- Zig ---
absolute_sources(BUN_ZIG_SOURCES ${CWD}/cmake/ZigSources.txt)
absolute_sources(BUN_ZIG_SOURCES ${CWD}/cmake/sources/ZigSources.txt)
list(APPEND BUN_ZIG_SOURCES
${CWD}/build.zig
@@ -598,8 +595,8 @@ set_property(DIRECTORY APPEND PROPERTY CMAKE_CONFIGURE_DEPENDS "build.zig")
set(BUN_USOCKETS_SOURCE ${CWD}/packages/bun-usockets)
# hand written cpp source files. Full list of "source" code (including codegen) is in BUN_CPP_SOURCES
absolute_sources(BUN_CXX_SOURCES ${CWD}/cmake/CxxSources.txt)
absolute_sources(BUN_C_SOURCES ${CWD}/cmake/CSources.txt)
absolute_sources(BUN_CXX_SOURCES ${CWD}/cmake/sources/CxxSources.txt)
absolute_sources(BUN_C_SOURCES ${CWD}/cmake/sources/CSources.txt)
if(WIN32)
list(APPEND BUN_CXX_SOURCES ${CWD}/src/bun.js/bindings/windows/rescle.cpp)
@@ -737,6 +734,7 @@ target_include_directories(${bun} PRIVATE
${CWD}/src/bun.js/bindings/webcore
${CWD}/src/bun.js/bindings/webcrypto
${CWD}/src/bun.js/bindings/node/crypto
${CWD}/src/bun.js/bindings/node/http
${CWD}/src/bun.js/bindings/sqlite
${CWD}/src/bun.js/bindings/v8
${CWD}/src/bun.js/modules
@@ -749,7 +747,7 @@ target_include_directories(${bun} PRIVATE
${NODEJS_HEADERS_PATH}/include
)
if(NOT WIN32)
if(NOT WIN32)
target_include_directories(${bun} PRIVATE ${CWD}/src/bun.js/bindings/libuv)
endif()
@@ -882,7 +880,7 @@ if(NOT WIN32)
-Wno-nullability-completeness
-Werror
)
if(ENABLE_ASAN)
target_compile_options(${bun} PUBLIC
-fsanitize=address
@@ -940,6 +938,7 @@ if(LINUX)
if(NOT ABI STREQUAL "musl")
target_link_options(${bun} PUBLIC
-Wl,--wrap=exp
-Wl,--wrap=exp2
-Wl,--wrap=expf
-Wl,--wrap=fcntl64
-Wl,--wrap=log
@@ -1019,6 +1018,7 @@ if(WIN32)
target_link_libraries(${bun} PRIVATE
${WEBKIT_LIB_PATH}/WTF.lib
${WEBKIT_LIB_PATH}/JavaScriptCore.lib
${WEBKIT_LIB_PATH}/bmalloc.lib
${WEBKIT_LIB_PATH}/sicudtd.lib
${WEBKIT_LIB_PATH}/sicuind.lib
${WEBKIT_LIB_PATH}/sicuucd.lib
@@ -1027,6 +1027,7 @@ if(WIN32)
target_link_libraries(${bun} PRIVATE
${WEBKIT_LIB_PATH}/WTF.lib
${WEBKIT_LIB_PATH}/JavaScriptCore.lib
${WEBKIT_LIB_PATH}/bmalloc.lib
${WEBKIT_LIB_PATH}/sicudt.lib
${WEBKIT_LIB_PATH}/sicuin.lib
${WEBKIT_LIB_PATH}/sicuuc.lib
@@ -1062,6 +1063,7 @@ set(BUN_DEPENDENCIES
TinyCC
Zlib
LibArchive # must be loaded after zlib
HdrHistogram # must be loaded after zlib
Zstd
)


@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
c-ares/c-ares
COMMIT
4f4912bce7374f787b10576851b687935f018e17
d3a507e920e7af18a5efb7f9f1d8044ed4750013
)
register_cmake_command(


@@ -0,0 +1,24 @@
register_repository(
NAME
hdrhistogram
REPOSITORY
HdrHistogram/HdrHistogram_c
COMMIT
652d51bcc36744fd1a6debfeb1a8a5f58b14022c
)
register_cmake_command(
TARGET
hdrhistogram
LIBRARIES
hdr_histogram_static
INCLUDES
include
LIB_PATH
src
ARGS
-DHDR_HISTOGRAM_BUILD_SHARED=OFF
-DHDR_HISTOGRAM_BUILD_STATIC=ON
-DHDR_LOG_REQUIRED=DISABLED
-DHDR_HISTOGRAM_BUILD_PROGRAMS=OFF
)


@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
facebook/zstd
COMMIT
794ea1b0afca0f020f4e57b6732332231fb23c70
f8745da6ff1ad1e7bab384bd1f9d742439278e99
)
register_cmake_command(


@@ -2,7 +2,7 @@ option(WEBKIT_VERSION "The version of WebKit to use")
option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading")
if(NOT WEBKIT_VERSION)
set(WEBKIT_VERSION eda8b0fb4fb1aa23db9c2b00933df8b58bcdd289)
set(WEBKIT_VERSION 85b01f72bb53299e75bd0889ee67431a84c7bdb6)
endif()
string(SUBSTRING ${WEBKIT_VERSION} 0 16 WEBKIT_VERSION_PREFIX)
@@ -41,14 +41,6 @@ if(WEBKIT_LOCAL)
return()
endif()
if(EXISTS ${WEBKIT_PATH}/package.json)
file(READ ${WEBKIT_PATH}/package.json WEBKIT_PACKAGE_JSON)
if(WEBKIT_PACKAGE_JSON MATCHES ${WEBKIT_VERSION})
return()
endif()
endif()
if(WIN32)
set(WEBKIT_OS "windows")
elseif(APPLE)
@@ -86,10 +78,18 @@ if(ENABLE_ASAN)
set(WEBKIT_SUFFIX "${WEBKIT_SUFFIX}-asan")
endif()
set(WEBKIT_NAME bun-webkit-${WEBKIT_OS}-${WEBKIT_ARCH}${WEBKIT_SUFFIX})
setx(WEBKIT_NAME bun-webkit-${WEBKIT_OS}-${WEBKIT_ARCH}${WEBKIT_SUFFIX})
set(WEBKIT_FILENAME ${WEBKIT_NAME}.tar.gz)
setx(WEBKIT_DOWNLOAD_URL https://github.com/oven-sh/WebKit/releases/download/autobuild-${WEBKIT_VERSION}/${WEBKIT_FILENAME})
if(EXISTS ${WEBKIT_PATH}/package.json)
file(READ ${WEBKIT_PATH}/package.json WEBKIT_PACKAGE_JSON)
if(WEBKIT_PACKAGE_JSON MATCHES ${WEBKIT_VERSION})
return()
endif()
endif()
file(DOWNLOAD ${WEBKIT_DOWNLOAD_URL} ${CACHE_PATH}/${WEBKIT_FILENAME} SHOW_PROGRESS)
file(ARCHIVE_EXTRACT INPUT ${CACHE_PATH}/${WEBKIT_FILENAME} DESTINATION ${CACHE_PATH} TOUCH)
file(REMOVE ${CACHE_PATH}/${WEBKIT_FILENAME})


@@ -572,7 +572,7 @@ _bun_outdated_completion() {
'--no-progress[Disable the progress bar]' \
'--help[Print this help menu]' &&
ret=0
case $state in
config)
_bun_list_bunfig_toml


@@ -175,6 +175,7 @@ Bun.hash.xxHash3("data", 1234);
Bun.hash.murmur32v3("data", 1234);
Bun.hash.murmur32v2("data", 1234);
Bun.hash.murmur64v2("data", 1234);
Bun.hash.rapidhash("data", 1234);
```
## `Bun.CryptoHasher`


@@ -1 +0,0 @@
See the [`bun test`](https://bun.sh/docs/cli/test) documentation.


@@ -206,6 +206,38 @@ Each call to `console.log` or `console.error` will be broadcast to the terminal
Internally, this reuses the existing WebSocket connection from hot module reloading to send the logs.
### Edit files in the browser
Bun's frontend dev server has support for [Automatic Workspace Folders](https://chromium.googlesource.com/devtools/devtools-frontend/+/main/docs/ecosystem/automatic_workspace_folders.md) in Chrome DevTools, which lets you save edits to files in the browser.
{% image src="/images/bun-chromedevtools.gif" alt="Bun's frontend dev server has support for Automatic Workspace Folders in Chrome DevTools, which lets you save edits to files in the browser." /%}
{% details summary="How it works" %}
Bun's dev server automatically adds a `/.well-known/appspecific/com.chrome.devtools.json` route to the server.
This route returns a JSON object with the following shape:
```json
{
"workspace": {
"root": "/path/to/your/project",
"uuid": "a-unique-identifier-for-this-workspace"
}
}
```
For security reasons, this is only enabled when:
1. The request is coming from localhost, 127.0.0.1, or ::1.
2. Hot Module Reloading is enabled.
3. The `chromeDevToolsAutomaticWorkspaceFolders` flag is set to `true` or `undefined`.
4. There are no other routes that match the request.
You can disable this by passing `development: { chromeDevToolsAutomaticWorkspaceFolders: false }` in `Bun.serve`'s options.
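A minimal sketch of opting out, assuming an otherwise ordinary `Bun.serve` call (the `fetch` handler here is only a placeholder):
```ts
import { serve } from "bun";

serve({
  // Placeholder handler; your existing routes/fetch logic is unaffected.
  fetch() {
    return new Response("ok");
  },
  development: {
    // Turns off the /.well-known/appspecific/com.chrome.devtools.json route.
    chromeDevToolsAutomaticWorkspaceFolders: false,
  },
});
```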
{% /details %}
## Keyboard Shortcuts
While the server is running:


@@ -1,6 +1,6 @@
Use `bun publish` to publish a package to the npm registry.
`bun publish` will automatically pack your package into a tarball, strip workspace protocols from the `package.json` (resolving versions if necessary), and publish to the registry specified in your configuration files. Both `bunfig.toml` and `.npmrc` files are supported.
`bun publish` will automatically pack your package into a tarball, strip catalog and workspace protocols from the `package.json` (resolving versions if necessary), and publish to the registry specified in your configuration files. Both `bunfig.toml` and `.npmrc` files are supported.
```sh
## Publishing the package from the current working directory


@@ -1,49 +1,50 @@
---
name: Use React and JSX
name: Build a React app with Bun
---
React just works with Bun. Bun supports `.jsx` and `.tsx` files out of the box.
Bun supports `.jsx` and `.tsx` files out of the box. React just works with Bun.
Remember that JSX is just a special syntax for embedding HTML-like markup in JavaScript files. React uses JSX, as do alternatives like [Preact](https://preactjs.com/) and [Solid](https://www.solidjs.com/). Bun's internal transpiler converts JSX into vanilla JavaScript before execution.
---
Bun _assumes_ you're using React (unless you [configure it otherwise](https://bun.sh/docs/runtime/bunfig#jsx)) so a line like this:
```
const element = <h1>Hello, world!</h1>;
```
---
is internally converted into something like this:
```ts
// jsxDEV
import { jsx } from "react/jsx-dev-runtime";
const element = jsx("h1", { children: "Hello, world!" });
```
---
This code requires `react` to run, so make sure you've installed React.
Create a new React app with `bun init --react`. This scaffolds a template that pairs a React frontend with an API server in a single full-stack app.
```bash
$ bun install react
# Create a new React app
$ bun init --react
# Run the app in development mode
$ bun dev
# Build as a static site for production
$ bun run build
# Run the server in production
$ bun start
```
---
Bun implements special logging for JSX components to make debugging easier.
### Hot Reloading
Run `bun dev` to start the app in development mode. This will start the API server and the React app with hot reloading.
### Full-Stack App
Run `bun start` to start the API server and frontend together in one process.
### Static Site
Run `bun run build` to build the app as a static site. This will create a `dist` directory with the built app and all the assets.
```bash
$ bun run log-my-component.tsx
<Component message="Hello world!" />
```
---
As far as "official support" for React goes, that's it. React is a library like any other, and Bun can run that library. Bun is not a framework, so you should use a framework like [Vite](https://vitejs.dev/) to build an app with server-side rendering and hot reloading in the browser.
Refer to [Runtime > JSX](https://bun.sh/docs/runtime/jsx) for complete documentation on configuring JSX.
├── src/
│ ├── index.tsx # Server entry point with API routes
│ ├── frontend.tsx # React app entry point with HMR
│ ├── App.tsx # Main React component
│ ├── APITester.tsx # Component for testing API endpoints
│ ├── index.html # HTML template
│ ├── index.css # Styles
│ └── *.svg # Static assets
├── package.json # Dependencies and scripts
├── tsconfig.json # TypeScript configuration
├── bunfig.toml # Bun configuration
└── bun.lock # Lock file
```

docs/install/audit.md Normal file

@@ -0,0 +1,37 @@
`bun audit` checks your installed packages for known security vulnerabilities.
Run the command in a project with a `bun.lock` file:
```bash
$ bun audit
```
Bun sends the list of installed packages and versions to NPM, and prints a report of any vulnerabilities that were found. Packages installed from registries other than the default registry are skipped.
If no vulnerabilities are found, the command prints:
```
No vulnerabilities found
```
When vulnerabilities are detected, each affected package is listed along with the severity, a short description and a link to the advisory. At the end of the report Bun prints a summary and hints for updating:
```
3 vulnerabilities (1 high, 2 moderate)
To update all dependencies to the latest compatible versions:
bun update
To update all dependencies to the latest versions (including breaking changes):
bun update --latest
```
### `--json`
Use the `--json` flag to print the raw JSON response from the registry instead of the formatted report:
```bash
$ bun audit --json
```
### Exit code
`bun audit` exits with code `0` if no vulnerabilities are found and `1` if the report lists any vulnerabilities. The non-zero exit code is returned even when `--json` is passed.
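For example, a CI step can rely on the exit code alone. A sketch, assuming a POSIX shell (the `audit-report.json` file name is arbitrary):
```bash
# Fail the job when vulnerabilities are reported, keeping the raw JSON for inspection.
if ! bun audit --json > audit-report.json; then
  echo "bun audit found vulnerabilities (see audit-report.json)" >&2
  exit 1
fi
```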

docs/install/catalogs.md Normal file

@@ -0,0 +1,296 @@
Catalogs in Bun provide a straightforward way to share common dependency versions across multiple packages in a monorepo. Rather than specifying the same versions repeatedly in each workspace package, you define them once in the root package.json and reference them consistently throughout your project.
## Overview
Unlike traditional dependency management where each workspace package needs to independently specify versions, catalogs let you:
1. Define version catalogs in the root package.json
2. Reference these versions with a simple `catalog:` protocol
3. Update all packages simultaneously by changing the version in just one place
This is especially useful in large monorepos where dozens of packages need to use the same version of key dependencies.
## How to Use Catalogs
### Directory Structure Example
Consider a monorepo with the following structure:
```
my-monorepo/
├── package.json
├── bun.lock
└── packages/
├── app/
│ └── package.json
├── ui/
│ └── package.json
└── utils/
└── package.json
```
### 1. Define Catalogs in Root package.json
In your root-level `package.json`, add a `catalog` or `catalogs` field within the `workspaces` object:
```json
{
"name": "my-monorepo",
"workspaces": {
"packages": ["packages/*"],
"catalog": {
"react": "^19.0.0",
"react-dom": "^19.0.0"
},
"catalogs": {
"testing": {
"jest": "30.0.0",
"testing-library": "14.0.0"
}
}
}
}
```
### 2. Reference Catalog Versions in Workspace Packages
In your workspace packages, use the `catalog:` protocol to reference versions:
**packages/app/package.json**
```json
{
"name": "app",
"dependencies": {
"react": "catalog:",
"react-dom": "catalog:",
"jest": "catalog:testing"
}
}
```
**packages/ui/package.json**
```json
{
"name": "ui",
"dependencies": {
"react": "catalog:",
"react-dom": "catalog:"
},
"devDependencies": {
"jest": "catalog:testing",
"testing-library": "catalog:testing"
}
}
```
### 3. Run Bun Install
Run `bun install` to install all dependencies according to the catalog versions.
## Catalog vs Catalogs
Bun supports two ways to define catalogs:
1. **`catalog`** (singular): A single default catalog for commonly used dependencies
```json
"catalog": {
"react": "^19.0.0",
"react-dom": "^19.0.0"
}
```
Reference with simply `catalog:`:
```json
"dependencies": {
"react": "catalog:"
}
```
2. **`catalogs`** (plural): Multiple named catalogs for grouping dependencies
```json
"catalogs": {
"testing": {
"jest": "30.0.0"
},
"ui": {
"tailwind": "4.0.0"
}
}
```
Reference with `catalog:<name>`:
```json
"dependencies": {
"jest": "catalog:testing",
"tailwind": "catalog:ui"
}
```
## Benefits of Using Catalogs
- **Consistency**: Ensures all packages use the same version of critical dependencies
- **Maintenance**: Update a dependency version in one place instead of across multiple package.json files
- **Clarity**: Makes it obvious which dependencies are standardized across your monorepo
- **Simplicity**: No need for complex version resolution strategies or external tools
## Real-World Example
Here's a more comprehensive example for a React application:
**Root package.json**
```json
{
"name": "react-monorepo",
"workspaces": {
"packages": ["packages/*"],
"catalog": {
"react": "^19.0.0",
"react-dom": "^19.0.0",
"react-router-dom": "^6.15.0"
},
"catalogs": {
"build": {
"webpack": "5.88.2",
"babel": "7.22.10"
},
"testing": {
"jest": "29.6.2",
"react-testing-library": "14.0.0"
}
}
},
"devDependencies": {
"typescript": "5.1.6"
}
}
```
**packages/app/package.json**
```json
{
"name": "app",
"dependencies": {
"react": "catalog:",
"react-dom": "catalog:",
"react-router-dom": "catalog:",
"@monorepo/ui": "workspace:*",
"@monorepo/utils": "workspace:*"
},
"devDependencies": {
"webpack": "catalog:build",
"babel": "catalog:build",
"jest": "catalog:testing",
"react-testing-library": "catalog:testing"
}
}
```
**packages/ui/package.json**
```json
{
"name": "@monorepo/ui",
"dependencies": {
"react": "catalog:",
"react-dom": "catalog:"
},
"devDependencies": {
"jest": "catalog:testing",
"react-testing-library": "catalog:testing"
}
}
```
**packages/utils/package.json**
```json
{
"name": "@monorepo/utils",
"dependencies": {
"react": "catalog:"
},
"devDependencies": {
"jest": "catalog:testing"
}
}
```
## Updating Versions
To update versions across all packages, simply change the version in the root package.json:
```json
"catalog": {
"react": "^19.1.0", // Updated from ^19.0.0
"react-dom": "^19.1.0" // Updated from ^19.0.0
}
```
Then run `bun install` to update all packages.
## Lockfile Integration
Bun's lockfile tracks catalog versions, making it easy to ensure consistent installations across different environments. The lockfile includes:
- The catalog definitions from your package.json
- The resolution of each cataloged dependency
```
// bun.lock (excerpt)
{
"lockfileVersion": 1,
"workspaces": {
"": {
"name": "react-monorepo",
},
"packages/app": {
"name": "app",
"dependencies": {
"react": "catalog:",
"react-dom": "catalog:",
...
},
},
...
},
"catalog": {
"react": "^19.0.0",
"react-dom": "^19.0.0",
...
},
"catalogs": {
"build": {
"webpack": "5.88.2",
...
},
...
},
"packages": {
...
}
}
```
## Limitations and Edge Cases
- Catalog references must match a dependency defined in either `catalog` or one of the named `catalogs`
- Empty strings and whitespace in catalog names are ignored (treated as default catalog)
- Invalid dependency versions in catalogs will fail to resolve during `bun install`
- Catalogs are only available within workspaces; they cannot be used outside the monorepo
Bun's catalog system provides a powerful yet simple way to maintain consistency across your monorepo without introducing additional complexity to your workflow.
## Publishing
When you run `bun publish` or `bun pm pack`, Bun automatically replaces
`catalog:` references in your `package.json` with the resolved version numbers.
The published package includes regular semver strings and no longer depends on
your catalog definitions.
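As an illustrative before/after sketch (versions resolved from the example catalog above):
```json
// packages/ui/package.json as written in the repo
"dependencies": {
  "react": "catalog:"
}

// package.json inside the published tarball
"dependencies": {
  "react": "^19.0.0"
}
```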


@@ -83,6 +83,14 @@ Workspaces have a couple major benefits.
- **Dependencies can be de-duplicated.** If `a` and `b` share a common dependency, it will be _hoisted_ to the root `node_modules` directory. This reduces redundant disk usage and minimizes "dependency hell" issues associated with having multiple versions of a package installed simultaneously.
- **Run scripts in multiple packages.** You can use the [`--filter` flag](https://bun.sh/docs/cli/filter) to easily run `package.json` scripts in multiple packages in your workspace.
## Share versions with Catalogs
When many packages need the same dependency versions, catalogs let you define
those versions once in the root `package.json` and reference them from your
workspaces using the `catalog:` protocol. Updating the catalog automatically
updates every package that references it. See
[Catalogs](https://bun.sh/docs/install/catalogs) for details.
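A minimal sketch, assuming a root `package.json` and one workspace package:
```json
// root package.json
"workspaces": {
  "packages": ["packages/*"],
  "catalog": { "react": "^19.0.0" }
}

// packages/app/package.json
"dependencies": { "react": "catalog:" }
```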
{% callout %}
⚡️ **Speed** — Installs are fast, even for big monorepos. Bun installs the [Remix](https://github.com/remix-run/remix) monorepo in about `500ms` on Linux.


@@ -206,7 +206,7 @@ $ iex "& {$(irm https://bun.sh/install.ps1)} -Version $BUN_LATEST_VERSION"
## Downloading Bun binaries directly
To download Bun binaries directly, you can visit the [releases page](https://github.com/oven-sh/bun/releases) page on GitHub.
To download Bun binaries directly, you can visit the [releases page](https://github.com/oven-sh/bun/releases) on GitHub.
For convenience, here are download links for the latest version:
@@ -223,7 +223,16 @@ For convenience, here are download links for the latest version:
The `musl` binaries are built for distributions that do not ship with the glibc libraries by default, instead relying on musl. The two most popular distros are Void Linux and Alpine Linux, with the latter used heavily in Docker containers. If you encounter an error like the following: `bun: /lib/x86_64-linux-gnu/libm.so.6: version GLIBC_2.29' not found (required by bun)`, try using the musl binary. Bun's install script automatically chooses the correct binary for your system.
Bun's `x64` binaries target the Haswell CPU architecture, which means they require AVX and AVX2 instructions. For Linux and Windows, the `x64-baseline` binaries are also available which target the Nehalem architecture. If you run into an "Illegal Instruction" error when running Bun, try using the `baseline` binaries instead. Bun's install scripts automatically chooses the correct binary for your system which helps avoid this issue. Baseline builds are slower than regular builds, so use them only if necessary.
### CPU requirements and `baseline` builds
Bun's `x64` binaries target the Haswell CPU architecture, which means they require AVX and AVX2 instructions. For Linux and Windows, the `x64-baseline` binaries are also available which target the Nehalem architecture. If you run into an "Illegal Instruction" error when running Bun, try using the `baseline` binaries instead. Bun's install script automatically chooses the correct binary for your system which helps avoid this issue. Baseline builds are slower than regular builds, so use them only if necessary.
| Build | Intel requirement | AMD requirement |
| ------------ | ------------------------------------------------------------------ | ------------------ |
| x64 | Haswell (4th generation Core) or newer, except some low-end models | Excavator or newer |
| x64-baseline | Nehalem (1st generation Core) or newer | Bulldozer or newer |
Bun does not currently support any CPUs older than the `baseline` target, which mandates the SSE4.2 extension.
Bun also publishes `darwin-x64-baseline` binaries, but these are just a copy of the `darwin-x64` ones so they still have the same CPU requirement. We only maintain these since some tools expect them to exist. Bun requires macOS 13.0 or later, which does not support any CPUs that don't meet our requirement.


@@ -183,6 +183,9 @@ export default {
page("install/workspaces", "Workspaces", {
description: "Bun's package manager supports workspaces and monorepo development workflows.",
}),
page("install/catalogs", "Catalogs", {
description: "Use catalogs to share dependency versions between packages in a monorepo.",
}),
page("install/lifecycle", "Lifecycle scripts", {
description: "How Bun handles package lifecycle scripts with trustedDependencies",
}),
@@ -204,6 +207,9 @@ export default {
description:
"Patch dependencies in your project to fix bugs or add features without vendoring the entire package.",
}),
page("install/audit", "Audit dependencies", {
description: "Check installed packages for vulnerabilities.",
}),
page("install/npmrc", ".npmrc support", {
description: "Bun supports loading some configuration options from .npmrc",
}),
@@ -389,7 +395,7 @@ export default {
page("api/cc", "C Compiler", {
description: `Build & run native C from JavaScript with Bun's native C compiler API`,
}), // "`bun:ffi`"),
page("api/test", "Testing", {
page("cli/test", "Testing", {
description: `Bun's built-in test runner is fast and uses Jest-compatible syntax.`,
}), // "`bun:test`"),
page("api/utils", "Utils", {


@@ -19,31 +19,43 @@ Click the link in the right column to jump to the associated documentation.
---
- HTTP server
- HTTP Server
- [`Bun.serve`](https://bun.sh/docs/api/http#bun-serve)
---
- Shell
- [`$`](https://bun.sh/docs/runtime/shell)
---
- Bundler
- [`Bun.build`](https://bun.sh/docs/bundler)
---
- File I/O
- [`Bun.file`](https://bun.sh/docs/api/file-io#reading-files-bun-file)
[`Bun.write`](https://bun.sh/docs/api/file-io#writing-files-bun-write)
- [`Bun.file`](https://bun.sh/docs/api/file-io#reading-files-bun-file), [`Bun.write`](https://bun.sh/docs/api/file-io#writing-files-bun-write), `Bun.stdin`, `Bun.stdout`, `Bun.stderr`
---
- Child processes
- [`Bun.spawn`](https://bun.sh/docs/api/spawn#spawn-a-process-bun-spawn)
[`Bun.spawnSync`](https://bun.sh/docs/api/spawn#blocking-api-bun-spawnsync)
- Child Processes
- [`Bun.spawn`](https://bun.sh/docs/api/spawn#spawn-a-process-bun-spawn), [`Bun.spawnSync`](https://bun.sh/docs/api/spawn#blocking-api-bun-spawnsync)
---
- TCP
- [`Bun.listen`](https://bun.sh/docs/api/tcp#start-a-server-bun-listen)
[`Bun.connect`](https://bun.sh/docs/api/tcp#start-a-server-bun-listen)
- TCP Sockets
- [`Bun.listen`](https://bun.sh/docs/api/tcp#start-a-server-bun-listen), [`Bun.connect`](https://bun.sh/docs/api/tcp#start-a-server-bun-listen)
---
- UDP Sockets
- [`Bun.udpSocket`](https://bun.sh/docs/api/udp)
---
- WebSockets
- `new WebSocket()` (client), [`Bun.serve`](https://bun.sh/docs/api/websockets) (server)
---
@@ -57,44 +69,53 @@ Click the link in the right column to jump to the associated documentation.
---
- Streaming HTML Transformations
- Streaming HTML
- [`HTMLRewriter`](https://bun.sh/docs/api/html-rewriter)
---
- Hashing
- [`Bun.hash`](https://bun.sh/docs/api/hashing#bun-hash)
[`Bun.CryptoHasher`](https://bun.sh/docs/api/hashing#bun-cryptohasher)
- [`Bun.password`](https://bun.sh/docs/api/hashing#bun-password), [`Bun.hash`](https://bun.sh/docs/api/hashing#bun-hash), [`Bun.CryptoHasher`](https://bun.sh/docs/api/hashing#bun-cryptohasher), `Bun.sha`
---
- import.meta
- [`import.meta`](https://bun.sh/docs/api/import-meta)
---
<!-- - [DNS](https://bun.sh/docs/api/dns)
- `Bun.dns`
--- -->
- SQLite
- [`bun:sqlite`](https://bun.sh/docs/api/sqlite)
---
- FFI
- PostgreSQL Client
- [`Bun.SQL`](https://bun.sh/docs/api/sql), `Bun.sql`
---
- Redis (Valkey) Client
- [`Bun.RedisClient`](https://bun.sh/docs/api/redis), `Bun.redis`
---
- FFI (Foreign Function Interface)
- [`bun:ffi`](https://bun.sh/docs/api/ffi)
---
- DNS
- [`Bun.dns.lookup`](https://bun.sh/docs/api/dns), `Bun.dns.prefetch`, `Bun.dns.getCacheStats`
---
- Testing
- [`bun:test`](https://bun.sh/docs/cli/test)
---
- Node-API
- [`Node-API`](https://bun.sh/docs/api/node-api)
- Workers
- [`new Worker()`](https://bun.sh/docs/api/workers)
---
- Module Loaders
- [`Bun.plugin`](https://bun.sh/docs/bundler/plugins)
---
@@ -103,27 +124,84 @@ Click the link in the right column to jump to the associated documentation.
---
- Cookies
- [`Bun.Cookie`](https://bun.sh/docs/api/cookie), [`Bun.CookieMap`](https://bun.sh/docs/api/cookie)
---
- Node-API
- [`Node-API`](https://bun.sh/docs/api/node-api)
---
- `import.meta`
- [`import.meta`](https://bun.sh/docs/api/import-meta)
---
- Utilities
- [`Bun.version`](https://bun.sh/docs/api/utils#bun-version)
[`Bun.revision`](https://bun.sh/docs/api/utils#bun-revision)
[`Bun.env`](https://bun.sh/docs/api/utils#bun-env)
[`Bun.main`](https://bun.sh/docs/api/utils#bun-main)
[`Bun.sleep()`](https://bun.sh/docs/api/utils#bun-sleep)
[`Bun.sleepSync()`](https://bun.sh/docs/api/utils#bun-sleepsync)
[`Bun.which()`](https://bun.sh/docs/api/utils#bun-which)
[`Bun.peek()`](https://bun.sh/docs/api/utils#bun-peek)
[`Bun.openInEditor()`](https://bun.sh/docs/api/utils#bun-openineditor)
[`Bun.deepEquals()`](https://bun.sh/docs/api/utils#bun-deepequals)
[`Bun.escapeHTML()`](https://bun.sh/docs/api/utils#bun-escapehtml)
[`Bun.fileURLToPath()`](https://bun.sh/docs/api/utils#bun-fileurltopath)
[`Bun.pathToFileURL()`](https://bun.sh/docs/api/utils#bun-pathtofileurl)
[`Bun.gzipSync()`](https://bun.sh/docs/api/utils#bun-gzipsync)
[`Bun.gunzipSync()`](https://bun.sh/docs/api/utils#bun-gunzipsync)
[`Bun.deflateSync()`](https://bun.sh/docs/api/utils#bun-deflatesync)
[`Bun.inflateSync()`](https://bun.sh/docs/api/utils#bun-inflatesync)
[`Bun.inspect()`](https://bun.sh/docs/api/utils#bun-inspect)
[`Bun.nanoseconds()`](https://bun.sh/docs/api/utils#bun-nanoseconds)
[`Bun.readableStreamTo*()`](https://bun.sh/docs/api/utils#bun-readablestreamto)
[`Bun.resolveSync()`](https://bun.sh/docs/api/utils#bun-resolvesync)
- [`Bun.version`](https://bun.sh/docs/api/utils#bun-version), [`Bun.revision`](https://bun.sh/docs/api/utils#bun-revision), [`Bun.env`](https://bun.sh/docs/api/utils#bun-env), [`Bun.main`](https://bun.sh/docs/api/utils#bun-main)
---
- Sleep & Timing
- [`Bun.sleep()`](https://bun.sh/docs/api/utils#bun-sleep), [`Bun.sleepSync()`](https://bun.sh/docs/api/utils#bun-sleepsync), [`Bun.nanoseconds()`](https://bun.sh/docs/api/utils#bun-nanoseconds)
---
- Random & UUID
- [`Bun.randomUUIDv7()`](https://bun.sh/docs/api/utils#bun-randomuuidv7)
---
- System & Environment
- [`Bun.which()`](https://bun.sh/docs/api/utils#bun-which)
---
- Comparison & Inspection
- [`Bun.peek()`](https://bun.sh/docs/api/utils#bun-peek), [`Bun.deepEquals()`](https://bun.sh/docs/api/utils#bun-deepequals), `Bun.deepMatch`, [`Bun.inspect()`](https://bun.sh/docs/api/utils#bun-inspect)
---
- String & Text Processing
- [`Bun.escapeHTML()`](https://bun.sh/docs/api/utils#bun-escapehtml), [`Bun.stringWidth()`](https://bun.sh/docs/api/utils#bun-stringwidth), `Bun.indexOfLine`
---
- URL & Path Utilities
- [`Bun.fileURLToPath()`](https://bun.sh/docs/api/utils#bun-fileurltopath), [`Bun.pathToFileURL()`](https://bun.sh/docs/api/utils#bun-pathtofileurl)
---
- Compression
- [`Bun.gzipSync()`](https://bun.sh/docs/api/utils#bun-gzipsync), [`Bun.gunzipSync()`](https://bun.sh/docs/api/utils#bun-gunzipsync), [`Bun.deflateSync()`](https://bun.sh/docs/api/utils#bun-deflatesync), [`Bun.inflateSync()`](https://bun.sh/docs/api/utils#bun-inflatesync), `Bun.zstdCompressSync()`, `Bun.zstdDecompressSync()`, `Bun.zstdCompress()`, `Bun.zstdDecompress()`
---
- Stream Processing
- [`Bun.readableStreamTo*()`](https://bun.sh/docs/api/utils#bun-readablestreamto), `Bun.readableStreamToBytes()`, `Bun.readableStreamToBlob()`, `Bun.readableStreamToFormData()`, `Bun.readableStreamToJSON()`, `Bun.readableStreamToArray()`
---
- Memory & Buffer Management
- `Bun.ArrayBufferSink`, `Bun.allocUnsafe`, `Bun.concatArrayBuffers`
---
- Module Resolution
- [`Bun.resolveSync()`](https://bun.sh/docs/api/utils#bun-resolvesync)
---
- Parsing & Formatting
- [`Bun.semver`](https://bun.sh/docs/api/semver), `Bun.TOML.parse`, [`Bun.color`](https://bun.sh/docs/api/color)
---
- Low-level / Internals
- `Bun.mmap`, `Bun.gc`, `Bun.generateHeapSnapshot`, [`bun:jsc`](https://bun.sh/docs/api/bun-jsc)
---
{% /table %}
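As a small example of the timing utilities grouped above (a minimal sketch; the exact elapsed time will vary):

```ts
const start = Bun.nanoseconds();
await Bun.sleep(250); // suspend the current async context for ~250 ms
const elapsedMs = (Bun.nanoseconds() - start) / 1e6;
console.log(`slept for ~${elapsedMs.toFixed(1)} ms`);
```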

View File

@@ -76,7 +76,7 @@ The `define` field allows you to replace certain global identifiers with constan
### `loader`
Configure how Bun maps file extensions to loaders. This is useful for loading files that aren't natively supported by Bun. If
Configure how Bun maps file extensions to loaders. This is useful for loading files that aren't natively supported by Bun.
```toml
[loader]

View File

@@ -120,7 +120,7 @@ This page is updated regularly to reflect compatibility status of the latest ver
### [`node:net`](https://nodejs.org/api/net.html)
🟡 `SocketAddress` class not exposed (but implemented). `BlockList` exists but is a no-op.
🟢 Fully implemented.
### [`node:perf_hooks`](https://nodejs.org/api/perf_hooks.html)

View File

@@ -102,7 +102,7 @@ Once the plugin is registered, `.yaml` and `.yml` files can be directly imported
{% codetabs %}
```ts#index.ts
import data from "./data.yml"
import * as data from "./data.yml"
console.log(data);
```
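For context, a YAML loader plugin along these lines is what makes the import above work. This is a hedged sketch assuming the `js-yaml` package is installed; the plugin actually used in the docs may differ.

```ts
import { plugin } from "bun";
import { load } from "js-yaml"; // assumed dependency, not shown in the original snippet

plugin({
  name: "yaml-loader",
  setup(build) {
    build.onLoad({ filter: /\.(yaml|yml)$/ }, async ({ path }) => {
      const text = await Bun.file(path).text();
      // "object" tells Bun to expose the parsed value as the module's exports
      return { exports: load(text) as Record<string, unknown>, loader: "object" };
    });
  },
});
```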

View File

@@ -17,6 +17,7 @@ console.log(Bun.hash.xxHash3(input)); // bigint
console.log(Bun.hash.murmur32v3(input)); // number
console.log(Bun.hash.murmur32v2(input)); // number
console.log(Bun.hash.murmur64v2(input)); // bigint
console.log(Bun.hash.rapidhash(input)); // bigint
// Second argument accepts a seed where relevant
console.log(Bun.hash(input, 12345));

misctools/lldb/init.lldb (new file, 19 lines)
View File

@@ -0,0 +1,19 @@
# This file is separate from .lldbinit because it has to be in the same directory as the Python
# modules in order for the "attach" action to work.
# Tell LLDB what to do when the debugged process receives SIGPWR: pass it through to the process
# (-p), but do not stop the process (-s) or notify the user (-n).
#
# JSC's garbage collector sends this signal (as configured by Bun WebKit in
# Thread::initializePlatformThreading() in ThreadingPOSIX.cpp) to the JS thread to suspend or resume
# it. So stopping the process would just create noise when debugging any long-running script.
process handle -p true -s false -n false SIGPWR
command script import -c lldb_pretty_printers.py
type category enable zig.lang
type category enable zig.std
command script import -c lldb_webkit.py
command script delete btjs
command alias btjs p {printf("gathering btjs trace...\n");printf("%s\n", (char*)dumpBtjsTrace())}

View File

@@ -1,7 +1,7 @@
{
"private": true,
"name": "bun",
"version": "1.2.14",
"version": "1.2.16",
"workspaces": [
"./packages/bun-types",
"./packages/@types/bun"
@@ -26,7 +26,8 @@
"build": "bun run build:debug",
"watch": "zig build check --watch -fincremental --prominent-compile-errors --global-cache-dir build/debug/zig-check-cache --zig-lib-dir vendor/zig/lib",
"watch-windows": "zig build check-windows --watch -fincremental --prominent-compile-errors --global-cache-dir build/debug/zig-check-cache --zig-lib-dir vendor/zig/lib",
"agent": "(bun run --silent build:debug &> /tmp/bun.debug.build.log || (cat /tmp/bun.debug.build.log && rm -rf /tmp/bun.debug.build.log && exit 1)) && rm -f /tmp/bun.debug.build.log && ./build/debug/bun-debug",
"bd:v": "(bun run --silent build:debug &> /tmp/bun.debug.build.log || (cat /tmp/bun.debug.build.log && rm -rf /tmp/bun.debug.build.log && exit 1)) && rm -f /tmp/bun.debug.build.log && ./build/debug/bun-debug",
"bd": "BUN_DEBUG_QUIET_LOGS=1 bun bd:v",
"build:debug": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -B build/debug",
"build:debug:asan": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -DENABLE_ASAN=ON -B build/debug-asan",
"build:valgrind": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -DENABLE_BASELINE=ON -ENABLE_VALGRIND=ON -B build/debug-valgrind",
@@ -75,6 +76,8 @@
"zig-format:check": "bun run analysis:no-llvm --target zig-format-check",
"prettier": "bunx prettier@latest --plugin=prettier-plugin-organize-imports --config .prettierrc --write scripts packages src docs 'test/**/*.{test,spec}.{ts,tsx,js,jsx,mts,mjs,cjs,cts}' '!test/**/*fixture*.*'",
"node:test": "node ./scripts/runner.node.mjs --quiet --exec-path=$npm_execpath --node-tests ",
"clean:zig": "rm -rf build/debug/cache/zig build/debug/CMakeCache.txt 'build/debug/*.o' .zig-cache zig-out || true"
"node:test:cp": "bun ./scripts/fetch-node-test.ts ",
"clean:zig": "rm -rf build/debug/cache/zig build/debug/CMakeCache.txt 'build/debug/*.o' .zig-cache zig-out || true",
"sync-webkit-source": "bun ./scripts/sync-webkit-source.ts"
}
}

View File

@@ -743,7 +743,7 @@ export abstract class BaseDebugAdapter<T extends Inspector = Inspector>
source,
request,
// It is theoretically possible for a breakpoint to resolve to multiple locations.
// In that case, send a seperate `breakpoint` event for each one, excluding the first.
// In that case, send a separate `breakpoint` event for each one, excluding the first.
notify: i > 0,
}),
);

View File

@@ -42,11 +42,11 @@ export default class RuntimeError {
original: Error;
stack: StackFrame[];
static from(error: Error) {
static from(error: Error): RuntimeError {
const runtime = new RuntimeError();
runtime.original = error;
runtime.stack = this.parseStack(error);
return RuntimeError;
return runtime;
}
/**

View File

@@ -1865,6 +1865,7 @@ declare module "bun" {
murmur32v3: (data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer, seed?: number) => number;
murmur32v2: (data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer, seed?: number) => number;
murmur64v2: (data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer, seed?: bigint) => bigint;
rapidhash: (data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer, seed?: bigint) => bigint;
}
type JavaScriptLoader = "jsx" | "js" | "ts" | "tsx";
@@ -3304,6 +3305,8 @@ declare module "bun" {
interface BunRequest<T extends string = string> extends Request {
params: RouterTypes.ExtractRouteParams<T>;
readonly cookies: CookieMap;
clone(): BunRequest<T>;
}
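A minimal sketch of how the newly typed `clone()`, `params`, and `cookies` members might be used inside a route handler (assumes Bun's `routes` option; the handler details are illustrative):

```ts
Bun.serve({
  routes: {
    "/users/:id": async (req) => {
      const copy = req.clone(); // clone() now preserves the BunRequest<T> type
      console.log(req.params.id, req.cookies.get("session"));
      await copy.text(); // read the clone without consuming the original body
      return new Response("ok");
    },
  },
  fetch() {
    return new Response("not found", { status: 404 });
  },
});
```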
interface GenericServeOptions {
@@ -3355,6 +3358,30 @@ declare module "bun" {
* @default false
*/
console?: boolean;
/**
* Enable automatic workspace folders for Chrome DevTools
*
* This lets you persistently edit files in the browser. It works by adding the following route to the server:
* `/.well-known/appspecific/com.chrome.devtools.json`
*
* The response is a JSON object with the following shape:
* ```json
* {
* "workspace": {
* "root": "<cwd>",
* "uuid": "<uuid>"
* }
* }
* ```
*
* The `root` field is the current working directory of the server.
* The `"uuid"` field is a hash of the file that started the server and a hash of the current working directory.
*
* For security reasons, if the remote socket address is not from localhost, 127.0.0.1, or ::1, the request is ignored.
* @default true
*/
chromeDevToolsAutomaticWorkspaceFolders?: boolean;
};
error?: (this: Server, error: ErrorLike) => Response | Promise<Response> | void | Promise<void>;
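Based on the options declared above, toggling the DevTools workspace route might look like this. This is a sketch that assumes these flags live under `Bun.serve`'s `development` options object, which is not shown in the excerpt.

```ts
Bun.serve({
  development: {
    console: true, // declared above with @default false
    // Serves /.well-known/appspecific/com.chrome.devtools.json to localhost-only requests (default: true).
    chromeDevToolsAutomaticWorkspaceFolders: true,
  },
  fetch() {
    return new Response("hello");
  },
});
```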
@@ -3660,7 +3687,7 @@ declare module "bun" {
* the well-known CAs curated by Mozilla. Mozilla's CAs are completely
* replaced when CAs are explicitly specified using this option.
*/
ca?: string | Buffer | BunFile | Array<string | Buffer | BunFile> | undefined;
ca?: string | BufferSource | BunFile | Array<string | BufferSource | BunFile> | undefined;
/**
* Cert chains in PEM format. One cert chain should be provided per
* private key. Each cert chain should consist of the PEM formatted
@@ -3672,7 +3699,7 @@ declare module "bun" {
* intermediate certificates are not provided, the peer will not be
* able to validate the certificate, and the handshake will fail.
*/
cert?: string | Buffer | BunFile | Array<string | Buffer | BunFile> | undefined;
cert?: string | BufferSource | BunFile | Array<string | BufferSource | BunFile> | undefined;
/**
* Private keys in PEM format. PEM allows the option of private keys
* being encrypted. Encrypted keys will be decrypted with
@@ -3683,13 +3710,25 @@ declare module "bun" {
* object.passphrase is optional. Encrypted keys will be decrypted with
* object.passphrase if provided, or options.passphrase if it is not.
*/
key?: string | Buffer | BunFile | Array<string | Buffer | BunFile> | undefined;
key?: string | BufferSource | BunFile | Array<string | BufferSource | BunFile> | undefined;
/**
* Optionally affect the OpenSSL protocol behavior, which is not
* usually necessary. This should be used carefully if at all! Value is
* a numeric bitmask of the SSL_OP_* options from OpenSSL Options
*/
secureOptions?: number | undefined; // Value is a numeric bitmask of the `SSL_OP_*` options
keyFile?: string;
certFile?: string;
ALPNProtocols?: string | BufferSource;
ciphers?: string;
clientRenegotiationLimit?: number;
clientRenegotiationWindow?: number;
}
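A sketch of passing TLS options to `Bun.listen`, including one of the newly typed fields (`ALPNProtocols`); the file paths, port, and echo handler are placeholders:

```ts
const listener = Bun.listen({
  hostname: "localhost",
  port: 4443, // placeholder port
  tls: {
    key: Bun.file("./server.key"), // placeholder paths
    cert: Bun.file("./server.crt"),
    ALPNProtocols: "http/1.1", // string | BufferSource, per the declaration above
  },
  socket: {
    data(socket, chunk) {
      socket.write(chunk); // simple echo
    },
  },
});
console.log("TLS echo server listening");
```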
// Note for contributors: TLSOptionsAsDeprecated should be considered immutable
@@ -5403,6 +5442,42 @@ declare module "bun" {
options?: ZlibCompressionOptions | LibdeflateCompressionOptions,
): Uint8Array;
/**
* Compresses a chunk of data with the Zstandard (zstd) compression algorithm.
* @param data The buffer of data to compress
* @param options Compression options to use
* @returns The output buffer with the compressed data
*/
function zstdCompressSync(
data: NodeJS.TypedArray | Buffer | string | ArrayBuffer,
options?: { level?: number },
): Buffer;
/**
* Compresses a chunk of data with the Zstandard (zstd) compression algorithm.
* @param data The buffer of data to compress
* @param options Compression options to use
* @returns A promise that resolves to the output buffer with the compressed data
*/
function zstdCompress(
data: NodeJS.TypedArray | Buffer | string | ArrayBuffer,
options?: { level?: number },
): Promise<Buffer>;
/**
* Decompresses a chunk of data with the Zstandard (zstd) decompression algorithm.
* @param data The buffer of data to decompress
* @returns The output buffer with the decompressed data
*/
function zstdDecompressSync(data: NodeJS.TypedArray | Buffer | string | ArrayBuffer): Buffer;
/**
* Decompresses a chunk of data with the Zstandard (zstd) decompression algorithm.
* @param data The buffer of data to decompress
* @returns A promise that resolves to the output buffer with the decompressed data
*/
function zstdDecompress(data: NodeJS.TypedArray | Buffer | string | ArrayBuffer): Promise<Buffer>;
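Taken together, the zstd helpers declared above support both synchronous and asynchronous round-trips; a minimal sketch:

```ts
const original = "hello hello hello hello";
const compressed = Bun.zstdCompressSync(original, { level: 5 });
const restored = await Bun.zstdDecompress(compressed);
console.log(restored.toString("utf8") === original); // true
```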
type Target =
/**
* For generating bundles that are intended to be run by the Bun runtime. In many cases,
@@ -5797,31 +5872,76 @@ declare module "bun" {
index: string;
}
/**
* Represents a TCP or TLS socket connection used for network communication.
* This interface provides methods for reading, writing, managing the connection state,
* and handling TLS-specific features if applicable.
*
* Sockets are created using `Bun.connect()` or accepted by a `Bun.listen()` server.
*
* @category HTTP & Networking
*/
interface Socket<Data = undefined> extends Disposable {
/**
* Write `data` to the socket
* Writes `data` to the socket. This method is unbuffered and non-blocking. This uses the `sendto(2)` syscall internally.
*
* @param data The data to write to the socket
* @param byteOffset The offset in the buffer to start writing from (defaults to 0)
* @param byteLength The number of bytes to write (defaults to the length of the buffer)
* For optimal performance with multiple small writes, consider batching multiple
* writes together into a single `socket.write()` call.
*
* When passed a string, `byteOffset` and `byteLength` refer to the UTF-8 offset, not the string character offset.
* @param data The data to write. Can be a string (encoded as UTF-8), `ArrayBuffer`, `TypedArray`, or `DataView`.
* @param byteOffset The offset in bytes within the buffer to start writing from. Defaults to 0. Ignored for strings.
* @param byteLength The number of bytes to write from the buffer. Defaults to the remaining length of the buffer from the offset. Ignored for strings.
* @returns The number of bytes written. Returns `-1` if the socket is closed or shutting down. Can return less than the input size if the socket's buffer is full (backpressure).
* @example
* ```ts
* // Send a string
* const bytesWritten = socket.write("Hello, world!\n");
*
* This is unbuffered as of Bun v0.2.2. That means individual write() calls
* will be slow. In the future, Bun will buffer writes and flush them at the
* end of the tick, when the event loop is idle, or sooner if the buffer is full.
* // Send binary data
* const buffer = new Uint8Array([0x01, 0x02, 0x03]);
* socket.write(buffer);
*
* // Send part of a buffer
* const largeBuffer = new Uint8Array(1024);
* // ... fill largeBuffer ...
* socket.write(largeBuffer, 100, 50); // Write 50 bytes starting from index 100
* ```
*/
write(data: string | BufferSource, byteOffset?: number, byteLength?: number): number;
/**
* The data context for the socket.
* The user-defined data associated with this socket instance.
* This can be set when the socket is created via `Bun.connect({ data: ... })`.
* It can be read or updated at any time.
*
* @example
* ```ts
* // In a socket handler
* function open(socket: Socket<{ userId: string }>) {
* console.log(`Socket opened for user: ${socket.data.userId}`);
* socket.data.lastActivity = Date.now(); // Update data
* }
* ```
*/
data: Data;
/**
* Like {@link Socket.write} except it includes a TCP FIN packet
* Sends the final data chunk and initiates a graceful shutdown of the socket's write side.
* After calling `end()`, no more data can be written using `write()` or `end()`.
* The socket remains readable until the remote end also closes its write side or the connection is terminated.
* This sends a TCP FIN packet after writing the data.
*
* Use it to send your last message and close the connection.
* @param data Optional final data to write before closing. Same types as `write()`.
* @param byteOffset Optional offset for buffer data.
* @param byteLength Optional length for buffer data.
* @returns The number of bytes written for the final chunk. Returns `-1` if the socket was already closed or shutting down.
* @example
* ```ts
* // send some data and close the write side
* socket.end("Goodbye!");
* // or close write side without sending final data
* socket.end();
* ```
*/
end(data?: string | BufferSource, byteOffset?: number, byteLength?: number): number;
@@ -5848,20 +5968,33 @@ declare module "bun" {
timeout(seconds: number): void;
/**
* Forcefully close the socket. The other end may not receive all data, and
* the socket will be closed immediately.
* Forcefully closes the socket connection immediately. This is an abrupt termination, unlike the graceful shutdown initiated by `end()`.
* It uses `SO_LINGER` with `l_onoff=1` and `l_linger=0` before calling `close(2)`.
* Consider using {@link close close()} or {@link end end()} for graceful shutdowns.
*
* This passes `SO_LINGER` with `l_onoff` set to `1` and `l_linger` set to
* `0` and then calls `close(2)`.
* @example
* ```ts
* socket.terminate();
* ```
*/
terminate(): void;
/**
* Shutdown writes to a socket
* Shuts down the write-half or both halves of the connection.
* This allows the socket to enter a half-closed state where it can still receive data
* but can no longer send data (`halfClose = true`), or close both read and write
* (`halfClose = false`, similar to `end()` but potentially more immediate depending on OS).
* Calls `shutdown(2)` syscall internally.
*
* This makes the socket a half-closed socket. It can still receive data.
* @param halfClose If `true`, only shuts down the write side (allows receiving). If `false` or omitted, shuts down both read and write. Defaults to `false`.
* @example
* ```ts
* // Stop sending data, but allow receiving
* socket.shutdown(true);
*
* This calls [shutdown(2)](https://man7.org/linux/man-pages/man2/shutdown.2.html) internally
* // Shutdown both reading and writing
* socket.shutdown();
* ```
*/
shutdown(halfClose?: boolean): void;
@@ -5887,6 +6020,11 @@ declare module "bun" {
/**
* Flush any buffered data to the socket
* This attempts to send the data immediately, but success depends on the network conditions
* and the receiving end.
* It might be necessary after several `write` calls if immediate sending is critical,
* though often the OS handles flushing efficiently. Note that `write` calls outside
* `open`/`data`/`drain` might benefit from manual `cork`/`flush`.
*/
flush(): void;
@@ -5908,17 +6046,31 @@ declare module "bun" {
/**
* Remote IP address connected to the socket
* @example "192.168.1.100" | "2001:db8::1"
*/
readonly remoteAddress: string;
/**
* Remote port connected to the socket
* @example 8080
*/
readonly remotePort: number;
/**
* IP protocol family used for the local endpoint of the socket
* @example "IPv4" | "IPv6"
*/
readonly localFamily: "IPv4" | "IPv6";
/**
* Local IP address connected to the socket
* @example "192.168.1.100" | "2001:db8::1"
*/
readonly localAddress: string;
/**
* local port connected to the socket
* @example 8080
*/
readonly localPort: number;
@@ -6022,7 +6174,7 @@ declare module "bun" {
* certificate.
* @return A certificate object.
*/
getPeerCertificate(): import("tls").PeerCertificate;
getPeerCertificate(): import("node:tls").PeerCertificate;
getPeerX509Certificate(): import("node:crypto").X509Certificate;
/**
@@ -6082,6 +6234,8 @@ declare module "bun" {
/**
* See `Session Resumption` for more information.
* @return `true` if the session was reused, `false` otherwise.
* **TLS Only:** Checks if the current TLS session was resumed from a previous session.
* Returns `true` if the session was resumed, `false` otherwise.
*/
isSessionReused(): boolean;
@@ -6124,9 +6278,98 @@ declare module "bun" {
setKeepAlive(enable?: boolean, initialDelay?: number): boolean;
/**
* The number of bytes written to the socket.
* The total number of bytes successfully written to the socket since it was established.
* This includes data currently buffered by the OS but not yet acknowledged by the remote peer.
*/
readonly bytesWritten: number;
/**
* Alias for `socket.end()`. Allows the socket to be used with `using` declarations
* for automatic resource management.
* @example
* ```ts
* async function processSocket() {
* using socket = await Bun.connect({ ... });
* socket.write("Data");
* // socket.end() is called automatically when exiting the scope
* }
* ```
*/
[Symbol.dispose](): void;
resume(): void;
pause(): void;
/**
* Renegotiates the TLS session. Only applicable if this is a TLS socket.
*/
renegotiate(): void;
/**
* Sets the verify mode of the socket.
*
* @param requestCert Whether to request a certificate.
* @param rejectUnauthorized Whether to reject unauthorized certificates.
*/
setVerifyMode(requestCert: boolean, rejectUnauthorized: boolean): void;
getSession(): void;
/**
* Sets the session of the socket.
*
* @param session The session to set.
*/
setSession(session: string | Buffer | BufferSource): void;
/**
* Exports the keying material of the socket.
*
* @param length The length of the keying material to export.
* @param label The label of the keying material to export.
* @param context The context of the keying material to export.
*/
exportKeyingMaterial(length: number, label: string, context?: string | BufferSource): void;
/**
* Upgrades the socket to a TLS socket.
*
* @param options The options for the upgrade.
* @returns A tuple containing the raw socket and the TLS socket.
* @see {@link TLSUpgradeOptions}
*/
upgradeTLS<Data>(options: TLSUpgradeOptions<Data>): [raw: Socket<Data>, tls: Socket<Data>];
/**
* Closes the socket.
*
* This is a wrapper around `end()` and `shutdown()`.
*
* @see {@link end}
* @see {@link shutdown}
*/
close(): void;
/**
* Returns the servername of the socket.
*
* @see {@link setServername}
*/
getServername(): string;
/**
* Sets the servername of the socket.
*
* @see {@link getServername}
*/
setServername(name: string): void;
}
interface TLSUpgradeOptions<Data> {
data?: Data;
tls: TLSOptions | boolean;
socket: SocketHandler<Data>;
}
interface SocketListener<Data = undefined> extends Disposable {
@@ -6227,6 +6470,22 @@ declare module "bun" {
* The per-instance data context
*/
data?: Data;
/**
* Whether to allow half-open connections.
*
* A half-open connection occurs when one end of the connection has called `close()`
* or sent a FIN packet, while the other end remains open. When set to `true`:
*
* - The socket won't automatically send FIN when the remote side closes its end
* - The local side can continue sending data even after the remote side has closed
* - The application must explicitly call `end()` to fully close the connection
*
* When `false`, the socket automatically closes both ends of the connection when
* either side closes.
*
* @default false
*/
allowHalfOpen?: boolean;
}
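A sketch of `allowHalfOpen` in practice with `Bun.connect` (the host, port, and request string are placeholders):

```ts
const socket = await Bun.connect({
  hostname: "example.com",
  port: 80,
  allowHalfOpen: true, // keep our write side open after the peer sends FIN
  socket: {
    data(socket, chunk) {
      console.log("received", chunk.byteLength, "bytes");
    },
    end(socket) {
      // The peer closed its side; we can still finish writing before calling end().
      socket.end();
    },
  },
});
socket.write("HEAD / HTTP/1.1\r\nHost: example.com\r\nConnection: close\r\n\r\n");
```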
interface TCPSocketListenOptions<Data = undefined> extends SocketOptions<Data> {
@@ -6241,7 +6500,7 @@ declare module "bun" {
/**
* The TLS configuration object with which to create the server
*/
tls?: TLSOptions;
tls?: TLSOptions | boolean;
/**
* Whether to use exclusive mode.
*
@@ -6287,7 +6546,7 @@ declare module "bun" {
/**
* TLS Configuration with which to create the socket
*/
tls?: boolean;
tls?: TLSOptions | boolean;
/**
* Whether to use exclusive mode.
*
@@ -6303,22 +6562,8 @@ declare module "bun" {
* @default false
*/
exclusive?: boolean;
/**
* Whether to allow half-open connections.
*
* A half-open connection occurs when one end of the connection has called `close()`
* or sent a FIN packet, while the other end remains open. When set to `true`:
*
* - The socket won't automatically send FIN when the remote side closes its end
* - The local side can continue sending data even after the remote side has closed
* - The application must explicitly call `end()` to fully close the connection
*
* When `false` (default), the socket automatically closes both ends of the connection
* when either side closes.
*
* @default false
*/
allowHalfOpen?: boolean;
reusePort?: boolean;
ipv6Only?: boolean;
}
interface UnixSocketOptions<Data = undefined> extends SocketOptions<Data> {
@@ -6329,14 +6574,14 @@ declare module "bun" {
/**
* TLS Configuration with which to create the socket
*/
tls?: TLSOptions;
tls?: TLSOptions | boolean;
}
interface FdSocketOptions<Data = undefined> extends SocketOptions<Data> {
/**
* TLS Configuration with which to create the socket
*/
tls?: TLSOptions;
tls?: TLSOptions | boolean;
/**
* The file descriptor to connect to
*/
@@ -6605,7 +6850,7 @@ declare module "bun" {
* incoming messages, and `subprocess.send` can send messages to the subprocess. Messages are serialized
* using the JSC serialize API, which allows for the same types that `postMessage`/`structuredClone` supports.
*
* The subprocess can send and recieve messages by using `process.send` and `process.on("message")`,
* The subprocess can send and receive messages by using `process.send` and `process.on("message")`,
* respectively. This is the same API as what Node.js exposes when `child_process.fork()` is used.
*
* Currently, this is only compatible with processes that are other `bun` instances.
@@ -7443,9 +7688,16 @@ declare module "bun" {
workspaces: {
[workspace: string]: BunLockFileWorkspacePackage;
};
/** @see https://bun.sh/docs/install/overrides */
overrides?: Record<string, string>;
/** @see https://bun.sh/docs/install/patch */
patchedDependencies?: Record<string, string>;
/** @see https://bun.sh/docs/install/lifecycle#trusteddependencies */
trustedDependencies?: string[];
/** @see https://bun.sh/docs/install/catalogs */
catalog?: Record<string, string>;
/** @see https://bun.sh/docs/install/catalogs */
catalogs?: Record<string, Record<string, string>>;
/**
* ```

View File

@@ -10,6 +10,7 @@ declare module "bun" {
type NodeCryptoWebcryptoSubtleCrypto = import("crypto").webcrypto.SubtleCrypto;
type NodeCryptoWebcryptoCryptoKey = import("crypto").webcrypto.CryptoKey;
type NodeCryptoWebcryptoCryptoKeyPair = import("crypto").webcrypto.CryptoKeyPair;
type LibEmptyOrBunWebSocket = LibDomIsLoaded extends true ? {} : Bun.WebSocket;
@@ -884,6 +885,8 @@ declare var CryptoKey: {
new (): CryptoKey;
};
interface CryptoKeyPair extends Bun.__internal.NodeCryptoWebcryptoCryptoKeyPair {}
interface Position {
lineText: string;
file: string;

View File

@@ -596,6 +596,19 @@ declare module "bun" {
* @returns Promise that resolves with the value of the key, or null if the key doesn't exist
*/
getex(key: string | ArrayBufferView | Blob): Promise<string | null>;
/**
* Ping the server
* @returns Promise that resolves with "PONG" if the server is reachable, or throws an error if the server is not reachable
*/
ping(): Promise<"PONG">;
/**
* Ping the server with a message
* @param message The message to send to the server
* @returns Promise that resolves with the message if the server is reachable, or throws an error if the server is not reachable
*/
ping(message: string | ArrayBufferView | Blob): Promise<string>;
}
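Usage of the new `ping` overloads might look like this (assumes a reachable Redis/Valkey server configured for `Bun.redis`):

```ts
console.log(await Bun.redis.ping()); // "PONG"
console.log(await Bun.redis.ping("hello")); // echoes back "hello"
```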
/**

View File

@@ -88,15 +88,19 @@ declare module "bun:test" {
*/
export function setSystemTime(now?: Date | number): ThisType<void>;
interface Jest {
restoreAllMocks(): void;
clearAllMocks(): void;
fn<T extends (...args: any[]) => any>(func?: T): Mock<T>;
setSystemTime(now?: number | Date): void;
setTimeout(milliseconds: number): void;
}
export const jest: Jest;
export namespace jest {
function restoreAllMocks(): void;
function clearAllMocks(): void;
function fn<T extends (...args: any[]) => any>(func?: T): Mock<T>;
function setSystemTime(now?: number | Date): void;
function setTimeout(milliseconds: number): void;
function useFakeTimers(): void;
function useRealTimers(): void;
function spyOn<T extends object, K extends keyof T>(
obj: T,
methodOrPropertyValue: K,
): Mock<Extract<T[K], (...args: any[]) => any>>;
/**
* Constructs the type of a mock function, e.g. the return type of `jest.fn()`.
*/
@@ -146,7 +150,7 @@ declare module "bun:test" {
export function spyOn<T extends object, K extends keyof T>(
obj: T,
methodOrPropertyValue: K,
): Mock<T[K] extends (...args: any[]) => any ? T[K] : never>;
): Mock<Extract<T[K], (...args: any[]) => any>>;
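The tightened `spyOn` return type in use (a minimal sketch):

```ts
import { expect, spyOn, test } from "bun:test";

const calculator = {
  add(a: number, b: number) {
    return a + b;
  },
};

test("spyOn only accepts function-valued members", () => {
  const spy = spyOn(calculator, "add"); // Mock<(a: number, b: number) => number>
  expect(calculator.add(2, 3)).toBe(5);
  expect(spy).toHaveBeenCalledTimes(1);
  spy.mockRestore();
});
```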
interface FunctionLike {
readonly name: string;

View File

@@ -440,7 +440,7 @@ struct us_socket_t* us_socket_context_connect_resolved_dns(struct us_socket_cont
socket->flags.is_paused = 0;
socket->flags.is_ipc = 0;
socket->connect_state = NULL;
socket->connect_next = NULL;
us_internal_socket_context_link_socket(context, socket);
@@ -459,7 +459,7 @@ static void init_addr_with_port(struct addrinfo* info, int port, struct sockaddr
}
}
static int try_parse_ip(const char *ip_str, int port, struct sockaddr_storage *storage) {
static bool try_parse_ip(const char *ip_str, int port, struct sockaddr_storage *storage) {
memset(storage, 0, sizeof(struct sockaddr_storage));
// Try to parse as IPv4
struct sockaddr_in *addr4 = (struct sockaddr_in *)storage;
@@ -469,7 +469,7 @@ static int try_parse_ip(const char *ip_str, int port, struct sockaddr_storage *s
#ifdef __APPLE__
addr4->sin_len = sizeof(struct sockaddr_in);
#endif
return 0;
return 1;
}
// Try to parse as IPv6
@@ -480,17 +480,17 @@ static int try_parse_ip(const char *ip_str, int port, struct sockaddr_storage *s
#ifdef __APPLE__
addr6->sin6_len = sizeof(struct sockaddr_in6);
#endif
return 0;
return 1;
}
// If we reach here, the input is neither IPv4 nor IPv6
return 1;
return 0;
}
void *us_socket_context_connect(int ssl, struct us_socket_context_t *context, const char *host, int port, int options, int socket_ext_size, int* is_connecting) {
void *us_socket_context_connect(int ssl, struct us_socket_context_t *context, const char *host, int port, int options, int socket_ext_size, int* has_dns_resolved) {
#ifndef LIBUS_NO_SSL
if (ssl == 1) {
return us_internal_ssl_socket_context_connect((struct us_internal_ssl_socket_context_t *) context, host, port, options, socket_ext_size, is_connecting);
return us_internal_ssl_socket_context_connect((struct us_internal_ssl_socket_context_t *) context, host, port, options, socket_ext_size, has_dns_resolved);
}
#endif
@@ -498,8 +498,8 @@ void *us_socket_context_connect(int ssl, struct us_socket_context_t *context, co
// fast path for IP addresses in text form
struct sockaddr_storage addr;
if (try_parse_ip(host, port, &addr) == 0) {
*is_connecting = 1;
if (try_parse_ip(host, port, &addr)) {
*has_dns_resolved = 1;
return us_socket_context_connect_resolved_dns(context, &addr, options, socket_ext_size);
}
@@ -518,7 +518,7 @@ void *us_socket_context_connect(int ssl, struct us_socket_context_t *context, co
if (result->entries && result->entries->info.ai_next == NULL) {
struct sockaddr_storage addr;
init_addr_with_port(&result->entries->info, port, &addr);
*is_connecting = 1;
*has_dns_resolved = 1;
struct us_socket_t *s = us_socket_context_connect_resolved_dns(context, &addr, options, socket_ext_size);
Bun__addrinfo_freeRequest(ai_req, s == NULL);
return s;

View File

@@ -213,7 +213,7 @@ struct us_internal_ssl_socket_t *ssl_on_open(struct us_internal_ssl_socket_t *s,
s->ssl_read_wants_write = 0;
s->fatal_error = 0;
s->handshake_state = HANDSHAKE_PENDING;
SSL_set_bio(s->ssl, loop_ssl_data->shared_rbio, loop_ssl_data->shared_wbio);
// if we allow renegotiation, we need to set the mode here
@@ -255,7 +255,7 @@ struct us_internal_ssl_socket_t *ssl_on_open(struct us_internal_ssl_socket_t *s,
}
/// @brief Complete the shutdown or do a fast shutdown when needed, this should only be called before closing the socket
/// @param s
/// @param s
int us_internal_handle_shutdown(struct us_internal_ssl_socket_t *s, int force_fast_shutdown) {
// if we are already shutdown or in the middle of a handshake we dont need to do anything
// Scenarios:
@@ -265,7 +265,7 @@ int us_internal_handle_shutdown(struct us_internal_ssl_socket_t *s, int force_fa
// 4 - we are in the middle of a handshake
// 5 - we received a fatal error
if(us_internal_ssl_socket_is_shut_down(s) || s->fatal_error || !SSL_is_init_finished(s->ssl)) return 1;
// we are closing the socket but did not sent a shutdown yet
int state = SSL_get_shutdown(s->ssl);
int sent_shutdown = state & SSL_SENT_SHUTDOWN;
@@ -277,7 +277,7 @@ int us_internal_handle_shutdown(struct us_internal_ssl_socket_t *s, int force_fa
// Zero means that we should wait for the peer to close the connection
// but we are already closing the connection so we do a fast shutdown here
int ret = SSL_shutdown(s->ssl);
if(ret == 0 && force_fast_shutdown) {
if(ret == 0 && force_fast_shutdown) {
// do a fast shutdown (dont wait for peer)
ret = SSL_shutdown(s->ssl);
}
@@ -397,7 +397,7 @@ void us_internal_update_handshake(struct us_internal_ssl_socket_t *s) {
// nothing todo here, renegotiation must be handled in SSL_read
if (s->handshake_state != HANDSHAKE_PENDING)
return;
if (us_internal_ssl_socket_is_closed(s) || us_internal_ssl_socket_is_shut_down(s) ||
(s->ssl && SSL_get_shutdown(s->ssl) & SSL_RECEIVED_SHUTDOWN)) {
@@ -422,7 +422,7 @@ void us_internal_update_handshake(struct us_internal_ssl_socket_t *s) {
s->fatal_error = 1;
}
us_internal_trigger_handshake_callback(s, 0);
return;
}
s->handshake_state = HANDSHAKE_PENDING;
@@ -504,7 +504,7 @@ restart:
loop_ssl_data->ssl_read_output +
LIBUS_RECV_BUFFER_PADDING + read,
LIBUS_RECV_BUFFER_LENGTH - read);
if (just_read <= 0) {
int err = SSL_get_error(s->ssl, just_read);
// as far as I know these are the only errors we want to handle
@@ -603,7 +603,7 @@ restart:
goto restart;
}
}
// Trigger writable if we failed last SSL_write with SSL_ERROR_WANT_READ
// Trigger writable if we failed last SSL_write with SSL_ERROR_WANT_READ
// If we failed SSL_read because we need to write more data (SSL_ERROR_WANT_WRITE) we are not going to trigger on_writable, we will wait until the next on_data or on_writable event
// SSL_read will try to flush the write buffer and if fails with SSL_ERROR_WANT_WRITE means the socket is not in a writable state anymore and only makes sense to trigger on_writable if we can write more data
// Otherwise we possible would trigger on_writable -> on_data event in a recursive loop
@@ -1133,7 +1133,7 @@ int us_verify_callback(int preverify_ok, X509_STORE_CTX *ctx) {
}
SSL_CTX *create_ssl_context_from_bun_options(
struct us_bun_socket_context_options_t options,
struct us_bun_socket_context_options_t options,
enum create_bun_socket_error_t *err) {
ERR_clear_error();
@@ -1250,8 +1250,8 @@ SSL_CTX *create_ssl_context_from_bun_options(
return NULL;
}
// It may return spurious errors here.
ERR_clear_error();
// It may return spurious errors here.
ERR_clear_error();
if (options.reject_unauthorized) {
SSL_CTX_set_verify(ssl_context,
@@ -1755,7 +1755,7 @@ int us_internal_ssl_socket_raw_write(struct us_internal_ssl_socket_t *s,
int us_internal_ssl_socket_write(struct us_internal_ssl_socket_t *s,
const char *data, int length, int msg_more) {
if (us_socket_is_closed(0, &s->s) || us_internal_ssl_socket_is_shut_down(s) || length == 0) {
return 0;
}
@@ -1989,7 +1989,7 @@ ssl_wrapped_context_on_end(struct us_internal_ssl_socket_t *s) {
if (wrapped_context->events.on_end) {
wrapped_context->events.on_end((struct us_socket_t *)s);
}
return s;
}
@@ -2082,7 +2082,7 @@ struct us_internal_ssl_socket_t *us_internal_ssl_socket_wrap_with_tls(
struct us_socket_context_t *context = us_create_bun_ssl_socket_context(
old_context->loop, sizeof(struct us_wrapped_socket_context_t),
options, &err);
// Handle SSL context creation failure
if (UNLIKELY(!context)) {
return NULL;
@@ -2186,4 +2186,4 @@ us_socket_context_on_socket_connect_error(
return socket;
}
#endif
#endif

View File

@@ -785,6 +785,23 @@
"items": {
"type": "string"
}
},
"catalog": {
"type": "object",
"description": "A single default catalog for commonly used dependencies. Referenced with 'catalog:' in workspace package dependencies.",
"additionalProperties": {
"type": "string"
}
},
"catalogs": {
"type": "object",
"description": "Multiple named catalogs for grouping dependencies. Referenced with 'catalog:catalogName' in workspace package dependencies.",
"additionalProperties": {
"type": "object",
"additionalProperties": {
"type": "string"
}
}
}
}
}

View File

@@ -1,4 +1,4 @@
# Version: 7
# Version: 8
# A script that installs the dependencies needed to build and test Bun.
# This should work on Windows 10 or newer with PowerShell.

View File

@@ -1,5 +1,5 @@
#!/bin/sh
# Version: 10
# Version: 11
# A script that installs the dependencies needed to build and test Bun.
# This should work on macOS and Linux with a POSIX shell.

scripts/fetch-node-test.ts (new file, 112 lines)
View File

@@ -0,0 +1,112 @@
import { mkdirSync, writeFileSync } from "fs";
import path, { dirname, join } from "path";
const options: RequestInit = {};
if (process.env.GITHUB_TOKEN) {
options.headers = {
Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
};
}
async function fetchNodeTest(testName: string) {
const nodeRepoUrl = "https://raw.githubusercontent.com/nodejs/node/main";
const extensions = ["js", "mjs", "ts"];
const testDirs = ["test/parallel", "test/sequential"];
// Try different combinations of test name patterns
const testNameVariations = [
testName,
testName.startsWith("test-") ? testName : `test-${testName}`,
testName.replace(/^test-/, ""),
];
for (const testDir of testDirs) {
for (const nameVariation of testNameVariations) {
// Try with extensions
for (const ext of extensions) {
const testPath = `${testDir}/${nameVariation}.${ext}`;
const url = `${nodeRepoUrl}/${testPath}`;
try {
console.log(`Trying: ${url}`);
const response = await fetch(url, options);
if (response.ok) {
const content = await response.text();
const localPath = join("test/js/node", testPath);
// Create directory if it doesn't exist
mkdirSync(dirname(localPath), { recursive: true });
// Write the file
writeFileSync(localPath, content);
console.log(
`✅ Successfully fetched and saved: ${localPath} (${new Intl.NumberFormat("en-US", {
notation: "compact",
unit: "kilobyte",
}).format(Buffer.byteLength(content, "utf-8"))})`,
);
return localPath;
}
} catch (error) {
// Continue to next variation
}
}
// Try without extension
const testPath = `${testDir}/${nameVariation}`;
const url = `${nodeRepoUrl}/${testPath}`;
try {
console.log(`Trying: ${url}`);
const response = await fetch(url, options);
if (response.ok) {
const content = await response.text();
const localPath = join("test/js/node", testPath);
// Create directory if it doesn't exist
mkdirSync(dirname(localPath), { recursive: true });
// Write the file
writeFileSync(localPath, content);
console.log(
`✅ Successfully fetched and saved: ${localPath} (${new Intl.NumberFormat("en-US", {
notation: "compact",
unit: "kilobyte",
}).format(Buffer.byteLength(content, "utf-8"))})`,
);
return localPath;
}
} catch (error) {
// Continue to next variation
}
}
}
throw new Error(`❌ Could not find test: ${testName}`);
}
// Get test name from command line arguments
let testName = process.argv[2];
if (testName.startsWith(path.join(import.meta.dirname, ".."))) {
testName = testName.slice(path.join(import.meta.dirname, "..").length);
}
if (testName.startsWith("test/parallel/")) {
testName = testName.replace("test/parallel/", "");
} else if (testName.startsWith("test/sequential/")) {
testName = testName.replace("test/sequential/", "");
}
if (!testName) {
console.error("Usage: bun scripts/fetch-node-test.ts <test-name>");
process.exit(1);
}
try {
await fetchNodeTest(testName);
} catch (error) {
console.error(error.message);
process.exit(1);
}

scripts/gamble.ts (new executable file, 63 lines)
View File

@@ -0,0 +1,63 @@
#!/usr/bin/env bun
// usage: bun scripts/gamble.ts <number of attempts> <timeout in seconds> <command>
import assert from "node:assert";
const attempts = parseInt(process.argv[2]);
const timeout = parseFloat(process.argv[3]);
const argv = process.argv.slice(4);
let numTimedOut = 0;
const signals = new Map<string, number>();
const codes = new Map<number, number>();
let numOk = 0;
for (let i = 0; i < attempts; i++) {
const proc = Bun.spawn({
cmd: argv,
timeout: 1000 * timeout,
stdin: null,
stdout: "ignore",
stderr: "pipe",
});
await proc.exited;
const errors = await new Response(proc.stderr).text();
const { signalCode: signal, exitCode } = proc;
if (signal === "SIGTERM") {
// sent for timeouts
numTimedOut += 1;
} else if (signal) {
const newCount = 1 + (signals.get(signal) ?? 0);
signals.set(signal, newCount);
} else if (exitCode !== 0) {
// if null there should have been a signal
assert(exitCode !== null);
const newCount = 1 + (codes.get(exitCode) ?? 0);
codes.set(exitCode, newCount);
} else {
numOk += 1;
}
if (exitCode !== 0) console.log(errors);
process.stdout.write(exitCode === 0 ? "." : "!");
}
process.stdout.write("\n");
const width = attempts.toString().length;
const pad = (num: number): string => num.toString().padStart(width, " ");
const green = (text: string) => console.log(`\x1b[32m${text}\x1b[0m`);
const red = (text: string) => console.log(`\x1b[31m${text}\x1b[0m`);
green(`${pad(numOk)}/${attempts} OK`);
if (numTimedOut > 0) {
red(`${pad(numTimedOut)}/${attempts} timeout`);
}
for (const [signal, count] of signals.entries()) {
red(`${pad(count)}/${attempts} ${signal}`);
}
for (const [code, count] of codes.entries()) {
red(`${pad(count)}/${attempts} code ${code}`);
}
process.exit(numOk === attempts ? 0 : 1);

View File

@@ -18,12 +18,14 @@ async function globSources(output, patterns, excludes = []) {
}
total += paths.length;
const sources = paths
.map(path => normalize(relative(root, path)))
.sort((a, b) => a.localeCompare(b))
.join("\n");
const sources =
paths
.map(path => normalize(relative(root, path)))
.sort((a, b) => a.localeCompare(b))
.join("\n")
.trim() + "\n";
await write(join(root, "cmake", output), sources);
await write(join(root, "cmake", "sources", output), sources);
}
const input = await file(join(root, "cmake", "Sources.json")).json();

View File

@@ -858,7 +858,8 @@ function getSshKeys() {
const sshFiles = readdirSync(sshPath, { withFileTypes: true, encoding: "utf-8" });
const publicPaths = sshFiles
.filter(entry => entry.isFile() && entry.name.endsWith(".pub"))
.map(({ name }) => join(sshPath, name));
.map(({ name }) => join(sshPath, name))
.filter(path => !readFile(path, { cache: true }).startsWith("ssh-ed25519"));
sshKeys.push(
...publicPaths.map(publicPath => ({

View File

@@ -0,0 +1,26 @@
import { existsSync } from "node:fs";
import { dirname, join } from "node:path";
const bunRepo = dirname(import.meta.dir);
const webkitRepo = join(bunRepo, "vendor/WebKit");
if (!existsSync(webkitRepo)) {
console.log("could not find WebKit clone");
console.log("clone https://github.com/oven-sh/WebKit.git to vendor/WebKit");
console.log("or create a symlink/worktree to an existing clone");
process.exit(1);
}
process.chdir(webkitRepo);
const checkedOutCommit = (await Bun.$`git rev-parse HEAD`.text()).trim();
const cmakeContents = await Bun.file(join(bunRepo, "cmake/tools/SetupWebKit.cmake")).text();
const expectedCommit = cmakeContents.match(/set\(WEBKIT_VERSION ([0-9a-f]{40})\)/)![1];
if (checkedOutCommit == expectedCommit) {
console.log(`already at commit ${expectedCommit}`);
} else {
console.log(`changing from ${checkedOutCommit} to ${expectedCommit}`);
await Bun.$`git checkout main`;
await Bun.$`git pull`;
// it is OK that this leaves you with a detached HEAD
await Bun.$`git checkout ${expectedCommit}`;
}

View File

@@ -290,7 +290,7 @@ export async function spawn(command, options = {}) {
if (exitCode !== 0 && isWindows) {
const exitReason = getWindowsExitReason(exitCode);
if (exitReason) {
exitCode = exitReason;
signalCode = exitReason;
}
}
@@ -386,7 +386,7 @@ export function spawnSync(command, options = {}) {
if (exitCode !== 0 && isWindows) {
const exitReason = getWindowsExitReason(exitCode);
if (exitReason) {
exitCode = exitReason;
signalCode = exitReason;
}
}
@@ -442,9 +442,37 @@ export function spawnSyncSafe(command, options = {}) {
* @returns {string | undefined}
*/
export function getWindowsExitReason(exitCode) {
const ntStatusPath = "C:\\Program Files (x86)\\Windows Kits\\10\\Include\\10.0.22621.0\\shared\\ntstatus.h";
const nthStatus = readFile(ntStatusPath, { cache: true });
const windowsKitPath = "C:\\Program Files (x86)\\Windows Kits";
if (!existsSync(windowsKitPath)) {
return;
}
const windowsKitPaths = readdirSync(windowsKitPath)
.filter(filename => isFinite(parseInt(filename)))
.sort((a, b) => parseInt(b) - parseInt(a));
let ntStatusPath;
for (const windowsKitPath of windowsKitPaths) {
const includePath = `${windowsKitPath}\\Include`;
if (!existsSync(includePath)) {
continue;
}
const windowsSdkPaths = readdirSync(includePath).sort();
for (const windowsSdkPath of windowsSdkPaths) {
const statusPath = `${includePath}\\${windowsSdkPath}\\shared\\ntstatus.h`;
if (existsSync(statusPath)) {
ntStatusPath = statusPath;
break;
}
}
}
if (!ntStatusPath) {
return;
}
const nthStatus = readFile(ntStatusPath, { cache: true });
const match = nthStatus.match(new RegExp(`(STATUS_\\w+).*0x${exitCode?.toString(16)}`, "i"));
if (match) {
const [, exitReason] = match;

View File

@@ -0,0 +1,155 @@
import * as fs from "fs";
import * as path from "path";
/**
* Removes unreferenced top-level const declarations from a Zig file
* Handles patterns like: const <IDENTIFIER> = @import(...) or const <IDENTIFIER> = ...
*/
export function removeUnreferencedImports(content: string): string {
let modified = true;
let result = content;
// Keep iterating until no more changes are made
while (modified) {
modified = false;
const lines = result.split("\n");
const newLines: string[] = [];
for (let i = 0; i < lines.length; i++) {
const line = lines[i];
// Match top-level const declarations: const <IDENTIFIER> = ...
const constMatch = line.match(/^const\s+([a-zA-Z_][a-zA-Z0-9_]*)\s*=(.*)$/);
if (constMatch) {
const identifier = constMatch[1];
const assignmentPart = constMatch[2];
// Skip lines that contain '{' in the assignment (likely structs/objects)
if (assignmentPart.includes("{")) {
newLines.push(line);
continue;
}
// Check if this identifier is referenced anywhere else in the file
const isReferenced = isIdentifierReferenced(identifier, lines, i);
if (!isReferenced) {
// Skip this line (delete it)
modified = true;
console.log(`Removing unreferenced import: ${identifier}`);
continue;
}
}
newLines.push(line);
}
result = newLines.join("\n");
}
return result;
}
/**
* Check if an identifier is referenced anywhere in the file except at the declaration line
*/
function isIdentifierReferenced(identifier: string, lines: string[], declarationLineIndex: number): boolean {
// Create a regex that matches the identifier as a whole word
// This prevents matching partial words (e.g. "std" shouldn't match "stdx")
const identifierRegex = new RegExp(`\\b${escapeRegex(identifier)}\\b`);
for (let i = 0; i < lines.length; i++) {
// Skip the declaration line itself
if (i === declarationLineIndex) {
continue;
}
const line = lines[i];
// Check if the identifier appears in this line
if (identifierRegex.test(line)) {
return true;
}
}
return false;
}
/**
* Escape special regex characters in a string
*/
function escapeRegex(string: string): string {
return string.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
}
/**
* Process a single Zig file
*/
export function processZigFile(filePath: string): void {
try {
const content = fs.readFileSync(filePath, "utf-8");
const cleaned = removeUnreferencedImports(content);
if (content !== cleaned) {
fs.writeFileSync(filePath, cleaned);
console.log(`Cleaned: ${filePath}`);
} else {
console.log(`No changes: ${filePath}`);
}
} catch (error) {
console.error(`Error processing ${filePath}:`, error);
}
}
/**
* Process multiple Zig files or directories
*/
export function processFiles(paths: string[]): void {
for (const inputPath of paths) {
const stat = fs.statSync(inputPath);
if (stat.isDirectory()) {
// Process all .zig files in directory recursively
processDirectory(inputPath);
} else if (inputPath.endsWith(".zig")) {
processZigFile(inputPath);
} else {
console.warn(`Skipping non-Zig file: ${inputPath}`);
}
}
}
/**
* Recursively process all .zig files in a directory
*/
function processDirectory(dirPath: string): void {
const entries = fs.readdirSync(dirPath, { withFileTypes: true });
for (const entry of entries) {
const fullPath = path.join(dirPath, entry.name);
if (entry.isDirectory()) {
processDirectory(fullPath);
} else if (entry.name.endsWith(".zig")) {
processZigFile(fullPath);
}
}
}
// CLI usage
if (require.main === module) {
const args = process.argv.slice(2);
if (args.length === 0) {
console.log("Usage: bun zig-remove-unreferenced-top-level-decls.ts <file1.zig> [file2.zig] [directory]...");
console.log("");
console.log("Examples:");
console.log(" bun zig-remove-unreferenced-top-level-decls.ts file.zig");
console.log(" bun zig-remove-unreferenced-top-level-decls.ts src/");
console.log(" bun zig-remove-unreferenced-top-level-decls.ts file1.zig file2.zig src/");
process.exit(1);
}
processFiles(args);
}

View File

@@ -3,7 +3,6 @@ const Environment = @import("./env.zig");
const Output = @import("output.zig");
const use_mimalloc = bun.use_mimalloc;
const StringTypes = @import("./string_types.zig");
const Mimalloc = bun.Mimalloc;
const bun = @import("bun");

View File

@@ -1,6 +1,5 @@
const std = @import("std");
const bun = @import("bun");
const string = bun.string;
const ImportRecord = @import("./import_record.zig").ImportRecord;
const ImportKind = @import("./import_record.zig").ImportKind;
const lol = @import("./deps/lol-html.zig");

View File

@@ -26,7 +26,6 @@ const std = @import("std");
const builtin = @import("builtin");
const bun = @import("bun");
const assert = bun.assert;
const testing = std.testing;
const Thread = std.Thread;
const Futex = bun.Futex;

View File

@@ -17,7 +17,6 @@
const std = @import("std");
const builtin = @import("builtin");
const windows = std.os.windows;
const testing = std.testing;
const assert = bun.assert;
const Progress = @This();
const bun = @import("bun");

View File

@@ -46,7 +46,7 @@ pub const HashType = u32;
const no_watch_item: WatchItemIndex = std.math.maxInt(WatchItemIndex);
/// Initializes a watcher. Each watcher is tied to some context type, which
/// recieves watch callbacks on the watcher thread. This function does not
/// receives watch callbacks on the watcher thread. This function does not
/// actually start the watcher thread.
///
/// const watcher = try Watcher.init(T, instance_of_t, fs, bun.default_allocator)
@@ -670,12 +670,9 @@ const std = @import("std");
const bun = @import("bun");
const string = bun.string;
const Output = bun.Output;
const Global = bun.Global;
const Environment = bun.Environment;
const strings = bun.strings;
const stringZ = bun.stringZ;
const FeatureFlags = bun.FeatureFlags;
const options = @import("./options.zig");
const Mutex = bun.Mutex;
const Futex = @import("./futex.zig");
const PackageJSON = @import("./resolver/package_json.zig").PackageJSON;

View File

@@ -1,8 +1,6 @@
const std = @import("std");
const FeatureFlags = @import("./feature_flags.zig");
const Environment = @import("./env.zig");
const FixedBufferAllocator = std.heap.FixedBufferAllocator;
const bun = @import("bun");
const OOM = bun.OOM;

View File

@@ -1,12 +1,10 @@
const mem = @import("std").mem;
const builtin = @import("std").builtin;
const std = @import("std");
const bun = @import("bun");
const log = bun.Output.scoped(.mimalloc, true);
const assert = bun.assert;
const Allocator = mem.Allocator;
const mimalloc = @import("./mimalloc.zig");
const FeatureFlags = @import("../feature_flags.zig");
const Environment = @import("../env.zig");
fn mimalloc_free(

View File

@@ -1,5 +1,4 @@
const mem = @import("std").mem;
const builtin = @import("std").builtin;
const std = @import("std");
const mimalloc = @import("./mimalloc.zig");

View File

@@ -1,24 +1,9 @@
const bun = @import("bun");
const string = bun.string;
const Output = bun.Output;
const Global = bun.Global;
const Environment = bun.Environment;
const strings = bun.strings;
const MutableString = bun.MutableString;
const stringZ = bun.stringZ;
const default_allocator = bun.default_allocator;
const FeatureFlags = bun.FeatureFlags;
const sync = @import("../sync.zig");
const std = @import("std");
const HTTP = bun.http;
const URL = @import("../url.zig").URL;
const Fs = @import("../fs.zig");
const Analytics = @import("./analytics_schema.zig").analytics;
const Writer = @import("./analytics_schema.zig").Writer;
const Headers = bun.http.Headers;
const Futex = @import("../futex.zig");
const Semver = bun.Semver;
/// Enables analytics. This is used by:
@@ -274,7 +259,6 @@ pub const EventName = enum(u8) {
};
var random: std.rand.DefaultPrng = undefined;
const DotEnv = @import("../env_loader.zig");
const platform_arch = if (Environment.isAarch64) Analytics.Architecture.arm else Analytics.Architecture.x64;

View File

@@ -1,6 +1,5 @@
const std = @import("std");
const bun = @import("bun");
const unicode = std.unicode;
const js_ast = bun.JSAst;

View File

@@ -664,7 +664,7 @@ pub const FilePoll = struct {
/// Only intended to be used from EventLoop.Pollable
fn deactivate(this: *FilePoll, loop: *Loop) void {
loop.num_polls -= @as(i32, @intFromBool(this.flags.contains(.has_incremented_poll_count)));
if (this.flags.contains(.has_incremented_poll_count)) loop.dec();
this.flags.remove(.has_incremented_poll_count);
loop.subActive(@as(u32, @intFromBool(this.flags.contains(.has_incremented_active_count))));
@@ -676,7 +676,7 @@ pub const FilePoll = struct {
fn activate(this: *FilePoll, loop: *Loop) void {
this.flags.remove(.closed);
loop.num_polls += @as(i32, @intFromBool(!this.flags.contains(.has_incremented_poll_count)));
if (!this.flags.contains(.has_incremented_poll_count)) loop.inc();
this.flags.insert(.has_incremented_poll_count);
if (this.flags.contains(.keeps_event_loop_alive)) {

View File

@@ -104,11 +104,11 @@ JSC::JSInternalPromise* bakeModuleLoaderFetch(JSC::JSGlobalObject* globalObject,
auto& vm = JSC::getVM(globalObject);
auto scope = DECLARE_THROW_SCOPE(vm);
auto moduleKey = key.toWTFString(globalObject);
if (UNLIKELY(scope.exception()))
if (scope.exception()) [[unlikely]]
return rejectedInternalPromise(globalObject, scope.exception()->value());
if (moduleKey.startsWith("bake:/"_s)) {
if (LIKELY(global->m_perThreadData)) {
if (global->m_perThreadData) [[likely]] {
BunString source = BakeProdLoad(global->m_perThreadData, Bun::toString(moduleKey));
if (source.tag != BunStringTag::Dead) {
JSC::SourceOrigin origin = JSC::SourceOrigin(WTF::URL(moduleKey));

View File

@@ -40,7 +40,7 @@ extern "C" JSC::JSPromise* BakeRenderRoutesForProdStatic(
NakedPtr<JSC::Exception> returnedException = nullptr;
auto result = JSC::profiledCall(global, JSC::ProfilingReason::API, cb, callData, JSC::jsUndefined(), args, returnedException);
if (UNLIKELY(returnedException)) {
if (returnedException) [[unlikely]] {
// This should be impossible because it returns a promise.
return JSC::JSPromise::rejectedPromise(global, returnedException->value());
}

View File

@@ -11,7 +11,6 @@ const DevServer = @This();
pub const debug = bun.Output.Scoped(.DevServer, false);
pub const igLog = bun.Output.scoped(.IncrementalGraph, false);
pub const mapLog = bun.Output.scoped(.SourceMapStore, false);
const DebugHTTPServer = @import("../bun.js/api/server.zig").DebugHTTPServer;
pub const Options = struct {
/// Arena must live until DevServer.deinit()
@@ -113,7 +112,7 @@ watcher_atomics: WatcherAtomics,
/// and bundling times, where the test harness (bake-harness.ts) would not wait
/// long enough for processing to complete. Checking client logs, for example,
/// not only must wait on DevServer, but also wait on all connected WebSocket
/// clients to recieve their update, but also wait for those modules
/// clients to receive their update, but also wait for those modules
/// (potentially async) to finish loading.
///
/// To solve the first part of this, DevServer exposes a special WebSocket
@@ -134,7 +133,7 @@ testing_batch_events: union(enum) {
enable_after_bundle,
/// DevServer will not start new bundles, but instead write all files into
/// this `TestingBatch` object. Additionally, writes into this will signal
/// a message saying that new files have been seen. Once DevServer recieves
/// a message saying that new files have been seen. Once DevServer receives
/// that signal, or times out, it will "release" this batch.
enabled: TestingBatch,
},
@@ -441,7 +440,7 @@ pub fn init(options: Options) bun.JSOOM!*DevServer {
.memory_visualizer_timer = .initPaused(.DevServerMemoryVisualizerTick),
.has_pre_crash_handler = bun.FeatureFlags.bake_debugging_features and
options.dump_state_on_crash orelse
bun.getRuntimeFeatureFlag("BUN_DUMP_STATE_ON_CRASH"),
bun.getRuntimeFeatureFlag(.BUN_DUMP_STATE_ON_CRASH),
.frontend_only = options.framework.file_system_router_types.len == 0,
.client_graph = .empty,
.server_graph = .empty,
@@ -471,7 +470,7 @@ pub fn init(options: Options) bun.JSOOM!*DevServer {
else
true
else
bun.getRuntimeFeatureFlag("BUN_ASSUME_PERFECT_INCREMENTAL"),
bun.getRuntimeFeatureFlag(.BUN_ASSUME_PERFECT_INCREMENTAL),
.relative_path_buf_lock = .unlocked,
.testing_batch_events = .disabled,
.broadcast_console_log_from_browser_to_server = options.broadcast_console_log_from_browser_to_server,
@@ -1304,7 +1303,7 @@ fn ensureRouteIsBundled(
kind: DeferredRequest.Handler.Kind,
req: *Request,
resp: AnyResponse,
) bun.OOM!void {
) bun.JSError!void {
assert(dev.magic == .valid);
assert(dev.server != null);
sw: switch (dev.routeBundlePtr(route_bundle_index).server_state) {
@@ -1417,7 +1416,7 @@ fn ensureRouteIsBundled(
);
},
.loaded => switch (kind) {
.server_handler => dev.onFrameworkRequestWithBundle(route_bundle_index, .{ .stack = req }, resp),
.server_handler => try dev.onFrameworkRequestWithBundle(route_bundle_index, .{ .stack = req }, resp),
.bundled_html_page => dev.onHtmlRequestWithBundle(route_bundle_index, resp, bun.http.Method.which(req.method()) orelse .POST),
},
}
@@ -1526,7 +1525,7 @@ fn onFrameworkRequestWithBundle(
route_bundle_index: RouteBundle.Index,
req: bun.JSC.API.SavedRequest.Union,
resp: AnyResponse,
) void {
) bun.JSError!void {
const route_bundle = dev.routeBundlePtr(route_bundle_index);
assert(route_bundle.data == .framework);
const bundle = &route_bundle.data.framework;
@@ -1560,7 +1559,7 @@ fn onFrameworkRequestWithBundle(
if (route.file_layout != .none) n += 1;
route = dev.router.routePtr(route.parent.unwrap() orelse break);
}
const arr = JSValue.createEmptyArray(global, n);
const arr = try JSValue.createEmptyArray(global, n);
route = dev.router.routePtr(bundle.route_index);
var route_name = bun.String.createUTF8(dev.relativePath(keys[fromOpaqueFileId(.server, route.file_page.unwrap().?).get()]));
arr.putIndex(global, 0, route_name.transferToJS(global));
@@ -2124,7 +2123,7 @@ fn generateClientBundle(dev: *DevServer, route_bundle: *RouteBundle) bun.OOM![]u
return client_bundle;
}
fn generateCssJSArray(dev: *DevServer, route_bundle: *RouteBundle) bun.OOM!JSC.JSValue {
fn generateCssJSArray(dev: *DevServer, route_bundle: *RouteBundle) bun.JSError!JSC.JSValue {
assert(route_bundle.data == .framework); // a JSC.JSValue has no purpose, and therefore isn't implemented.
if (Environment.allow_assert) assert(!route_bundle.data.framework.cached_css_file_array.has());
assert(route_bundle.server_state == .loaded); // page is unfit to load
@@ -2144,7 +2143,7 @@ fn generateCssJSArray(dev: *DevServer, route_bundle: *RouteBundle) bun.OOM!JSC.J
try dev.traceAllRouteImports(route_bundle, &gts, .find_css);
const names = dev.client_graph.current_css_files.items;
const arr = JSC.JSArray.createEmpty(dev.vm.global, names.len);
const arr = try JSC.JSArray.createEmpty(dev.vm.global, names.len);
for (names, 0..) |item, i| {
var buf: [asset_prefix.len + @sizeOf(u64) * 2 + "/.css".len]u8 = undefined;
const path = std.fmt.bufPrint(&buf, asset_prefix ++ "/{s}.css", .{
@@ -2188,9 +2187,9 @@ fn traceAllRouteImports(dev: *DevServer, route_bundle: *RouteBundle, gts: *Graph
}
}
fn makeArrayForServerComponentsPatch(dev: *DevServer, global: *JSC.JSGlobalObject, items: []const IncrementalGraph(.server).FileIndex) JSValue {
fn makeArrayForServerComponentsPatch(dev: *DevServer, global: *JSC.JSGlobalObject, items: []const IncrementalGraph(.server).FileIndex) bun.JSError!JSValue {
if (items.len == 0) return .null;
const arr = JSC.JSArray.createEmpty(global, items.len);
const arr = try JSC.JSArray.createEmpty(global, items.len);
const names = dev.server_graph.bundled_files.keys();
for (items, 0..) |item, i| {
const str = bun.String.createUTF8(dev.relativePath(names[item.get()]));
@@ -2249,7 +2248,7 @@ pub fn finalizeBundle(
dev: *DevServer,
bv2: *bun.bundle_v2.BundleV2,
result: *const bun.bundle_v2.DevServerOutput,
) bun.OOM!void {
) bun.JSError!void {
assert(dev.magic == .valid);
var had_sent_hmr_event = false;
defer {
@@ -2540,8 +2539,8 @@ pub fn finalizeBundle(
dev.vm.global.toJSValue(),
&.{
server_modules,
dev.makeArrayForServerComponentsPatch(dev.vm.global, dev.incremental_result.client_components_added.items),
dev.makeArrayForServerComponentsPatch(dev.vm.global, dev.incremental_result.client_components_removed.items),
try dev.makeArrayForServerComponentsPatch(dev.vm.global, dev.incremental_result.client_components_added.items),
try dev.makeArrayForServerComponentsPatch(dev.vm.global, dev.incremental_result.client_components_removed.items),
},
) catch |err| {
// One module replacement error should NOT prevent follow-up
@@ -2903,7 +2902,7 @@ pub fn finalizeBundle(
switch (req.handler) {
.aborted => continue,
.server_handler => |saved| dev.onFrameworkRequestWithBundle(req.route_bundle_index, .{ .saved = saved }, saved.response),
.server_handler => |saved| try dev.onFrameworkRequestWithBundle(req.route_bundle_index, .{ .saved = saved }, saved.response),
.bundled_html_page => |ram| dev.onHtmlRequestWithBundle(req.route_bundle_index, ram.response, ram.method),
}
}
@@ -6044,7 +6043,7 @@ pub fn onWebSocketUpgrade(
dev: *DevServer,
res: anytype,
req: *Request,
upgrade_ctx: *uws.uws_socket_context_t,
upgrade_ctx: *uws.SocketContext,
id: usize,
) void {
assert(id == 0);
@@ -8497,8 +8496,6 @@ const BundleV2 = bun.bundle_v2.BundleV2;
const Chunk = bun.bundle_v2.Chunk;
const ContentHasher = bun.bundle_v2.ContentHasher;
const Define = bun.options.Define;
const uws = bun.uws;
const AnyWebSocket = uws.AnyWebSocket;
const Request = uws.Request;
@@ -8509,8 +8506,6 @@ const MimeType = bun.http.MimeType;
const JSC = bun.JSC;
const JSValue = JSC.JSValue;
const VirtualMachine = JSC.VirtualMachine;
const JSModuleLoader = JSC.JSModuleLoader;
const EventLoopHandle = JSC.EventLoopHandle;
const HTMLBundle = JSC.API.HTMLBundle;
const Plugin = JSC.API.JSBundler.Plugin;
const EventLoopTimer = bun.api.Timer.EventLoopTimer;
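The DevServer hunks above migrate two calling conventions at once: bun.getRuntimeFeatureFlag now takes an enum literal rather than a string, and JS array construction via JSValue.createEmptyArray is failable, so call sites propagate with try and the surrounding functions widen their return types from bun.OOM!... to bun.JSError!.... A minimal sketch of the new shape; the demoList helper and its body are illustrative only, not part of this diff:

// Hypothetical helper showing the migrated conventions (not from this diff).
fn demoList(dev: *DevServer, global: *JSC.JSGlobalObject, names: []const []const u8) bun.JSError!JSValue {
    // Feature flags are referenced as enum literals now, not strings.
    if (bun.getRuntimeFeatureFlag(.BUN_ASSUME_PERFECT_INCREMENTAL)) {
        // ...
    }
    // createEmptyArray can fail, so the error is propagated with `try`.
    const arr = try JSValue.createEmptyArray(global, names.len);
    for (names, 0..) |name, i| {
        var str = bun.String.createUTF8(dev.relativePath(name));
        arr.putIndex(global, @intCast(i), str.transferToJS(global));
    }
    return arr;
}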

View File

@@ -1094,14 +1094,14 @@ pub const JSFrameworkRouter = struct {
const validators = bun.JSC.Node.validators;
pub fn getBindings(global: *JSC.JSGlobalObject) JSC.JSValue {
return JSC.JSObject.create(.{
pub fn getBindings(global: *JSC.JSGlobalObject) bun.JSError!JSC.JSValue {
return (try JSC.JSObject.create(.{
.parseRoutePattern = global.createHostFunction("parseRoutePattern", parseRoutePattern, 1),
.FrameworkRouter = js.getConstructor(global),
}, global).toJS();
}, global)).toJS();
}
pub fn constructor(global: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) !*JSFrameworkRouter {
pub fn constructor(global: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!*JSFrameworkRouter {
const opts = callframe.argumentsAsArray(1)[0];
if (!opts.isObject())
return global.throwInvalidArguments("FrameworkRouter needs an object as it's first argument", .{});
@@ -1144,7 +1144,7 @@ pub const JSFrameworkRouter = struct {
InsertionContext.wrap(JSFrameworkRouter, jsfr),
);
if (jsfr.stored_parse_errors.items.len > 0) {
const arr = JSValue.createEmptyArray(global, jsfr.stored_parse_errors.items.len);
const arr = try JSValue.createEmptyArray(global, jsfr.stored_parse_errors.items.len);
for (jsfr.stored_parse_errors.items, 0..) |*item, i| {
arr.putIndex(
global,
@@ -1176,7 +1176,7 @@ pub const JSFrameworkRouter = struct {
var sfb = std.heap.stackFallback(4096, bun.default_allocator);
const alloc = sfb.get();
return JSC.JSObject.create(.{
return (try JSC.JSObject.create(.{
.params = if (params_out.params.len > 0) params: {
const obj = JSValue.createEmptyObject(global, params_out.params.len);
for (params_out.params.slice()) |param| {
@@ -1187,7 +1187,7 @@ pub const JSFrameworkRouter = struct {
break :params obj;
} else .null,
.route = try jsfr.routeToJsonInverse(global, index, alloc),
}, global).toJS();
}, global)).toJS();
}
return .null;
@@ -1204,7 +1204,7 @@ pub const JSFrameworkRouter = struct {
fn routeToJson(jsfr: *JSFrameworkRouter, global: *JSGlobalObject, route_index: Route.Index, allocator: Allocator) !JSValue {
const route = jsfr.router.routePtr(route_index);
return JSC.JSObject.create(.{
return (try JSC.JSObject.create(.{
.part = try partToJS(global, route.part, allocator),
.page = jsfr.fileIdToJS(global, route.file_page),
.layout = jsfr.fileIdToJS(global, route.file_layout),
@@ -1214,7 +1214,7 @@ pub const JSFrameworkRouter = struct {
var next = route.first_child.unwrap();
while (next) |r| : (next = jsfr.router.routePtr(r).next_sibling.unwrap())
len += 1;
const arr = JSValue.createEmptyArray(global, len);
const arr = try JSValue.createEmptyArray(global, len);
next = route.first_child.unwrap();
var i: u32 = 0;
while (next) |r| : (next = jsfr.router.routePtr(r).next_sibling.unwrap()) {
@@ -1223,12 +1223,12 @@ pub const JSFrameworkRouter = struct {
}
break :brk arr;
},
}, global).toJS();
}, global)).toJS();
}
fn routeToJsonInverse(jsfr: *JSFrameworkRouter, global: *JSGlobalObject, route_index: Route.Index, allocator: Allocator) !JSValue {
const route = jsfr.router.routePtr(route_index);
return JSC.JSObject.create(.{
return (try JSC.JSObject.create(.{
.part = try partToJS(global, route.part, allocator),
.page = jsfr.fileIdToJS(global, route.file_page),
.layout = jsfr.fileIdToJS(global, route.file_layout),
@@ -1237,7 +1237,7 @@ pub const JSFrameworkRouter = struct {
try routeToJsonInverse(jsfr, global, parent, allocator)
else
.null,
}, global).toJS();
}, global)).toJS();
}
pub fn finalize(this: *JSFrameworkRouter) void {
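The FrameworkRouter hunks follow the same migration: JSC.JSObject.create now returns an error union, so each call site becomes (try JSC.JSObject.create(...)).toJS() and the surrounding functions return bun.JSError!JSC.JSValue. A reduced sketch of the pattern; the function name and field below are illustrative, not from this diff:

// Hypothetical reduced example of the new create-then-toJS pattern.
fn bindingsSketch(global: *JSC.JSGlobalObject) bun.JSError!JSC.JSValue {
    // create() is failable now; wrap it in (try ...) before converting to a JSValue.
    return (try JSC.JSObject.create(.{
        .answer = JSValue.jsNumber(42), // illustrative field
    }, global)).toJS();
}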

View File

@@ -356,9 +356,9 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMa
pt.attach();
// Static site generator
const server_render_funcs = JSValue.createEmptyArray(global, router.types.len);
const server_param_funcs = JSValue.createEmptyArray(global, router.types.len);
const client_entry_urls = JSValue.createEmptyArray(global, router.types.len);
const server_render_funcs = try JSValue.createEmptyArray(global, router.types.len);
const server_param_funcs = try JSValue.createEmptyArray(global, router.types.len);
const client_entry_urls = try JSValue.createEmptyArray(global, router.types.len);
for (router.types, 0..) |router_type, i| {
if (router_type.client_file.unwrap()) |client_file| {
@@ -421,12 +421,12 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMa
str.* = (try bun.String.createFormat("{s}{s}", .{ public_path, output_file.dest_path })).toJS(global);
}
const route_patterns = JSValue.createEmptyArray(global, navigatable_routes.items.len);
const route_nested_files = JSValue.createEmptyArray(global, navigatable_routes.items.len);
const route_type_and_flags = JSValue.createEmptyArray(global, navigatable_routes.items.len);
const route_source_files = JSValue.createEmptyArray(global, navigatable_routes.items.len);
const route_param_info = JSValue.createEmptyArray(global, navigatable_routes.items.len);
const route_style_references = JSValue.createEmptyArray(global, navigatable_routes.items.len);
const route_patterns = try JSValue.createEmptyArray(global, navigatable_routes.items.len);
const route_nested_files = try JSValue.createEmptyArray(global, navigatable_routes.items.len);
const route_type_and_flags = try JSValue.createEmptyArray(global, navigatable_routes.items.len);
const route_source_files = try JSValue.createEmptyArray(global, navigatable_routes.items.len);
const route_param_info = try JSValue.createEmptyArray(global, navigatable_routes.items.len);
const route_style_references = try JSValue.createEmptyArray(global, navigatable_routes.items.len);
var params_buf: std.ArrayListUnmanaged([]const u8) = .{};
for (navigatable_routes.items, 0..) |route_index, nav_index| {
@@ -476,8 +476,8 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMa
}
// Fill styles and file_list
const styles = JSValue.createEmptyArray(global, css_chunks_count);
const file_list = JSValue.createEmptyArray(global, file_count);
const styles = try JSValue.createEmptyArray(global, css_chunks_count);
const file_list = try JSValue.createEmptyArray(global, file_count);
next = route.parent.unwrap();
file_count = 1;
@@ -523,7 +523,7 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMa
})));
if (params_buf.items.len > 0) {
const param_info_array = JSValue.createEmptyArray(global, params_buf.items.len);
const param_info_array = try JSValue.createEmptyArray(global, params_buf.items.len);
for (params_buf.items, 0..) |param, i| {
param_info_array.putIndex(global, @intCast(params_buf.items.len - i - 1), bun.String.createUTF8ForJS(global, param));
}
@@ -766,10 +766,10 @@ pub const PerThread = struct {
/// After initializing, call `attach`
pub fn init(vm: *VirtualMachine, opts: Options) !PerThread {
const loaded_files = try bun.bit_set.AutoBitSet.initEmpty(vm.allocator, opts.output_indexes.len);
var loaded_files = try bun.bit_set.AutoBitSet.initEmpty(vm.allocator, opts.output_indexes.len);
errdefer loaded_files.deinit(vm.allocator);
const all_server_files = JSValue.createEmptyArray(vm.global, opts.output_indexes.len);
const all_server_files = try JSValue.createEmptyArray(vm.global, opts.output_indexes.len);
all_server_files.protect();
return .{

View File

@@ -1,12 +1,10 @@
const bun = @import("bun");
const logger = bun.logger;
const std = @import("std");
const Fs = bun.fs;
const string = bun.string;
const Resolver = @import("../resolver//resolver.zig").Resolver;
const JSC = bun.JSC;
const JSGlobalObject = JSC.JSGlobalObject;
const strings = bun.strings;
const default_allocator = bun.default_allocator;
const ZigString = JSC.ZigString;
const JSValue = JSC.JSValue;
@@ -28,7 +26,7 @@ pub const BuildMessage = struct {
pub fn getNotes(this: *BuildMessage, globalThis: *JSC.JSGlobalObject) bun.JSError!JSC.JSValue {
const notes = this.msg.notes;
const array = JSC.JSValue.createEmptyArray(globalThis, notes.len);
const array = try JSC.JSValue.createEmptyArray(globalThis, notes.len);
for (notes, 0..) |note, i| {
const cloned = try note.clone(bun.default_allocator);
array.putIndex(

View File

@@ -5,9 +5,8 @@ pub fn mark(this: *Counters, comptime tag: Field) void {
@field(this, @tagName(tag)) +|= 1;
}
pub fn toJS(this: *const Counters, globalObject: *JSC.JSGlobalObject) JSC.JSValue {
const obj = JSC.JSObject.create(this.*, globalObject);
return obj.toJS();
pub fn toJS(this: *const Counters, globalObject: *JSC.JSGlobalObject) bun.JSError!JSC.JSValue {
return (try JSC.JSObject.create(this.*, globalObject)).toJS();
}
pub fn createCountersObject(globalObject: *JSC.JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSC.JSValue {

View File

@@ -2030,7 +2030,7 @@ fn dumpSourceString(vm: *VirtualMachine, specifier: string, written: []const u8)
fn dumpSourceStringFailiable(vm: *VirtualMachine, specifier: string, written: []const u8) !void {
if (!Environment.isDebug) return;
if (bun.getRuntimeFeatureFlag("BUN_DEBUG_NO_DUMP")) return;
if (bun.getRuntimeFeatureFlag(.BUN_DEBUG_NO_DUMP)) return;
const BunDebugHolder = struct {
pub var dir: ?std.fs.Dir = null;
@@ -2632,8 +2632,6 @@ pub const FetchFlags = enum {
}
};
const SavedSourceMap = JSC.SavedSourceMap;
pub const HardcodedModule = enum {
bun,
@"abort-controller",
@@ -3061,22 +3059,16 @@ export fn ModuleLoader__isBuiltin(data: [*]const u8, len: usize) bool {
}
const std = @import("std");
const StaticExport = @import("./bindings/static_export.zig");
const bun = @import("bun");
const string = bun.string;
const Output = bun.Output;
const Global = bun.Global;
const Environment = bun.Environment;
const strings = bun.strings;
const MutableString = bun.MutableString;
const stringZ = bun.stringZ;
const StoredFileDescriptorType = bun.StoredFileDescriptorType;
const Arena = @import("../allocators/mimalloc_arena.zig").Arena;
const Allocator = std.mem.Allocator;
const IdentityContext = @import("../identity_context.zig").IdentityContext;
const Fs = @import("../fs.zig");
const Resolver = @import("../resolver/resolver.zig");
const ast = @import("../import_record.zig");
const MacroEntryPoint = bun.transpiler.EntryPoints.MacroEntryPoint;
const ParseResult = bun.transpiler.ParseResult;
@@ -3086,15 +3078,11 @@ const options = @import("../options.zig");
const Transpiler = bun.Transpiler;
const PluginRunner = bun.transpiler.PluginRunner;
const js_printer = bun.js_printer;
const js_parser = bun.js_parser;
const js_ast = bun.JSAst;
const ImportKind = ast.ImportKind;
const Analytics = @import("../analytics/analytics_thread.zig");
const ZigString = bun.JSC.ZigString;
const Runtime = @import("../runtime.zig");
const Router = @import("./api/filesystem_router.zig");
const ImportRecord = ast.ImportRecord;
const DotEnv = @import("../env_loader.zig");
const PackageJSON = @import("../resolver/package_json.zig").PackageJSON;
const MacroRemap = @import("../resolver/package_json.zig").MacroMap;
const JSC = bun.JSC;
@@ -3102,22 +3090,8 @@ const JSValue = bun.JSC.JSValue;
const node_module_module = @import("./bindings/NodeModuleModule.zig");
const JSGlobalObject = bun.JSC.JSGlobalObject;
const ConsoleObject = bun.JSC.ConsoleObject;
const ZigException = bun.JSC.ZigException;
const ZigStackTrace = bun.JSC.ZigStackTrace;
const ResolvedSource = bun.JSC.ResolvedSource;
const JSPromise = bun.JSC.JSPromise;
const JSModuleLoader = bun.JSC.JSModuleLoader;
const JSPromiseRejectionOperation = bun.JSC.JSPromiseRejectionOperation;
const ErrorableZigString = bun.JSC.ErrorableZigString;
const VM = bun.JSC.VM;
const JSFunction = bun.JSC.JSFunction;
const Config = @import("./config.zig");
const URL = @import("../url.zig").URL;
const Bun = JSC.API.Bun;
const EventLoop = JSC.EventLoop;
const PendingResolution = @import("../resolver/resolver.zig").PendingResolution;
const ThreadSafeFunction = bun.api.napi.ThreadSafeFunction;
const PackageManager = @import("../install/install.zig").PackageManager;
const Install = @import("../install/install.zig");
const VirtualMachine = bun.JSC.VirtualMachine;

View File

@@ -15,7 +15,6 @@ const expected_version = 13;
const bun = @import("bun");
const std = @import("std");
const Output = bun.Output;
const JSC = bun.JSC;
const debug = Output.scoped(.cache, false);
const MINIMUM_CACHE_SIZE = 50 * 1024;

View File

@@ -438,7 +438,7 @@ pub fn loadExtraEnvAndSourceCodePrinter(this: *VirtualMachine) void {
this.hide_bun_stackframes = false;
}
if (bun.getRuntimeFeatureFlag("BUN_FEATURE_FLAG_DISABLE_ASYNC_TRANSPILER")) {
if (bun.getRuntimeFeatureFlag(.BUN_FEATURE_FLAG_DISABLE_ASYNC_TRANSPILER)) {
this.transpiler_store.enabled = false;
}
@@ -2023,11 +2023,6 @@ export fn Bun__VirtualMachine__setOverrideModuleRunMainPromise(vm: *VirtualMachi
}
}
export fn Bun__VirtualMachine__getWorker(vm: *VirtualMachine) ?*anyopaque {
const worker = vm.worker orelse return null;
return worker.cpp_worker;
}
pub fn reloadEntryPointForTestRunner(this: *VirtualMachine, entry_path: []const u8) !*JSInternalPromise {
this.has_loaded = false;
this.main = entry_path;
@@ -2489,8 +2484,8 @@ pub fn remapZigException(
) void {
error_instance.toZigException(this.global, exception);
const enable_source_code_preview = allow_source_code_preview and
!(bun.getRuntimeFeatureFlag("BUN_DISABLE_SOURCE_CODE_PREVIEW") or
bun.getRuntimeFeatureFlag("BUN_DISABLE_TRANSPILED_SOURCE_CODE_PREVIEW"));
!(bun.getRuntimeFeatureFlag(.BUN_DISABLE_SOURCE_CODE_PREVIEW) or
bun.getRuntimeFeatureFlag(.BUN_DISABLE_TRANSPILED_SOURCE_CODE_PREVIEW));
defer {
if (Environment.isDebug) {
@@ -3387,7 +3382,7 @@ pub const IPCInstance = struct {
Process__emitDisconnectEvent(vm.global);
event_loop.exit();
if (Environment.isPosix) {
uws.us_socket_context_free(0, this.context);
this.context.deinit(false);
}
vm.channel_ref.disable();
}
@@ -3417,7 +3412,7 @@ pub fn getIPCInstance(this: *VirtualMachine) ?*IPCInstance {
const instance = switch (Environment.os) {
else => instance: {
const context = uws.us_create_bun_nossl_socket_context(this.event_loop_handle.?, @sizeOf(usize)).?;
const context = uws.SocketContext.createNoSSLContext(this.event_loop_handle.?, @sizeOf(usize)).?;
IPC.Socket.configure(context, true, *IPC.SendQueue, IPC.IPCHandlers.PosixSocket);
var instance = IPCInstance.new(.{
@@ -3518,22 +3513,16 @@ const Async = bun.Async;
const Transpiler = bun.Transpiler;
const ImportWatcher = JSC.hot_reloader.ImportWatcher;
const MutableString = bun.MutableString;
const stringZ = bun.stringZ;
const default_allocator = bun.default_allocator;
const StoredFileDescriptorType = bun.StoredFileDescriptorType;
const ErrorableString = JSC.ErrorableString;
const Arena = @import("../allocators/mimalloc_arena.zig").Arena;
const Exception = JSC.Exception;
const Allocator = std.mem.Allocator;
const IdentityContext = @import("../identity_context.zig").IdentityContext;
const Fs = @import("../fs.zig");
const Resolver = @import("../resolver/resolver.zig");
const ast = @import("../import_record.zig");
const MacroEntryPoint = bun.transpiler.EntryPoints.MacroEntryPoint;
const ParseResult = bun.transpiler.ParseResult;
const logger = bun.logger;
const Api = @import("../api/schema.zig").Api;
const JSPrivateDataPtr = JSC.JSPrivateDataPtr;
const ConsoleObject = JSC.ConsoleObject;
const Node = JSC.Node;
const ZigException = JSC.ZigException;
@@ -3542,16 +3531,11 @@ const ErrorableResolvedSource = JSC.ErrorableResolvedSource;
const ResolvedSource = JSC.ResolvedSource;
const JSInternalPromise = JSC.JSInternalPromise;
const JSModuleLoader = JSC.JSModuleLoader;
const JSPromiseRejectionOperation = JSC.JSPromiseRejectionOperation;
const ErrorableZigString = JSC.ErrorableZigString;
const VM = JSC.VM;
const JSFunction = JSC.JSFunction;
const Config = @import("./config.zig");
const URL = @import("../url.zig").URL;
const Bun = JSC.API.Bun;
const EventLoop = JSC.EventLoop;
const PendingResolution = @import("../resolver/resolver.zig").PendingResolution;
const ThreadSafeFunction = bun.api.napi.ThreadSafeFunction;
const PackageManager = @import("../install/install.zig").PackageManager;
const IPC = @import("ipc.zig");
const DNSResolver = @import("api/bun/dns_resolver.zig").DNSResolver;

View File

@@ -38,6 +38,10 @@ pub const BunObject = struct {
pub const udpSocket = toJSCallback(host_fn.wrapStaticMethod(api.UDPSocket, "udpSocket", false));
pub const which = toJSCallback(Bun.which);
pub const write = toJSCallback(JSC.WebCore.Blob.writeFile);
pub const zstdCompressSync = toJSCallback(JSZstd.compressSync);
pub const zstdDecompressSync = toJSCallback(JSZstd.decompressSync);
pub const zstdCompress = toJSCallback(JSZstd.compress);
pub const zstdDecompress = toJSCallback(JSZstd.decompress);
// --- Callbacks ---
@@ -91,7 +95,7 @@ pub const BunObject = struct {
fn toJSGetter(comptime getter: anytype) LazyPropertyCallback {
return struct {
pub fn callback(this: *JSC.JSGlobalObject, object: *JSC.JSObject) callconv(JSC.conv) JSValue {
return @call(.always_inline, getter, .{ this, object });
return bun.jsc.toJSHostValue(this, getter(this, object));
}
}.callback;
}
@@ -168,7 +172,10 @@ pub const BunObject = struct {
@export(&BunObject.udpSocket, .{ .name = callbackName("udpSocket") });
@export(&BunObject.which, .{ .name = callbackName("which") });
@export(&BunObject.write, .{ .name = callbackName("write") });
@export(&BunObject.zstdCompressSync, .{ .name = callbackName("zstdCompressSync") });
@export(&BunObject.zstdDecompressSync, .{ .name = callbackName("zstdDecompressSync") });
@export(&BunObject.zstdCompress, .{ .name = callbackName("zstdCompress") });
@export(&BunObject.zstdDecompress, .{ .name = callbackName("zstdDecompress") });
// -- Callbacks --
}
};
@@ -1296,9 +1303,9 @@ pub fn getValkeyClientConstructor(globalThis: *JSC.JSGlobalObject, _: *JSC.JSObj
return JSC.API.Valkey.js.getConstructor(globalThis);
}
pub fn getEmbeddedFiles(globalThis: *JSC.JSGlobalObject, _: *JSC.JSObject) JSC.JSValue {
pub fn getEmbeddedFiles(globalThis: *JSC.JSGlobalObject, _: *JSC.JSObject) bun.JSError!JSC.JSValue {
const vm = globalThis.bunVM();
const graph = vm.standalone_module_graph orelse return JSC.JSValue.createEmptyArray(globalThis, 0);
const graph = vm.standalone_module_graph orelse return try JSC.JSValue.createEmptyArray(globalThis, 0);
const unsorted_files = graph.files.values();
var sort_indices = std.ArrayList(u32).initCapacity(bun.default_allocator, unsorted_files.len) catch bun.outOfMemory();
@@ -1313,7 +1320,7 @@ pub fn getEmbeddedFiles(globalThis: *JSC.JSGlobalObject, _: *JSC.JSObject) JSC.J
}
var i: u32 = 0;
var array = JSC.JSValue.createEmptyArray(globalThis, sort_indices.items.len);
var array = try JSC.JSValue.createEmptyArray(globalThis, sort_indices.items.len);
std.mem.sort(u32, sort_indices.items, unsorted_files, bun.StandaloneModuleGraph.File.lessThanByIndex);
for (sort_indices.items) |index| {
const file = &unsorted_files[index];
@@ -1716,6 +1723,287 @@ pub const JSZlib = struct {
}
};
pub const JSZstd = struct {
export fn deallocator(_: ?*anyopaque, ctx: ?*anyopaque) void {
comptime assert(bun.use_mimalloc);
bun.Mimalloc.mi_free(ctx);
}
inline fn getOptions(globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!struct { JSC.Node.StringOrBuffer, ?JSValue } {
const arguments = callframe.arguments();
const buffer_value = if (arguments.len > 0) arguments[0] else .undefined;
const options_val: ?JSValue =
if (arguments.len > 1 and arguments[1].isObject())
arguments[1]
else if (arguments.len > 1 and !arguments[1].isUndefined()) {
return globalThis.throwInvalidArguments("Expected options to be an object", .{});
} else null;
if (try JSC.Node.StringOrBuffer.fromJS(globalThis, bun.default_allocator, buffer_value)) |buffer| {
return .{ buffer, options_val };
}
return globalThis.throwInvalidArguments("Expected buffer to be a string or buffer", .{});
}
fn getLevel(globalThis: *JSGlobalObject, options_val: ?JSValue) bun.JSError!i32 {
if (options_val) |option_obj| {
if (try option_obj.get(globalThis, "level")) |level_val| {
const value = level_val.coerce(i32, globalThis);
if (globalThis.hasException()) return error.JSError;
if (value < 1 or value > 22) {
return globalThis.throwInvalidArguments("Compression level must be between 1 and 22", .{});
}
return value;
}
}
return 3;
}
inline fn getOptionsAsync(globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!struct { JSC.Node.StringOrBuffer, ?JSValue, i32 } {
const arguments = callframe.arguments();
const buffer_value = if (arguments.len > 0) arguments[0] else .undefined;
const options_val: ?JSValue =
if (arguments.len > 1 and arguments[1].isObject())
arguments[1]
else if (arguments.len > 1 and !arguments[1].isUndefined()) {
return globalThis.throwInvalidArguments("Expected options to be an object", .{});
} else null;
const level = try getLevel(globalThis, options_val);
const allow_string_object = true;
if (try JSC.Node.StringOrBuffer.fromJSMaybeAsync(globalThis, bun.default_allocator, buffer_value, true, allow_string_object)) |buffer| {
return .{ buffer, options_val, level };
}
return globalThis.throwInvalidArguments("Expected buffer to be a string or buffer", .{});
}
pub fn compressSync(globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue {
const buffer, const options_val = try getOptions(globalThis, callframe);
defer buffer.deinit();
const level = try getLevel(globalThis, options_val);
const input = buffer.slice();
const allocator = bun.default_allocator;
// Calculate max compressed size
const max_size = bun.zstd.compressBound(input.len);
var output = try allocator.alloc(u8, max_size);
// Perform compression with context
const compressed_size = switch (bun.zstd.compress(output, input, level)) {
.success => |size| size,
.err => |err| {
allocator.free(output);
return globalThis.ERR(.ZSTD, "{s}", .{err}).throw();
},
};
// Resize to actual compressed size
if (compressed_size < output.len) {
output = try allocator.realloc(output, compressed_size);
}
return JSC.JSValue.createBuffer(globalThis, output, bun.default_allocator);
}
pub fn decompressSync(globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue {
const buffer, _ = try getOptions(globalThis, callframe);
defer buffer.deinit();
const input = buffer.slice();
const allocator = bun.default_allocator;
// Try to get the decompressed size
const decompressed_size = bun.zstd.getDecompressedSize(input);
if (decompressed_size == std.math.maxInt(c_ulonglong) - 1 or decompressed_size == std.math.maxInt(c_ulonglong) - 2) {
// If size is unknown, we'll need to decompress in chunks
return globalThis.ERR(.ZSTD, "Decompressed size is unknown. Either the input is not a valid zstd compressed buffer or the decompressed size is too large. If you run into this error with a valid input, please file an issue at https://github.com/oven-sh/bun/issues", .{}).throw();
}
// Allocate output buffer based on decompressed size
var output = try allocator.alloc(u8, decompressed_size);
// Perform decompression
const actual_size = switch (bun.zstd.decompress(output, input)) {
.success => |actual_size| actual_size,
.err => |err| {
allocator.free(output);
return globalThis.ERR(.ZSTD, "{s}", .{err}).throw();
},
};
bun.debugAssert(actual_size <= output.len);
// mimalloc doesn't care about the self-reported size of the slice.
output.len = actual_size;
return JSC.JSValue.createBuffer(globalThis, output, bun.default_allocator);
}
// --- Async versions ---
pub const ZstdJob = struct {
buffer: JSC.Node.StringOrBuffer = JSC.Node.StringOrBuffer.empty,
is_compress: bool = true,
level: i32 = 3,
task: JSC.WorkPoolTask = .{ .callback = &runTask },
promise: JSC.JSPromise.Strong = .{},
vm: *JSC.VirtualMachine,
output: []u8 = &[_]u8{},
error_message: ?[]const u8 = null,
any_task: JSC.AnyTask = undefined,
poll: Async.KeepAlive = .{},
pub const new = bun.TrivialNew(@This());
pub fn runTask(task: *JSC.WorkPoolTask) void {
const job: *ZstdJob = @fieldParentPtr("task", task);
defer job.vm.enqueueTaskConcurrent(JSC.ConcurrentTask.create(job.any_task.task()));
const input = job.buffer.slice();
const allocator = bun.default_allocator;
if (job.is_compress) {
// Compression path
// Calculate max compressed size
const max_size = bun.zstd.compressBound(input.len);
job.output = allocator.alloc(u8, max_size) catch {
job.error_message = "Out of memory";
return;
};
// Perform compression
job.output = switch (bun.zstd.compress(job.output, input, job.level)) {
.success => |size| blk: {
// Resize to actual compressed size
if (size < job.output.len) {
break :blk allocator.realloc(job.output, size) catch {
job.error_message = "Out of memory";
return;
};
}
break :blk job.output;
},
.err => |err| {
allocator.free(job.output);
job.output = &[_]u8{};
job.error_message = err;
return;
},
};
} else {
// Decompression path
// Try to get the decompressed size
const decompressed_size = bun.zstd.getDecompressedSize(input);
if (decompressed_size == std.math.maxInt(c_ulonglong) - 1 or decompressed_size == std.math.maxInt(c_ulonglong) - 2) {
job.error_message = "Decompressed size is unknown. Either the input is not a valid zstd compressed buffer or the decompressed size is too large";
return;
}
// Allocate output buffer based on decompressed size
job.output = allocator.alloc(u8, decompressed_size) catch {
job.error_message = "Out of memory";
return;
};
// Perform decompression
switch (bun.zstd.decompress(job.output, input)) {
.success => |actual_size| {
if (actual_size < job.output.len) {
job.output.len = actual_size;
}
},
.err => |err| {
allocator.free(job.output);
job.output = &[_]u8{};
job.error_message = err;
return;
},
}
}
}
pub fn runFromJS(this: *ZstdJob) void {
defer this.deinit();
if (this.vm.isShuttingDown()) {
return;
}
const globalThis = this.vm.global;
const promise = this.promise.swap();
if (this.error_message) |err_msg| {
promise.reject(globalThis, globalThis.ERR(.ZSTD, "{s}", .{err_msg}).toJS());
return;
}
const output_slice = this.output;
const buffer_value = JSC.JSValue.createBuffer(globalThis, output_slice, bun.default_allocator);
if (globalThis.hasException()) {
promise.reject(globalThis, error.JSError);
return;
}
if (buffer_value == .zero) {
promise.reject(globalThis, ZigString.init("Failed to create buffer").toErrorInstance(globalThis));
return;
}
this.output = &[_]u8{};
promise.resolve(globalThis, buffer_value);
}
pub fn deinit(this: *ZstdJob) void {
this.poll.unref(this.vm);
this.buffer.deinitAndUnprotect();
this.promise.deinit();
bun.default_allocator.free(this.output);
bun.destroy(this);
}
pub fn create(vm: *JSC.VirtualMachine, globalThis: *JSC.JSGlobalObject, buffer: JSC.Node.StringOrBuffer, is_compress: bool, level: i32) *ZstdJob {
var job = ZstdJob.new(.{
.buffer = buffer,
.is_compress = is_compress,
.level = level,
.vm = vm,
.any_task = undefined,
});
job.promise = JSC.JSPromise.Strong.init(globalThis);
job.any_task = JSC.AnyTask.New(@This(), &runFromJS).init(job);
job.poll.ref(vm);
JSC.WorkPool.schedule(&job.task);
return job;
}
};
pub fn compress(globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue {
const buffer, _, const level = try getOptionsAsync(globalThis, callframe);
const vm = globalThis.bunVM();
var job = ZstdJob.create(vm, globalThis, buffer, true, level);
return job.promise.value();
}
pub fn decompress(globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue {
const buffer, _, _ = try getOptionsAsync(globalThis, callframe);
const vm = globalThis.bunVM();
var job = ZstdJob.create(vm, globalThis, buffer, false, 0); // level is ignored for decompression
return job.promise.value();
}
};
// const InternalTestingAPIs = struct {
// pub fn BunInternalFunction__syntaxHighlighter(globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue {
// const args = callframe.arguments_old(1);
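The JSZstd bindings above drive a bun.zstd wrapper through its sync API: compressBound sizes the worst-case output, compress(output, input, level) and decompress(output, input) return a .success/.err union, and getDecompressedSize reads the original size from the frame header (with level defaulting to 3 in getLevel). A minimal round-trip sketch using only those calls; the roundTrip helper itself is illustrative and, unlike decompressSync above, omits the unknown-size sentinel check:

// Illustrative round trip through the bun.zstd calls used by JSZstd (assumed wrapper, per this diff).
fn roundTrip(allocator: std.mem.Allocator, input: []const u8) ![]u8 {
    // Compress into a worst-case buffer, then shrink it to the bytes written.
    var compressed = try allocator.alloc(u8, bun.zstd.compressBound(input.len));
    defer allocator.free(compressed);
    const compressed_len = switch (bun.zstd.compress(compressed, input, 3)) {
        .success => |size| size,
        .err => return error.Zstd,
    };
    compressed = try allocator.realloc(compressed, compressed_len);

    // The zstd frame header reports the decompressed size, so the output
    // buffer can be allocated exactly.
    const output = try allocator.alloc(u8, bun.zstd.getDecompressedSize(compressed));
    errdefer allocator.free(output);
    const written = switch (bun.zstd.decompress(output, compressed)) {
        .success => |n| n,
        .err => return error.Zstd,
    };
    bun.debugAssert(written == output.len);
    return output;
}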
@@ -1753,84 +2041,39 @@ comptime {
const assert = bun.assert;
const conv = std.builtin.CallingConvention.Unspecified;
const S3File = @import("../webcore/S3File.zig");
const Bun = @This();
const default_allocator = bun.default_allocator;
const bun = @import("bun");
const uv = bun.windows.libuv;
const Environment = bun.Environment;
const Global = bun.Global;
const strings = bun.strings;
const string = bun.string;
const Output = bun.Output;
const MutableString = bun.MutableString;
const std = @import("std");
const Allocator = std.mem.Allocator;
const IdentityContext = @import("../../identity_context.zig").IdentityContext;
const Fs = @import("../../fs.zig");
const Resolver = @import("../../resolver/resolver.zig");
const ast = @import("../../import_record.zig");
const MacroEntryPoint = bun.transpiler.MacroEntryPoint;
const logger = bun.logger;
const Api = @import("../../api/schema.zig").Api;
const options = @import("../../options.zig");
const js_printer = bun.js_printer;
const js_parser = bun.js_parser;
const js_ast = bun.JSAst;
const NodeFallbackModules = @import("../../node_fallbacks.zig");
const ImportKind = ast.ImportKind;
const Analytics = @import("../../analytics/analytics_thread.zig");
const ZigString = bun.JSC.ZigString;
const Runtime = @import("../../runtime.zig");
const Router = @import("./filesystem_router.zig");
const ImportRecord = ast.ImportRecord;
const DotEnv = @import("../../env_loader.zig");
const ParseResult = bun.transpiler.ParseResult;
const PackageJSON = @import("../../resolver/package_json.zig").PackageJSON;
const MacroRemap = @import("../../resolver/package_json.zig").MacroMap;
const WebCore = bun.JSC.WebCore;
const Request = WebCore.Request;
const Response = WebCore.Response;
const Headers = WebCore.Headers;
const Fetch = WebCore.Fetch;
const JSC = bun.JSC;
const JSValue = bun.JSC.JSValue;
const JSGlobalObject = bun.JSC.JSGlobalObject;
const JSPrivateDataPtr = bun.JSC.JSPrivateDataPtr;
const ConsoleObject = bun.JSC.ConsoleObject;
const api = bun.api;
const node = bun.api.node;
const host_fn = bun.jsc.host_fn;
const ZigException = bun.JSC.ZigException;
const ZigStackTrace = bun.JSC.ZigStackTrace;
const ErrorableResolvedSource = bun.JSC.ErrorableResolvedSource;
const ResolvedSource = bun.JSC.ResolvedSource;
const JSPromise = bun.JSC.JSPromise;
const JSInternalPromise = bun.JSC.JSInternalPromise;
const JSModuleLoader = bun.JSC.JSModuleLoader;
const JSPromiseRejectionOperation = bun.JSC.JSPromiseRejectionOperation;
const ErrorableZigString = bun.JSC.ErrorableZigString;
const VM = bun.JSC.VM;
const JSFunction = bun.JSC.JSFunction;
const Config = @import("../config.zig");
const URL = @import("../../url.zig").URL;
const Transpiler = bun.JSC.API.JSTranspiler;
const JSBundler = bun.JSC.API.JSBundler;
const VirtualMachine = JSC.VirtualMachine;
const IOTask = JSC.IOTask;
const zlib = @import("../../zlib.zig");
const Which = @import("../../which.zig");
const ErrorableString = JSC.ErrorableString;
const glob = @import("../../glob.zig");
const Async = bun.Async;
const SemverObject = bun.Semver.SemverObject;
const Braces = @import("../../shell/braces.zig");
const Shell = @import("../../shell/shell.zig");
const Debugger = JSC.Debugger;
const HashObject = bun.api.HashObject;
const UnsafeObject = bun.api.UnsafeObject;
const TOMLObject = bun.api.TOMLObject;
const Timer = bun.api.Timer;
const FFIObject = bun.api.FFIObject;

View File

@@ -27,6 +27,7 @@ pub const xxHash3 = hashWrap(struct {
pub const murmur32v2 = hashWrap(std.hash.murmur.Murmur2_32);
pub const murmur32v3 = hashWrap(std.hash.murmur.Murmur3_32);
pub const murmur64v2 = hashWrap(std.hash.murmur.Murmur2_64);
pub const rapidhash = hashWrap(std.hash.RapidHash);
pub fn create(globalThis: *JSC.JSGlobalObject) JSC.JSValue {
const function = JSC.createCallback(globalThis, ZigString.static("hash"), 1, wyhash);
@@ -42,6 +43,7 @@ pub fn create(globalThis: *JSC.JSGlobalObject) JSC.JSValue {
"murmur32v2",
"murmur32v3",
"murmur64v2",
"rapidhash",
};
inline for (fns) |name| {
const value = JSC.createCallback(
@@ -138,7 +140,6 @@ const HashObject = @This();
const JSC = bun.JSC;
const JSValue = JSC.JSValue;
const JSGlobalObject = JSC.JSGlobalObject;
const JSObject = JSC.JSObject;
const std = @import("std");
const bun = @import("bun");
const ZigString = JSC.ZigString;

View File

@@ -1,7 +1,5 @@
const std = @import("std");
const Api = @import("../../api/schema.zig").Api;
const QueryStringMap = @import("../../url.zig").QueryStringMap;
const CombinedScanner = @import("../../url.zig").CombinedScanner;
const bun = @import("bun");
const string = bun.string;
const JSC = bun.JSC;
@@ -9,33 +7,17 @@ const WebCore = bun.webcore;
const Transpiler = bun.transpiler;
const options = @import("../../options.zig");
const resolve_path = @import("../../resolver/resolve_path.zig");
const ScriptSrcStream = std.io.FixedBufferStream([]u8);
const ZigString = JSC.ZigString;
const Fs = @import("../../fs.zig");
const JSObject = JSC.JSObject;
const JSValue = bun.JSC.JSValue;
const JSGlobalObject = JSC.JSGlobalObject;
const strings = bun.strings;
const JSError = bun.JSError;
const OOM = bun.OOM;
const Request = WebCore.Request;
const String = bun.String;
const FetchEvent = WebCore.FetchEvent;
const MacroMap = @import("../../resolver/package_json.zig").MacroMap;
const TSConfigJSON = @import("../../resolver/tsconfig_json.zig").TSConfigJSON;
const PackageJSON = @import("../../resolver/package_json.zig").PackageJSON;
const logger = bun.logger;
const Loader = options.Loader;
const Target = options.Target;
const JSAst = bun.JSAst;
const JSParser = bun.js_parser;
const JSPrinter = bun.js_printer;
const ScanPassResult = JSParser.ScanPassResult;
const Mimalloc = @import("../../allocators/mimalloc_arena.zig");
const Runtime = @import("../../runtime.zig").Runtime;
const JSLexer = bun.js_lexer;
const Expr = JSAst.Expr;
const Index = @import("../../ast/base.zig").Index;
const debug = bun.Output.scoped(.Transpiler, false);

Some files were not shown because too many files have changed in this diff.