Compare commits


381 Commits

Author SHA1 Message Date
Kai Tamkun
4b2351a12c Merge branch 'ben/fix-node-napi-tests' into kai/fix-node-napi-tests 2024-12-02 14:11:20 -08:00
Kai Tamkun
7ee91a9912 Merge branch 'main' into ben/fix-node-napi-tests 2024-12-02 12:44:04 -08:00
Michael H
56ad4cc4a6 simplify vscode extension title (#15519) 2024-12-02 06:29:07 -08:00
github-actions[bot]
d2acb2eac0 deps: update libdeflate to v1.22 (#15505)
Co-authored-by: Jarred-Sumner <Jarred-Sumner@users.noreply.github.com>
2024-11-30 07:26:45 -08:00
github-actions[bot]
de7eafbdd1 deps: update lshpack to v2.3.3 (#15501)
Co-authored-by: Jarred-Sumner <Jarred-Sumner@users.noreply.github.com>
2024-11-29 20:13:47 -08:00
github-actions[bot]
4114986c3e deps: update c-ares to v1.34.3 (#15502)
Co-authored-by: Jarred-Sumner <Jarred-Sumner@users.noreply.github.com>
2024-11-29 19:10:10 -08:00
dave caruso
8aa451c2dc bake(dev): plugins in dev server, with other fixes (#15467)
Co-authored-by: paperdave <paperdave@users.noreply.github.com>
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-11-29 19:07:56 -08:00
Michael H
497cef9759 bun upgrade --help document --stable option (#15472)
Co-authored-by: RiskyMH <RiskyMH@users.noreply.github.com>
2024-11-29 18:02:18 -08:00
imide
dd57b95546 Add musl related documentation to installation.md (#15500) 2024-11-29 18:01:44 -08:00
github-actions[bot]
ea7c4986d7 deps: update lolhtml to 4f8becea13a0021c8b71abd2dcc5899384973b66 (#15462)
Co-authored-by: Jarred-Sumner <Jarred-Sumner@users.noreply.github.com>
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-11-29 04:20:10 -08:00
Jarred Sumner
6c7edf2dbe bump 2024-11-29 04:10:01 -08:00
Jarred Sumner
bf2f153f5c Check for unix:// instead of unix: 2024-11-28 22:07:06 -08:00
Jarred Sumner
f64a4c4ace Fix debugger connection issue on Windows 2024-11-28 22:05:58 -08:00
Jarred Sumner
0216431c98 Clean up debugger waiting logic (#15469) 2024-11-28 01:34:31 -08:00
Jarred Sumner
ae289c4858 use using 2024-11-28 00:40:47 -08:00
Jarred Sumner
5d1609fe5c Fixes #15470 2024-11-28 00:40:17 -08:00
Ciro Spaciari
471fe7b886 fix(net/tls) fix reusePort, allowHalfOpen, FIN before reconnect (#15452)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-11-27 21:30:30 -08:00
Alistair Smith
08222eda71 fix: Connect with 1 socket to new env var but still work with js debug terminal (#15458) 2024-11-27 20:47:23 -08:00
github-actions[bot]
6f8c5959d0 deps: update sqlite to 3.470.100 (#15465)
Co-authored-by: Jarred-Sumner <Jarred-Sumner@users.noreply.github.com>
2024-11-27 20:39:38 -08:00
Jarred Sumner
40d5e745c9 Stagger the updates 2024-11-27 19:04:08 -08:00
Jarred Sumner
225bfd54fa Shorter branch names 2024-11-27 18:42:04 -08:00
Jarred Sumner
a6ca8c40d4 Add sqlite3 auto updater script 2024-11-27 18:36:41 -08:00
Jarred Sumner
b52ad226a5 Update actions 2024-11-27 18:08:23 -08:00
Jarred Sumner
5f8f805db9 Update update-libarchive.yml 2024-11-27 17:38:57 -08:00
Jarred Sumner
37c98bebd6 Update update-libarchive.yml 2024-11-27 17:36:32 -08:00
Jarred Sumner
bd01df19c1 github actions 2024-11-27 17:34:28 -08:00
Kai Tamkun
7fd16ebffa Fix incorrect public TS class field name minification (#15411) 2024-11-27 14:06:09 -08:00
Dennis Dudek
1bb211df56 bustDirCache on FileSystemRouter.reload & fix of dir_cache keys in windows (#15091)
Co-authored-by: dave caruso <me@paperdave.net>
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-11-27 12:21:32 -08:00
cdfzo
bdd0b89f16 docs: fix broken windows contributing guide url (#15451) 2024-11-27 13:01:39 -07:00
Jarred Sumner
841f593b12 Auto-update c-ares, libarchive, libdeflate, lolhtml, lshpack weekly (#15442) 2024-11-26 22:01:36 -08:00
Jarred Sumner
3afd19c73c Clean up .throwError (#15433) 2024-11-26 18:22:37 -08:00
Meghan Denny
b6a231add3 musl: fix test/js/bun/http/serve.test.ts (#15271)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-11-26 18:18:29 -08:00
Jarred Sumner
ca86bae5d5 Deflake next-build test (#15436) 2024-11-26 18:06:10 -08:00
Meghan Denny
215fdb4697 zig: make throwInvalidArgumentTypeValue use JSError (#15302)
Co-authored-by: Ciro Spaciari <ciro.spaciari@gmail.com>
2024-11-26 17:17:12 -08:00
snwy
578bdf1cd6 bake: params used when doing static site generation (#15430) 2024-11-26 16:58:14 -08:00
Ciro Spaciari
cf2fa30639 fix(fetch) fix deref + deinit (#15432) 2024-11-26 16:56:55 -08:00
Jarred Sumner
5b3c58bdf5 Update c-ares (#15435) 2024-11-26 16:55:53 -08:00
Michael H
0d6d4faa51 better printing for console.log types (#15404) 2024-11-26 14:27:39 -08:00
Meghan Denny
5e4642295a zig: eliminate errorUnionToCPP (#15416) 2024-11-26 14:11:48 -08:00
Kai Tamkun
68f026b3cd FFI: provide napi_env explicitly (#15431)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-11-26 13:54:24 -08:00
Meghan Denny
5e9563833d zig: fix missed compile error from merge 2024-11-26 13:35:32 -08:00
Nick Reilingh
6dd44cbeda Docs: cli/test.md - Completed GH Actions example (#15412) 2024-11-26 13:25:16 -08:00
Jarred Sumner
a9ce4d40c2 Add scratch*.{js,ts,tsx,mts,cts,mjs } to gitignore 2024-11-26 13:09:31 -08:00
Meghan Denny
663f00b62b zig: make throwOutOfMemory use JSError (#15413) 2024-11-26 12:58:43 -08:00
Kai Tamkun
769c6de751 Merge branch 'main' into ben/fix-node-napi-tests 2024-11-26 12:23:24 -08:00
Jarred Sumner
f21fffd1bf Fix debugger printing exception 2024-11-25 19:57:08 -08:00
Jarred Sumner
d92d8dc886 Bump 2024-11-25 19:18:46 -08:00
Kai Tamkun
c1a25d0948 Merge branch 'main' into ben/fix-node-napi-tests 2024-11-25 19:18:45 -08:00
Jarred Sumner
6d127ba3f4 Silence another debugger error 2024-11-25 19:13:56 -08:00
Jarred Sumner
c3d9e8c7af Fix crash in Bun v1.1.36 caused by VSCode extension update 2024-11-25 19:09:34 -08:00
Jarred Sumner
c25e744837 Silence debugger connection error 2024-11-25 19:02:56 -08:00
dave caruso
dc01a5d6a8 feat(DevServer): batch bundles & run them asynchronously (#15181)
Co-authored-by: Ashcon Partovi <ashcon@partovi.net>
2024-11-25 18:55:47 -08:00
Meghan Denny
c434b2c191 zig: make throwPretty use JSError (#15410) 2024-11-25 18:08:42 -08:00
Jarred Sumner
8ca0eb831d Clean up some error handling code (#15368)
Co-authored-by: Dylan Conway <dylan.conway567@gmail.com>
2024-11-25 15:42:02 -08:00
Ashcon Partovi
b19f13f5c4 bun-vscode: Bump version [no ci] 2024-11-25 15:19:56 -08:00
Meghan Denny
bb3d570ad0 zig: assert there is an exception when .zero is returned (#15362)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-11-25 15:19:02 -08:00
Kai Tamkun
1baa1b6975 Clean up missing globals code a bit 2024-11-25 13:18:42 -08:00
Kai Tamkun
1789364215 Add missing globals in napi tests 2024-11-25 12:59:52 -08:00
Jarred Sumner
a6f37b398c Fix bug with --eval & --print (#15379) 2024-11-25 12:58:30 -08:00
Kai Tamkun
bb33176924 Oops, again 2024-11-25 12:45:01 -08:00
Alistair Smith
39af2a0a56 Fix VSCode extension hanging (#15407) 2024-11-25 12:43:46 -08:00
Kai Tamkun
4a10bf22f7 Oops 2024-11-25 12:30:49 -08:00
Kai Tamkun
33d3732d44 Merge branch 'main' into ben/fix-node-napi-tests 2024-11-25 11:56:32 -08:00
Jarred Sumner
7f6bb30877 Fixes #15403 2024-11-25 04:59:04 -08:00
Jarred Sumner
812288eb72 [internal] Add problem matcher for Zig 2024-11-25 04:43:58 -08:00
Jarred Sumner
9cbe1ec300 Include docs/ folder in bun-types (#15398) 2024-11-25 00:12:28 -08:00
Jarred Sumner
4f8c1c9124 Does this make the tests less flaky (#15399) 2024-11-25 00:11:10 -08:00
Ashcon Partovi
468a392fd5 ci: Larger zig agents 2024-11-25 00:09:57 -07:00
Ashcon Partovi
f61f03fae3 cmake: Fix cross-compiling zig on alpine (#15400)
Co-authored-by: Electroid <Electroid@users.noreply.github.com>
2024-11-25 00:07:08 -07:00
Ashcon Partovi
a468d09064 ci: Fix typo 2024-11-24 23:38:59 -07:00
Ashcon Partovi
898feb886f ci: Temporarily run zig build on ephemeral agents 2024-11-24 23:37:18 -07:00
Lua MacDougall
c5cd0e4575 Bun.serve incorrect file for error page template (#15397) 2024-11-24 22:04:54 -08:00
Jarred Sumner
f4a0fe40aa Fixes #8683 (#15389) 2024-11-24 22:03:54 -08:00
imide
2d2e329ee3 Update installation.md (#15392) 2024-11-24 16:53:39 -07:00
Christian Rotzoll
618d2cb3ac docs: clarify concurrency behavior in WAL mode (#15382) 2024-11-24 00:56:40 -08:00
Jarred Sumner
6c915fc1d0 Cherry-pick WebKit/WebKit#37039 (#15380) 2024-11-23 23:39:42 -08:00
Jarred Sumner
aa60ab3b65 Delete incorrect assertion 2024-11-23 04:35:41 -08:00
Alistair Smith
f855ae8618 VSCode in-editor error messages (readme updates) (#15325)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-11-22 22:36:15 -08:00
Jarred Sumner
514a47cb54 Slightly more complete undici polyfill (#15360) 2024-11-22 22:01:52 -08:00
Kai Tamkun
1a1cf0a4d7 Fix setRawMode return value on Windows (#15357) 2024-11-22 20:28:22 -08:00
advaith
9fbe64619b Remove outdated todo comment from Windows install script (#15358) 2024-11-22 20:25:38 -08:00
Kai Tamkun
78861829c9 Fix setRawMode return value on Windows 2024-11-22 17:54:37 -08:00
Ashcon Partovi
642e0ba73c cmake: Remove unused code that causes issues with commit messages 2024-11-22 17:33:42 -08:00
Ciro Spaciari
19d7a5fe53 fix(CI) make prisma avoid env url because of CI and rely on getSecret (#15352) 2024-11-22 15:23:39 -08:00
Ciro Spaciari
c04a2d1dfc fix regression on http2-wrapper caused by node.js compatibility improvements on net (#15318) 2024-11-22 15:22:35 -08:00
Meghan Denny
82cb82d828 pm: add some missing npm_ env vars (#14786)
Co-authored-by: Dylan Conway <35280289+dylan-conway@users.noreply.github.com>
2024-11-22 15:13:32 -08:00
Ciro Spaciari
4ae982be4e fix(CI) mark inspect test as todo and comment why we mark this as todo (#15354) 2024-11-22 15:02:26 -08:00
Jarred Sumner
2d65063571 Stub performance.markResourceTiming, add PerformanceResourceTiming, PerformanceServerTiming (#15341) 2024-11-22 14:14:05 -08:00
Kai Tamkun
e817928981 Reenable Worker test in js-native-api/test_instance_data/test.js 2024-11-22 13:22:17 -08:00
Grigory
746cf2cf01 feat(resolver): add support for self-referencing (#15284)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-11-22 04:48:02 -08:00
Jarred Sumner
9c1fde0132 Rewrite most of napi_threadsafe_function (#15309)
Co-authored-by: Ben Grant <ben@bun.sh>
2024-11-22 04:44:52 -08:00
Jarred Sumner
f8f76a6fe0 CSS fixes & fuzzing (#15312) 2024-11-22 04:41:10 -08:00
Alistair Smith
4117af6e46 feat(vscode-extension) error reporting, qol (#15261)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
Co-authored-by: Ashcon Partovi <ashcon@partovi.net>
Co-authored-by: Electroid <Electroid@users.noreply.github.com>
Co-authored-by: Meghan Denny <meghan@bun.sh>
Co-authored-by: Dylan Conway <dylan.conway567@gmail.com>
2024-11-22 02:55:21 -08:00
Jarred Sumner
5bcaf32ba3 Fix lockfile print crash (#15332) 2024-11-22 02:07:11 -08:00
Jarred Sumner
d01bfb5aa2 Ensure test with errors before JS execution exit with code 1 (#15321) 2024-11-22 01:33:58 -08:00
pfg
78b495aff5 fix \uFFFF printing regression (#15330)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-11-21 22:01:27 -08:00
Ciro Spaciari
6adb3954fe fix(ReadableStream) flush as much we can before ending the stream (#15324) 2024-11-21 20:16:43 -08:00
Jarred Sumner
b152fbefcd Remove a test.only 2024-11-21 17:49:54 -08:00
Ciro Spaciari
8c0c97a273 fix(ws) ping without parameters (#15319) 2024-11-21 17:48:50 -08:00
pfg
95fcee8b76 Fix expect toMatchSnapshot not working for some strings (#15183) 2024-11-21 17:46:45 -08:00
Meghan Denny
c3f63bcdc4 zig: make throwInvalidArguments use JSError (#15305) 2024-11-21 16:19:13 -08:00
Kai Tamkun
3fc6ad4982 Increase napi timeout 2024-11-21 12:28:01 -08:00
Kai Tamkun
7a623fe3e8 Print compilation progress in node-napi.test.ts 2024-11-21 12:15:49 -08:00
Kai Tamkun
f78ac6344b Merge branch 'ben/fix-node-napi-tests' of github.com:oven-sh/bun into ben/fix-node-napi-tests 2024-11-21 12:13:18 -08:00
Jarred Sumner
2283ed098f Remove Amazon Linux 2023 tests for now 2024-11-21 02:52:56 -08:00
Michael H
43dcb8fce1 docs: --bail [n] -> --bail=[n] (#15301) 2024-11-20 21:46:57 -07:00
Ciro Spaciari
0eb6a4c55e fix(Bun.file) throw OOM if read is too big (#15253) 2024-11-20 19:56:00 -08:00
Ben Grant
94260398b0 Add Prisma leak test from #15289 2024-11-20 19:44:10 -08:00
Kai Tamkun
81690617c0 Revert "Maybe increase the timeout with setDefaultTimeout too"
This reverts commit 30eda1eca9.
2024-11-20 19:24:47 -08:00
Kai Tamkun
30eda1eca9 Maybe increase the timeout with setDefaultTimeout too 2024-11-20 19:12:52 -08:00
Pham Minh Triet
144db9ca52 Fix typo in 15276.test.ts (#15304) 2024-11-20 19:11:12 -08:00
Jarred Sumner
a6a4ca1e49 fix(install): ensure aliases hash map is initialized (#15280)
Co-authored-by: Dylan Conway <dylan.conway567@gmail.com>
Co-authored-by: Dylan Conway <35280289+dylan-conway@users.noreply.github.com>
2024-11-20 18:52:34 -08:00
Kai Tamkun
e0414d0890 Allow ten minutes for compiling node-napi tests 2024-11-20 18:39:50 -08:00
Kai Tamkun
b191968681 Skip test_callback_scope/test.js on Windows 2024-11-20 18:24:24 -08:00
Meghan Denny
314b4d9b44 fix fuzzy-wuzzy test (#15297) 2024-11-20 17:21:27 -08:00
Kai Tamkun
2406936f33 Merge branch 'ben/fix-node-napi-tests' of github.com:oven-sh/bun into ben/fix-node-napi-tests 2024-11-20 16:54:54 -08:00
Kai Tamkun
24a3f96359 Flush stdout explicitly in test_cleanup_hook 2024-11-20 16:54:48 -08:00
Meghan Denny
0e3e33072b zig: rename CallFrame.arguments to .arguments_old to free up decl name (#15296) 2024-11-20 16:18:56 -08:00
Ciro Spaciari
3681aa9f0a fix(root_cert) use a more reliable source for the latest cert (#15262) 2024-11-20 15:57:35 -08:00
Meghan Denny
c9d0fd51a9 zig: make throwTODO use JSError (#15264)
Co-authored-by: Dylan Conway <35280289+dylan-conway@users.noreply.github.com>
2024-11-20 15:16:51 -08:00
Meghan Denny
4fe8b71437 ci: bootstrap.sh: musl download of bun no longer has to be special-cased (#15265) 2024-11-20 13:31:06 -08:00
Meghan Denny
1efab7f42d zig: JSValue: make .get and .toSliceOrNull use JSError (#15270) 2024-11-20 13:26:41 -08:00
Meghan Denny
61a3f08595 bindings: make throwInvalidArgumentTypeValue print the value like the real ERR_INVALID_ARG_TYPE (#14804) 2024-11-19 22:35:25 -08:00
Meghan Denny
363595fd31 bunjs: print received value when Bun.write is passed a bad argument (#14805) 2024-11-19 22:34:41 -08:00
Meghan Denny
173f67d81e zig: make throwError use JSError (#15267) 2024-11-19 22:21:02 -08:00
Meghan Denny
05d5ab7489 ci: disable testing on debian 10 2024-11-19 20:32:08 -08:00
Meghan Denny
b7bd5a4cf5 zig: remove noop JSGlobalObject.ptr() (#15268) 2024-11-19 19:45:40 -08:00
Ashcon Partovi
ab4da13785 ci: Disable changed files detection until bugs are fixed 2024-11-19 20:44:06 -07:00
Meghan Denny
ab3cb68f66 zig: make throwNotEnoughArguments use JSError (#15266) 2024-11-19 19:07:14 -08:00
Meghan Denny
795f14c1d1 zig: align getTruthy to use JSError (#15199) 2024-11-19 18:46:08 -08:00
Ashcon Partovi
708ed00705 ci: Expand automated build images to Debian, Ubuntu, and Amazon Linux (#15250) 2024-11-19 19:31:15 -07:00
Ben Grant
7a73f14da7 Fix napi_threadsafe_function memory leak with fixed queue size 2024-11-19 18:11:23 -08:00
Ben Grant
6ba0563d2d Add test for napi_threadsafe_function memory leak 2024-11-19 18:11:22 -08:00
Kai Tamkun
dec572eb4b Merge branch 'ben/fix-node-napi-tests' of github.com:oven-sh/bun into ben/fix-node-napi-tests 2024-11-19 15:55:57 -08:00
Kai Tamkun
01bbe3070a Disable part of js-native-api/test_instance_data/test.js 2024-11-19 15:55:05 -08:00
Jarred Sumner
ff4eccc3b4 bump 2024-11-19 15:53:26 -08:00
Ashcon Partovi
ededc168cf Bun v1.1.36 [release] 2024-11-19 14:28:20 -08:00
Ashcon Partovi
46c750fc12 Bun v1.1.36 [release] 2024-11-19 14:27:46 -08:00
Ben Grant
b0a30ca422 Fix leak in napi_threadsafe_function 2024-11-19 13:30:21 -08:00
Kai Tamkun
1e649b4976 Merge branch 'main' into ben/fix-node-napi-tests 2024-11-19 12:19:00 -08:00
Meghan Denny
fc94db1efb ci: changedFiles can be undefined 2024-11-19 02:23:54 -08:00
Meghan Denny
958e531cc5 ci: always build images when core ci files change (#15229) 2024-11-19 02:19:56 -08:00
Meghan Denny
206d2edf12 docker:alpine: update to 3.20 and use bun musl build (#15241) 2024-11-19 00:57:40 -08:00
Meghan Denny
ecb0098b89 us_bun_verify_error_t: ensure c struct matches zig extern (#15244) 2024-11-19 00:52:38 -08:00
Meghan Denny
ba767aa5ba Revert "fix(tls) fix type matching" (#15243) 2024-11-19 00:08:25 -08:00
Kai Tamkun
46515d4865 Call Bun__onExit + std.os.windows.kernel32.ExitProcess to exit on Windows (#15237)
Co-authored-by: Dylan Conway <dylan.conway567@gmail.com>
2024-11-18 22:58:19 -08:00
Jarred Sumner
3ef35d746a Implement junit test reporter (#15205)
Co-authored-by: Jarred-Sumner <Jarred-Sumner@users.noreply.github.com>
Co-authored-by: Dylan Conway <dylan.conway567@gmail.com>
Co-authored-by: Dylan Conway <35280289+dylan-conway@users.noreply.github.com>
2024-11-18 20:50:42 -08:00
Ashcon Partovi
daece6a0ed Revert "cmake: Set explicit rustc target"
This reverts commit cba3bda8ec.
2024-11-18 20:04:55 -08:00
Jarred Sumner
adaee07138 [Bun.sql] Support TLS (#15217)
Co-authored-by: Ciro Spaciari <ciro.spaciari@gmail.com>
2024-11-18 19:38:23 -08:00
pfg
8a0666acd1 Fix setTimeout with node:util.promisify (#15230) 2024-11-18 19:29:55 -08:00
pfg
fd1d6b10d4 Fix docs on todo tests (#15233) 2024-11-18 19:28:28 -08:00
Ciro Spaciari
d19c18580b fix(tls) fix type matching (#15224) 2024-11-18 19:23:27 -08:00
Ashcon Partovi
f8e9adeb64 ci: Do not check changed files on main 2024-11-18 18:52:02 -08:00
Zack Radisic
3c95d5d011 Fix bundler crash with onLoad plugins on copy-file loaders used on entrypoints (#15231) 2024-11-18 18:50:01 -08:00
Jarred Sumner
9ad3471fb0 Support Headers & URLSearchParams in expect().toEqual() (#15195)
Co-authored-by: Meghan Denny <meghan@bun.sh>
2024-11-18 18:49:29 -08:00
Ashcon Partovi
cba3bda8ec cmake: Set explicit rustc target 2024-11-18 18:33:28 -08:00
Ashcon Partovi
5b1808b90b Revert "Ensure that lolhtml builds the target platform"
This reverts commit b023bb805b.
2024-11-18 18:08:54 -08:00
Ashcon Partovi
b023bb805b Ensure that lolhtml builds the target platform 2024-11-18 17:59:01 -08:00
Dennis Dudek
98bb5999a3 Fixed Responses to OPTIONS Requests ignore Body (#15108) 2024-11-18 17:55:50 -08:00
Kai Tamkun
5949777ec3 Or maybe this might 2024-11-18 16:45:32 -08:00
Kai Tamkun
7f9935a560 Will this fix the Windows timeout problem? 2024-11-18 16:35:54 -08:00
Kai Tamkun
437d333978 Actually never mind on that last one 2024-11-18 16:18:45 -08:00
Kai Tamkun
c38eca222e Node NAPI tests: get clang(++) from $PATH in CI 2024-11-18 16:12:46 -08:00
Kai Tamkun
d93122a656 Typedef napi_env in napi_with_version.h and then use it in the node_api_basic_env typedef 2024-11-18 16:12:07 -08:00
Kai Tamkun
b02fb2463f Add Bun__crashHandler binding 2024-11-18 14:50:33 -08:00
Kai Tamkun
ba5490dafc Change abort exit code on Windows 2024-11-18 14:43:43 -08:00
Kai Tamkun
3e085b5540 Cast argument to quick_exit in crash() 2024-11-18 13:11:40 -08:00
Kai Tamkun
73e98663bb Merge branch 'main' into ben/fix-node-napi-tests 2024-11-18 13:00:54 -08:00
Kai Tamkun
d09050127f Change Windows crash behavior 2024-11-18 12:09:15 -08:00
Pham Minh Triet
d5a118e25f Fix(doc): update cluster.md (#15214) 2024-11-18 03:04:36 -08:00
Ciro Spaciari
1911fa1e75 fix(HttpParser) always check if content length is valid before calling requestHandler (#15179) 2024-11-16 19:41:59 -08:00
Meghan Denny
6dbf1bff4f musl: fix test/js/node/process/process.test.js (#15185) 2024-11-16 02:57:20 -08:00
Jarred Sumner
a5a0539f26 Fixes #15177 (#15180) 2024-11-16 02:18:13 -08:00
Meghan Denny
3393b0e1d3 musl: fix third_party/prisma.test.ts (#15186) 2024-11-16 01:44:53 -08:00
Dylan Conway
910efec0b7 fix auto-install on windows when symlinks aren't available (#15182) 2024-11-16 00:43:12 -08:00
Meghan Denny
dafd8156b0 ci: skip running tests on main branch 2024-11-15 22:18:55 -08:00
Meghan Denny
befb269b2d zig: align fromJS methods to using JSError (#15165) 2024-11-15 22:14:18 -08:00
Ashcon Partovi
39d8ade27c ci: musl builds (#15154)
Co-authored-by: Electroid <Electroid@users.noreply.github.com>
Co-authored-by: Meghan Denny <meghan@bun.sh>
2024-11-15 21:01:55 -08:00
Meghan Denny
4fedc41545 musl: fix 'bun upgrade' (#15178) 2024-11-15 20:58:23 -08:00
Kai Tamkun
eef79ce772 Skip test_worker_buffer_callback/test-free-called.js 2024-11-15 20:08:26 -08:00
Kai Tamkun
cf960b5c17 Change how GC is detected 2024-11-15 14:46:24 -08:00
dave caruso
15f2bbb33a docs: remove contributing instructions involving winget (#15176) 2024-11-15 13:06:51 -08:00
Jarred Sumner
4ddb63e7e2 Try linker script (#15158) 2024-11-15 13:02:10 -08:00
Ben Grant
6603871617 Add logs for NAPI refs and handle scopes 2024-11-15 12:56:52 -08:00
Ben Grant
e5c5033790 Use clang 16 to compile Node NAPI tests on CI linux 2024-11-15 12:56:52 -08:00
Grigory
3791146476 docs(contributing): group os-specific code tabs (#15173) 2024-11-15 12:50:28 -08:00
Kai Tamkun
07252d1755 Add missing include 2024-11-15 12:40:00 -08:00
ippsav
910e479d29 Fix node:net not handling path in listen (#15162)
Co-authored-by: Ciro Spaciari <ciro.spaciari@gmail.com>
2024-11-15 10:35:14 -08:00
Kai Tamkun
d9c8f27bf9 Merge branch 'ben/fix-node-napi-tests' into kai/fix-node-napi-tests 2024-11-14 21:36:05 -08:00
Kai Tamkun
28830f0294 Don't defer finalizers if not in GC 2024-11-14 21:35:23 -08:00
Meghan Denny
266e033d6f node:https: fix prototype chain of Agent (#15160)
Co-authored-by: Ciro Spaciari <ciro.spaciari@gmail.com>
2024-11-14 19:03:16 -08:00
Ashcon Partovi
9a6f033206 ci: Fix changed files detection on forks 2024-11-14 18:34:13 -08:00
Kai Tamkun
2aee62382f Remove async cleanup hooks from the list after they're called, not before 2024-11-14 17:57:52 -08:00
Kai Tamkun
4103b738ff Report NAPI assertion failures more forcefully 2024-11-14 17:56:14 -08:00
Meghan Denny
2810f39802 zig: make all JS constructors use JSError (#15146)
Co-authored-by: Ciro Spaciari <ciro.spaciari@gmail.com>
2024-11-14 16:36:01 -08:00
Ben Grant
f73ef54edd Compile Node tests using Node 23 headers 2024-11-14 15:59:02 -08:00
Kai Tamkun
f9718af6a5 Make checkGC fail if running a finalizer during napi env cleanup 2024-11-14 15:17:28 -08:00
Kai Tamkun
f50114332f Fix incorrect calling convention usage 2024-11-14 15:09:26 -08:00
Ben Grant
90852a37d5 Use correct path separators on Windows 2024-11-14 14:41:20 -08:00
Ben Grant
2afb5e635d Skip compiling Node NAPI tests that we skip running 2024-11-14 14:24:52 -08:00
Michael H
3170b88058 fix vscode debugger (#14995) 2024-11-14 14:24:18 -08:00
Kai Tamkun
134f66c24d Undo accidental formatting 2024-11-14 13:32:08 -08:00
Kai Tamkun
f37df906b4 Add typedef for node_api_basic_env in napi tests 2024-11-14 13:28:52 -08:00
Jarred Sumner
357581c61a Shrink Bun's binary by 3.5 MB (#15121) 2024-11-14 06:02:15 -08:00
pfg
d8987ccdb8 Remove assertion in js printer triggering for unicode comments (#15143) 2024-11-14 00:14:43 -08:00
Meghan Denny
fdd8d35845 allow zig js host functions to return JSError (#15120) 2024-11-13 21:11:56 -08:00
Kai Tamkun
ed1f25e5cc Skip node-api/test_async_cleanup_hook/test.js because it uses libuv functions 2024-11-13 19:59:38 -08:00
Kai Tamkun
2646ea0956 Fix async cleanup hooks? 2024-11-13 19:53:10 -08:00
Kai Tamkun
9fa480ce9b Fix env cleanup hooks 2024-11-13 19:26:56 -08:00
Kai Tamkun
83a2c245f3 Merge branch 'ben/fix-node-napi-tests' into kai/fix-node-napi-tests 2024-11-13 18:36:45 -08:00
dave caruso
32ddf343ee bake: csr, streaming ssr, serve integration, safer jsvalue functions, &more (#14900)
Co-authored-by: paperdave <paperdave@users.noreply.github.com>
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-11-13 18:19:12 -08:00
Ben Grant
e11a68315b Fix compile error 2024-11-13 17:30:25 -08:00
Meghan Denny
bceb0a2327 ci: fix release script (#15129) 2024-11-13 18:29:14 -07:00
Ben Grant
f439dacf21 Merge branch 'main' into ben/fix-node-napi-tests 2024-11-13 17:27:12 -08:00
Meghan Denny
9b0cdf01f9 cpp: Bun::toStringRef: return dead when exception has been thrown (#15127)
Co-authored-by: Dylan Conway <35280289+dylan-conway@users.noreply.github.com>
2024-11-13 17:03:59 -08:00
Ben Grant
544dd2497c Increase timeout 2024-11-13 16:59:45 -08:00
Ben Grant
0e7ed996d3 Remove stray import 2024-11-13 16:17:53 -08:00
Meghan Denny
35513a9d6d zig: remove JSValue.isEmpty (#15128) 2024-11-13 16:04:13 -08:00
Ben Grant
bb8b46507e Remove workaround for #15111 2024-11-13 15:53:13 -08:00
Kai Tamkun
06d37bf644 Actually call instance data finalizers 2024-11-13 15:32:16 -08:00
Meghan Denny
f8979b05b1 rid nearly all use of ExceptionRef in zig (#15100)
Co-authored-by: nektro <nektro@users.noreply.github.com>
2024-11-13 15:23:52 -08:00
Kai Tamkun
6dd369e66b Merge branch 'ben/fix-node-napi-tests' into kai/fix-node-napi-tests 2024-11-13 15:23:27 -08:00
Ben Grant
8358f4dc73 Merge branch 'main' into ben/fix-node-napi-tests 2024-11-13 15:20:55 -08:00
Ben Grant
9dbe40ddba Compile tests in parallel 2024-11-13 15:20:43 -08:00
Kai Tamkun
f54f4e6ebf Remove stray iostream include 2024-11-13 15:19:54 -08:00
Kai Tamkun
adc00e0566 Don't defer finalizers if the VM is shutting down 2024-11-13 15:11:06 -08:00
ippsav
ec91e91fda Pass missing signal code for child_process.spawnSync (#15137) 2024-11-13 15:07:43 -08:00
Kai Tamkun
d3b509e80a Merge branch 'ben/fix-node-napi-tests' into kai/fix-node-napi-tests 2024-11-13 14:43:48 -08:00
Kai Tamkun
b11d631e41 Ensure NapiRef finalizers are called by the time the env is cleaned up 2024-11-13 14:42:28 -08:00
190n
440111f924 bun run prettier:extra 2024-11-13 22:01:31 +00:00
Ben Grant
ccd72755dc Remove console log 2024-11-13 14:00:07 -08:00
Ben Grant
59700068d3 Merge branch 'main' into ben/fix-node-napi-tests 2024-11-13 13:55:03 -08:00
Ben Grant
ac8c6f093b Run some of Node's Node-API tests in CI 2024-11-13 13:54:45 -08:00
Meghan Denny
956853f036 test: dont overwrite root package.json when running bun-ipc-inherit.test.ts (#15126) 2024-11-13 00:14:57 -08:00
Kai Tamkun
a8fa566101 Fix beforeOnExit being dispatched multiple times 2024-11-12 22:43:05 -08:00
Kai Tamkun
7b25ce15eb Merge branch 'ben/fix-node-napi-tests' into kai/fix-node-napi-tests 2024-11-12 21:28:02 -08:00
Kai Tamkun
c20c0dea92 Undo the previous commit and add a TODO about Web Worker termination 2024-11-12 21:01:48 -08:00
Kai Tamkun
86d4dbe143 Treat a call to process.exit() inside napi_call_function as if it threw an exception 2024-11-12 20:29:04 -08:00
Dylan Conway
c5df329772 #15059 follow-up (#15118) 2024-11-12 18:17:35 -08:00
Ciro Spaciari
e945146fde fix(bundler) fix pretty path resolution (#15119) 2024-11-12 18:16:13 -08:00
190n
83f536f4da Make parameter const 2024-11-12 17:57:14 -08:00
Ciro Spaciari
873b0a7540 fix(socket) Support named pipes on Windows using forward slashes (#15112) 2024-11-12 16:09:43 -08:00
Kai Tamkun
d11a48398d Merge branch 'ben/fix-node-napi-tests' into kai/fix-node-napi-tests 2024-11-12 14:43:54 -08:00
Kai Tamkun
563b3c0339 Address more feedback 2024-11-12 14:38:35 -08:00
Dennis Dudek
c785ab921b ci: Fix detection of changed files (#15114)
Co-authored-by: Ashcon Partovi <ashcon@partovi.net>
2024-11-12 15:27:27 -07:00
Kai Tamkun
6e7240b6e7 Derefcountify NapiFinalizer 2024-11-12 14:08:38 -08:00
Kai Tamkun
2335e35a86 Remove defer parameter from NapiRef constructor 2024-11-12 13:57:50 -08:00
Kai Tamkun
bd45a65f2b Fix napi module file URIs 2024-11-12 13:45:02 -08:00
Kai Tamkun
7993f4fa11 Address some feedback 2024-11-12 13:44:37 -08:00
Kai Tamkun
09a6a11a14 Don't needlessly tick the event loop if the napi finalizer queue is empty 2024-11-12 12:59:49 -08:00
Kai Tamkun
c17e05c191 Better use of types in FFI.h 2024-11-12 12:58:12 -08:00
Kai Tamkun
9ea9925e9c Rename napiEnv FFI symbol to avoid potential collisions 2024-11-12 12:06:45 -08:00
Ben Grant
469be87987 Merge branch 'main' into ben/fix-node-napi-tests 2024-11-12 12:00:07 -08:00
Ben Grant
9490c30d47 Remove stray include 2024-11-12 11:53:18 -08:00
Kai Tamkun
1d8423ea57 Allow null data in napi_async_work 2024-11-12 11:46:52 -08:00
Kai Tamkun
0bee1c9b5d Don't create napi_envs for FFI unless actually needed 2024-11-12 11:45:40 -08:00
Meghan Denny
797958082c musl patches [v4] (#15066) 2024-11-11 19:23:58 -08:00
Ben Grant
3a71be377e Use node_api_post_finalize in napi-app 2024-11-11 17:24:34 -08:00
Ben Grant
1de2319526 Update TODO in napi-app 2024-11-11 17:23:02 -08:00
Ben Grant
8b5fb349dd Assert there is an env when calling external finalizer 2024-11-11 16:31:58 -08:00
Jarred Sumner
2b9abc20da Use linux syscall interface more in I/O (#15067) 2024-11-11 14:47:04 -08:00
Jarred Sumner
d713001e35 Fixes #14982 2024-11-11 14:40:11 -08:00
Jarred Sumner
b49f6d143e Postgres client - more progress (#15086) 2024-11-11 14:40:02 -08:00
pfg
4cf9851747 Bump runtime transpiler cache version for #15009 (#15094) 2024-11-11 14:38:17 -08:00
Ben Grant
657f5b9f6a Set WebKit version to merge commit instead of commit on branch of oven-sh/WebKit#68 2024-11-11 13:40:47 -08:00
Ben Grant
86e421ad80 Merge branch 'main' into ben/fix-node-napi-tests 2024-11-11 13:40:14 -08:00
pfg
56f7c8887b Fix unicode imports, unicode-escaped variable names, and printClauseAlias not working for utf-8 (#15009) 2024-11-11 13:27:42 -08:00
Ciro Spaciari
62cabe9003 fix(tests) new grpc certs (#15090) 2024-11-11 13:00:58 -08:00
Kai Tamkun
e34673ca45 Fix test_reference_by_node_api_version 2024-11-08 18:07:06 -08:00
Kai Tamkun
7c13e637b8 Update WebKit version 2024-11-08 17:46:44 -08:00
Kai Tamkun
9f3b0f754b Adjust some NAPI error messages to match Node equivalents 2024-11-08 15:07:50 -08:00
Kai Tamkun
07a391368f Escape NAPI file:// URIs 2024-11-08 15:07:13 -08:00
Kai Tamkun
c30ef2ccc8 Fix more missing includes in napi-app 2024-11-07 17:03:20 -08:00
Kai Tamkun
855b7101e6 Use node_api_post_finalizer in napi-app and add some missing includes 2024-11-07 16:39:13 -08:00
Kai Tamkun
fceeb228a8 Don't dlclose if node_api_module_get_api_version_v1 is missing 2024-11-07 16:17:10 -08:00
Kai Tamkun
85dcebedd7 Fix typedef in napi-app 2024-11-07 15:40:56 -08:00
Kai Tamkun
0f29267a3e Add node_api_get_module_file_name 2024-11-07 15:16:22 -08:00
Kai Tamkun
a152557096 Fix napi_env usage in FFI 2024-11-07 15:16:06 -08:00
Kai Tamkun
80b742665e Add node_api_create_buffer_from_arraybuffer 2024-11-06 14:21:58 -08:00
Kai Tamkun
7978505b94 Fix napi_get_buffer_info return code 2024-11-06 14:21:39 -08:00
Kai Tamkun
0dce7366e2 oops 2024-11-05 17:18:56 -08:00
Kai Tamkun
71f3089f4d Defer external (array)buffer finalizers if necessary 2024-11-05 17:18:11 -08:00
Kai Tamkun
07a217f773 Fix napi_get_version misunderstanding 2024-11-05 16:43:12 -08:00
Kai Tamkun
d4b710287f Clear NAPI error after native function invocations 2024-11-05 15:21:15 -08:00
Kai Tamkun
3296a6edc9 Fix napi_instanceof 2024-11-05 13:32:07 -08:00
Kai Tamkun
ab92fc5fab Add a typedef for node_api_basic_env in napi-app 2024-11-05 11:43:01 -08:00
Kai Tamkun
f5dc0498f4 Fix some formatting 2024-11-05 11:41:17 -08:00
Kai Tamkun
1c06dbd3ef Fix multiple dispatch of beforeExit 2024-11-05 10:58:22 -08:00
Kai Tamkun
a6d707a74e Don't require node_api_module_get_api_version_v1, default to version 8 2024-11-04 20:12:44 -08:00
Kai Tamkun
ce469474d8 Use WebKit branch kai/inherited-property-names 2024-11-04 19:54:07 -08:00
Kai Tamkun
c659b3b7d3 Require key to be a string or symbol in napi_has_own_property 2024-11-04 15:59:24 -08:00
Kai Tamkun
a60ae54751 Merge branch 'ben/fix-node-napi-tests' of github.com:oven-sh/bun into ben/fix-node-napi-tests 2024-11-04 15:47:32 -08:00
Kai Tamkun
3303a5de1f Improve finalizer support 2024-11-04 15:47:23 -08:00
Ben Grant
d8557ea982 Merge branch 'main' into ben/fix-node-napi-tests 2024-11-04 09:49:35 -08:00
Ben Grant
e3b5927e73 Work on NAPI leak tests 2024-11-01 19:09:49 -07:00
Ben Grant
066b1dacc4 Fix errors from rebase 2024-11-01 18:36:46 -07:00
Ben Grant
7a20f515f7 Fix NAPI property tests 2024-11-01 18:30:47 -07:00
Ben Grant
afd023ac95 Fix remaining error in NAPI property functions 2024-11-01 18:30:47 -07:00
Jarred Sumner
1d5da9ef77 Fixes #11754 (#14948) 2024-11-01 18:30:47 -07:00
Jarred Sumner
7110c073ca Inline process.versions.bun in bun build --compile (#14940) 2024-11-01 18:30:47 -07:00
Ashcon Partovi
03d945ee05 Run tests from npm packages, elysia to start (#14932) 2024-11-01 18:30:47 -07:00
190n
08116e43f4 Fix napi property methods on non-objects (#14935) 2024-11-01 18:30:45 -07:00
Dylan Conway
7fab6701e5 Redact secrets in bunfig.toml and npmrc logs (#14919) 2024-11-01 18:23:06 -07:00
Dylan Conway
30fe8d5258 fix(install): only globally link requested packages (#12506)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-11-01 18:23:06 -07:00
Jarred Sumner
a8a2403568 Add bytesWritten property to Bun.Socket, fix encoding issue in node:net (#14516)
Co-authored-by: Ciro Spaciari <ciro.spaciari@gmail.com>
2024-11-01 18:23:06 -07:00
Jarred Sumner
664c080d02 Fixes #14918 (#14921) 2024-11-01 18:23:06 -07:00
Ashcon Partovi
b5ed0f028f ci: If only tests change, use artifacts from last successful build (#14927) 2024-11-01 18:23:06 -07:00
Jarred Sumner
1293039002 Clean up some code in node validators (#14897) 2024-11-01 18:23:06 -07:00
Kai Tamkun
8bb8193a39 Fix use after free 2024-11-01 16:55:07 -07:00
Kai Tamkun
dffc718b6a Merge branch 'ben/fix-node-napi-tests' of github.com:oven-sh/bun into ben/fix-node-napi-tests 2024-11-01 16:36:15 -07:00
Kai Tamkun
f15059face Handle exceptions in NAPI finalizers as uncaught exceptions 2024-11-01 16:36:01 -07:00
Ben Grant
2d96ec0e21 Fix napi_set_property 2024-11-01 15:47:31 -07:00
Ben Grant
b6dfd89928 Remove reference to nonexistent test 2024-11-01 15:42:46 -07:00
Ben Grant
936ae5a796 Fix and test edge cases calling NAPI constructors 2024-11-01 15:32:18 -07:00
Ben Grant
fe18b871f8 Move napi_wrap lifetime test to existing test suite 2024-11-01 15:32:16 -07:00
Kai Tamkun
a1c4240940 Integrate #12660 into bun/fix-node-napi-tests 2024-11-01 11:58:48 -07:00
Kai Tamkun
dc4177f113 Keep napi_envs separate, keep track of them, associate module info
This is so we can check the NAPI version per module and adjust behavior accordingly.
2024-10-31 20:54:10 -07:00
Kai Tamkun
d2c4a9a57e Fix napi_get_all_property_names not including inherited properties when desired 2024-10-31 16:45:17 -07:00
Kai Tamkun
fb6a48a35f Fix property-related NAPI methods 2024-10-31 15:16:42 -07:00
Ben Grant
5176ab58bc Test napi_set_property and napi_set_named_property 2024-10-31 13:21:26 -07:00
Ben Grant
a669ff1243 Build napi-app in debug mode 2024-10-31 12:19:55 -07:00
Ben Grant
91a52311de Merge branch 'main' into ben/fix-node-napi-tests 2024-10-31 12:16:56 -07:00
Kai Tamkun
197a26fc16 Fix some confusion over the "length" parameter in NapiClass 2024-10-31 11:21:31 -07:00
Kai Tamkun
059185f4ba Fix napi_get_new_target 2024-10-31 10:58:49 -07:00
Kai Tamkun
838ca008cd Replace NAPIFunction with NapiClass 2024-10-30 18:55:27 -07:00
Kai Tamkun
8a0a88cd42 Merge branch 'ben/fix-node-napi-tests' of github.com:oven-sh/bun into ben/fix-node-napi-tests 2024-10-30 12:59:05 -07:00
Kai Tamkun
774bb8923d Fix node_api_symbol_for behavior for null description parameters 2024-10-30 12:55:47 -07:00
Ben Grant
8b19e08882 Rename wrap-lifetime-test.mjs 2024-10-30 12:24:32 -07:00
Ben Grant
84c4f96b3f Merge branch 'main' into ben/fix-node-napi-tests 2024-10-30 10:01:32 -07:00
Ben Grant
73579e1254 Support filtering by property descriptor in napi_get_all_property_names 2024-10-29 18:18:04 -07:00
Kai Tamkun
19b0fed84f Include all properties except enums in napi_get_property_names, and ensure that the key is a name in napi_has_own_property 2024-10-29 15:24:58 -07:00
Kai Tamkun
699997826f Make napi_create_external_arraybuffer produce a non-shared ArrayBuffer 2024-10-29 14:00:41 -07:00
Kai Tamkun
528d9a64e8 Merge branch 'main' into ben/fix-node-napi-tests 2024-10-29 13:37:59 -07:00
Kai Tamkun
39b442b664 Move napi_create_typedarray from napi.zig to napi.cpp to produce RangeError exceptions 2024-10-29 13:37:29 -07:00
Kai Tamkun
249227d4d6 Fix NAPI string creation bugs 2024-10-28 19:26:19 -07:00
Ben Grant
71101e1fa3 Fix NAPI bugs 2024-10-28 17:43:11 -07:00
Ben Grant
c28d419b25 Reset last NAPI error before calling into a native module 2024-10-28 14:03:59 -07:00
Ben Grant
3587391920 Add napi_type_tag_object and napi_check_type_tag 2024-10-25 18:43:19 -07:00
Ben Grant
43d7cfcb23 Fix NAPI tests compiling on Windows 2024-10-25 18:42:53 -07:00
Ben Grant
3ba398f482 Stress test napi_wrap and napi_external 2024-10-25 16:56:54 -07:00
Ben Grant
600bc1cbd2 Fix napi_ref finalizers 2024-10-25 16:56:33 -07:00
Ben Grant
f71b440c4d Add filename to napi_log 2024-10-25 16:54:55 -07:00
Ben Grant
d29e72f89c Add missing #include 2024-10-25 10:08:44 -07:00
Ben Grant
e04f461508 Work on leak testing for NAPI wrap/ref/external 2024-10-24 18:33:55 -07:00
Ben Grant
bdcca417ef Refine NAPI tests 2024-10-24 18:05:51 -07:00
Ben Grant
b8aba83da6 napi_wrap fixes 2024-10-24 18:02:28 -07:00
Ben Grant
b753e4b38b Fix providing class's data pointer to method without data pointer 2024-10-23 18:46:20 -07:00
Ben Grant
a7bc53b92c Split NAPI tests out of the huge C++ file 2024-10-23 18:29:48 -07:00
Ben Grant
b2080c88f4 JS exceptions instead of assertions in napi tests 2024-10-23 15:02:26 -07:00
Ben Grant
710f7790cf Delete NapiPrototype::napiRef 2024-10-23 15:01:58 -07:00
Ben Grant
c44eb732ee Do not propagate nullptr out of NAPIFunction::call 2024-10-23 11:24:52 -07:00
Ben Grant
e5e643d8bc Merge branch 'main' into ben/fix-node-napi-tests 2024-10-23 10:19:02 -07:00
Ben Grant
d612cfff12 Misc CallFrame fixes 2024-10-22 19:07:29 -07:00
Ben Grant
020c32bc73 clangd config for NAPI tests 2024-10-22 18:59:35 -07:00
Ben Grant
a240093a97 Pass data pointer to NAPI constructors 2024-10-21 11:21:48 -07:00
Ben Grant
e5ffd66649 Merge branch 'main' into ben/fix-node-napi-tests 2024-10-21 10:18:05 -07:00
Ben Grant
5bae294c14 More (self-contained for now) napi tests 2024-10-18 18:43:22 -07:00
Ben Grant
ea1ddb2740 napi_wrap WIP + rip out NAPICallFrame tagging 2024-10-18 18:42:51 -07:00
Ben Grant
6d1db2c8e9 WIP supporting napi_wrap for more values 2024-10-17 17:56:06 -07:00
Ben Grant
8c571d8949 Merge branch 'main' into ben/fix-node-napi-tests 2024-10-16 18:49:21 -07:00
Ben Grant
7be1bf3026 Fix napi_create_dataview 2024-10-16 17:26:33 -07:00
Ben Grant
2d0e0c9195 Set error code in Zig napi functions 2024-10-16 17:02:57 -07:00
Ben Grant
b773e66d67 Move napi_get_value_string_{latin1,utf16} to C++ 2024-10-16 17:02:25 -07:00
Ben Grant
2fee09fc4d Change sizeof division to std::size 2024-10-16 14:00:57 -07:00
Ben Grant
216e5b3f96 Fix napi_coerce_to_* 2024-10-15 18:40:54 -07:00
Ben Grant
ed4175b80e Arg checking in napi_define_class 2024-10-15 16:17:45 -07:00
Ben Grant
a0c2a73730 Fix napi_create_bigint_words error handling 2024-10-15 16:10:32 -07:00
Ben Grant
1649c03824 Merge branch 'main' into ben/fix-node-napi-tests 2024-10-15 14:59:57 -07:00
Ben Grant
49b2de93d0 Set last error in all C++ napi functions 2024-10-15 13:59:58 -07:00
Ben Grant
ef4728c267 WIP convert napi.cpp functions to set the last error 2024-10-14 18:40:20 -07:00
Ben Grant
6a440aa946 Merge branch 'main' into ben/fix-node-napi-tests 2024-10-14 11:28:30 -07:00
Ben Grant
6169f1053a Fix bugs with napi_define_class 2024-10-11 19:10:49 -07:00
Ben Grant
85f617f97e Fix bugs in napi_create_class and napi_get_value_bigint_* 2024-10-11 18:45:58 -07:00
Ben Grant
23dc0fed71 Test that threadsafe function finalizers run on the next tick 2024-10-04 15:24:01 -07:00
Ben Grant
afcf7b1eb6 Route all finalizers through NapiFinalizer 2024-10-04 15:23:23 -07:00
Ben Grant
2583f33a33 Make NAPI tests allocate in the finalizer 2024-10-04 11:08:37 -07:00
Ben Grant
dfa2a6b60b Fix tsfn finalizers 2024-10-03 18:48:25 -07:00
Ben Grant
e66ec2a10b Merge branch 'main' into jarred/napi-2 2024-10-03 17:56:38 -07:00
Dylan Conway
eb8d465c50 Merge branch 'main' into jarred/napi-2 2024-07-22 21:14:24 -07:00
Jarred-Sumner
418139358b Apply formatting changes 2024-07-19 08:20:27 +00:00
Jarred Sumner
1f5359705e Defer finalization for napi callbacks to the immediate task queue 2024-07-19 01:09:31 -07:00
834 changed files with 75101 additions and 32318 deletions

832
.buildkite/ci.mjs Normal file → Executable file

File diff suppressed because it is too large


@@ -164,7 +164,9 @@ function upload_s3_file() {
 function send_bench_webhook() {
   if [ -z "$BENCHMARK_URL" ]; then
-    return 1
+    echo "error: \$BENCHMARK_URL is not set"
+    # exit 1 # TODO: this isn't live yet
+    return
   fi
   local tag="$1"
@@ -200,6 +202,12 @@ function create_release() {
     bun-linux-x64-profile.zip
     bun-linux-x64-baseline.zip
     bun-linux-x64-baseline-profile.zip
+    bun-linux-aarch64-musl.zip
+    bun-linux-aarch64-musl-profile.zip
+    bun-linux-x64-musl.zip
+    bun-linux-x64-musl-profile.zip
+    bun-linux-x64-musl-baseline.zip
+    bun-linux-x64-musl-baseline-profile.zip
     bun-windows-x64.zip
     bun-windows-x64-profile.zip
     bun-windows-x64-baseline.zip

92
.github/workflows/update-cares.yml vendored Normal file

@@ -0,0 +1,92 @@
name: Update c-ares
on:
  schedule:
    - cron: "0 4 * * 0"
  workflow_dispatch:
jobs:
  check-update:
    runs-on: ubuntu-latest
    permissions:
      contents: write
      pull-requests: write
    steps:
      - uses: actions/checkout@v4
      - name: Check c-ares version
        id: check-version
        run: |
          set -euo pipefail
          # Extract the commit hash from the line after COMMIT
          CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildCares.cmake)
          if [ -z "$CURRENT_VERSION" ]; then
            echo "Error: Could not find COMMIT line in BuildCares.cmake"
            exit 1
          fi
          # Validate that it looks like a git hash
          if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
            echo "Error: Invalid git hash format in BuildCares.cmake"
            echo "Found: $CURRENT_VERSION"
            echo "Expected: 40 character hexadecimal string"
            exit 1
          fi
          echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
          LATEST_RELEASE=$(curl -sL https://api.github.com/repos/c-ares/c-ares/releases/latest)
          if [ -z "$LATEST_RELEASE" ]; then
            echo "Error: Failed to fetch latest release from GitHub API"
            exit 1
          fi
          LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
          if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
            echo "Error: Could not extract tag name from GitHub API response"
            exit 1
          fi
          LATEST_SHA=$(curl -sL "https://api.github.com/repos/c-ares/c-ares/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
          if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
            echo "Error: Could not fetch SHA for tag $LATEST_TAG"
            exit 1
          fi
          if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
            echo "Error: Invalid SHA format received from GitHub"
            echo "Found: $LATEST_SHA"
            echo "Expected: 40 character hexadecimal string"
            exit 1
          fi
          echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
          echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT
      - name: Update version if needed
        if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
        run: |
          set -euo pipefail
          # Handle multi-line format where COMMIT and its value are on separate lines
          sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildCares.cmake
      - name: Create Pull Request
        if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
        uses: peter-evans/create-pull-request@v4
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          add-paths: |
            cmake/targets/BuildCares.cmake
          commit-message: "deps: update c-ares to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
          title: "deps: update c-ares to ${{ steps.check-version.outputs.tag }}"
          delete-branch: true
          branch: deps/update-cares-${{ github.run_number }}
          body: |
            ## What does this PR do?
            Updates c-ares to version ${{ steps.check-version.outputs.tag }}
            Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-cares.yml)

92
.github/workflows/update-libarchive.yml vendored Normal file

@@ -0,0 +1,92 @@
name: Update libarchive
on:
  schedule:
    - cron: "0 3 * * 0"
  workflow_dispatch:
jobs:
  check-update:
    runs-on: ubuntu-latest
    permissions:
      contents: write
      pull-requests: write
    steps:
      - uses: actions/checkout@v4
      - name: Check libarchive version
        id: check-version
        run: |
          set -euo pipefail
          # Extract the commit hash from the line after COMMIT
          CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildLibArchive.cmake)
          if [ -z "$CURRENT_VERSION" ]; then
            echo "Error: Could not find COMMIT line in BuildLibArchive.cmake"
            exit 1
          fi
          # Validate that it looks like a git hash
          if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
            echo "Error: Invalid git hash format in BuildLibArchive.cmake"
            echo "Found: $CURRENT_VERSION"
            echo "Expected: 40 character hexadecimal string"
            exit 1
          fi
          echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
          LATEST_RELEASE=$(curl -sL https://api.github.com/repos/libarchive/libarchive/releases/latest)
          if [ -z "$LATEST_RELEASE" ]; then
            echo "Error: Failed to fetch latest release from GitHub API"
            exit 1
          fi
          LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
          if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
            echo "Error: Could not extract tag name from GitHub API response"
            exit 1
          fi
          LATEST_SHA=$(curl -sL "https://api.github.com/repos/libarchive/libarchive/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
          if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
            echo "Error: Could not fetch SHA for tag $LATEST_TAG"
            exit 1
          fi
          if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
            echo "Error: Invalid SHA format received from GitHub"
            echo "Found: $LATEST_SHA"
            echo "Expected: 40 character hexadecimal string"
            exit 1
          fi
          echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
          echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT
      - name: Update version if needed
        if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
        run: |
          set -euo pipefail
          # Handle multi-line format where COMMIT and its value are on separate lines
          sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildLibArchive.cmake
      - name: Create Pull Request
        if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
        uses: peter-evans/create-pull-request@v4
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          add-paths: |
            cmake/targets/BuildLibArchive.cmake
          commit-message: "deps: update libarchive to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
          title: "deps: update libarchive to ${{ steps.check-version.outputs.tag }}"
          delete-branch: true
          branch: deps/update-libarchive-${{ github.run_number }}
          body: |
            ## What does this PR do?
            Updates libarchive to version ${{ steps.check-version.outputs.tag }}
            Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-libarchive.yml)

92
.github/workflows/update-libdeflate.yml vendored Normal file

@@ -0,0 +1,92 @@
name: Update libdeflate
on:
  schedule:
    - cron: "0 2 * * 0"
  workflow_dispatch:
jobs:
  check-update:
    runs-on: ubuntu-latest
    permissions:
      contents: write
      pull-requests: write
    steps:
      - uses: actions/checkout@v4
      - name: Check libdeflate version
        id: check-version
        run: |
          set -euo pipefail
          # Extract the commit hash from the line after COMMIT
          CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildLibDeflate.cmake)
          if [ -z "$CURRENT_VERSION" ]; then
            echo "Error: Could not find COMMIT line in BuildLibDeflate.cmake"
            exit 1
          fi
          # Validate that it looks like a git hash
          if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
            echo "Error: Invalid git hash format in BuildLibDeflate.cmake"
            echo "Found: $CURRENT_VERSION"
            echo "Expected: 40 character hexadecimal string"
            exit 1
          fi
          echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
          LATEST_RELEASE=$(curl -sL https://api.github.com/repos/ebiggers/libdeflate/releases/latest)
          if [ -z "$LATEST_RELEASE" ]; then
            echo "Error: Failed to fetch latest release from GitHub API"
            exit 1
          fi
          LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
          if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
            echo "Error: Could not extract tag name from GitHub API response"
            exit 1
          fi
          LATEST_SHA=$(curl -sL "https://api.github.com/repos/ebiggers/libdeflate/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
          if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
            echo "Error: Could not fetch SHA for tag $LATEST_TAG"
            exit 1
          fi
          if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
            echo "Error: Invalid SHA format received from GitHub"
            echo "Found: $LATEST_SHA"
            echo "Expected: 40 character hexadecimal string"
            exit 1
          fi
          echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
          echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT
      - name: Update version if needed
        if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
        run: |
          set -euo pipefail
          # Handle multi-line format where COMMIT and its value are on separate lines
          sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildLibDeflate.cmake
      - name: Create Pull Request
        if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
        uses: peter-evans/create-pull-request@v4
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          add-paths: |
            cmake/targets/BuildLibDeflate.cmake
          commit-message: "deps: update libdeflate to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
          title: "deps: update libdeflate to ${{ steps.check-version.outputs.tag }}"
          delete-branch: true
          branch: deps/update-libdeflate-${{ github.run_number }}
          body: |
            ## What does this PR do?
            Updates libdeflate to version ${{ steps.check-version.outputs.tag }}
            Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-libdeflate.yml)

92
.github/workflows/update-lolhtml.yml vendored Normal file

@@ -0,0 +1,92 @@
name: Update lolhtml
on:
  schedule:
    - cron: "0 1 * * 0"
  workflow_dispatch:
jobs:
  check-update:
    runs-on: ubuntu-latest
    permissions:
      contents: write
      pull-requests: write
    steps:
      - uses: actions/checkout@v4
      - name: Check lolhtml version
        id: check-version
        run: |
          set -euo pipefail
          # Extract the commit hash from the line after COMMIT
          CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildLolHtml.cmake)
          if [ -z "$CURRENT_VERSION" ]; then
            echo "Error: Could not find COMMIT line in BuildLolHtml.cmake"
            exit 1
          fi
          # Validate that it looks like a git hash
          if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
            echo "Error: Invalid git hash format in BuildLolHtml.cmake"
            echo "Found: $CURRENT_VERSION"
            echo "Expected: 40 character hexadecimal string"
            exit 1
          fi
          echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
          LATEST_RELEASE=$(curl -sL https://api.github.com/repos/cloudflare/lol-html/releases/latest)
          if [ -z "$LATEST_RELEASE" ]; then
            echo "Error: Failed to fetch latest release from GitHub API"
            exit 1
          fi
          LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
          if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
            echo "Error: Could not extract tag name from GitHub API response"
            exit 1
          fi
          LATEST_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
          if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
            echo "Error: Could not fetch SHA for tag $LATEST_TAG"
            exit 1
          fi
          if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
            echo "Error: Invalid SHA format received from GitHub"
            echo "Found: $LATEST_SHA"
            echo "Expected: 40 character hexadecimal string"
            exit 1
          fi
          echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
          echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT
      - name: Update version if needed
        if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
        run: |
          set -euo pipefail
          # Handle multi-line format where COMMIT and its value are on separate lines
          sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildLolHtml.cmake
      - name: Create Pull Request
        if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
        uses: peter-evans/create-pull-request@v4
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          add-paths: |
            cmake/targets/BuildLolHtml.cmake
          commit-message: "deps: update lolhtml to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
          title: "deps: update lolhtml to ${{ steps.check-version.outputs.tag }}"
          delete-branch: true
          branch: deps/update-lolhtml-${{ github.run_number }}
          body: |
            ## What does this PR do?
            Updates lolhtml to version ${{ steps.check-version.outputs.tag }}
            Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-lolhtml.yml)

92
.github/workflows/update-lshpack.yml vendored Normal file

@@ -0,0 +1,92 @@
name: Update lshpack
on:
  schedule:
    - cron: "0 5 * * 0"
  workflow_dispatch:
jobs:
  check-update:
    runs-on: ubuntu-latest
    permissions:
      contents: write
      pull-requests: write
    steps:
      - uses: actions/checkout@v4
      - name: Check lshpack version
        id: check-version
        run: |
          set -euo pipefail
          # Extract the commit hash from the line after COMMIT
          CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildLshpack.cmake)
          if [ -z "$CURRENT_VERSION" ]; then
            echo "Error: Could not find COMMIT line in BuildLshpack.cmake"
            exit 1
          fi
          # Validate that it looks like a git hash
          if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
            echo "Error: Invalid git hash format in BuildLshpack.cmake"
            echo "Found: $CURRENT_VERSION"
            echo "Expected: 40 character hexadecimal string"
            exit 1
          fi
          echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
          LATEST_RELEASE=$(curl -sL https://api.github.com/repos/litespeedtech/ls-hpack/releases/latest)
          if [ -z "$LATEST_RELEASE" ]; then
            echo "Error: Failed to fetch latest release from GitHub API"
            exit 1
          fi
          LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
          if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
            echo "Error: Could not extract tag name from GitHub API response"
            exit 1
          fi
          LATEST_SHA=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
          if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
            echo "Error: Could not fetch SHA for tag $LATEST_TAG"
            exit 1
          fi
          if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
            echo "Error: Invalid SHA format received from GitHub"
            echo "Found: $LATEST_SHA"
            echo "Expected: 40 character hexadecimal string"
            exit 1
          fi
          echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
          echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT
      - name: Update version if needed
        if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
        run: |
          set -euo pipefail
          # Handle multi-line format where COMMIT and its value are on separate lines
          sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildLshpack.cmake
      - name: Create Pull Request
        if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
        uses: peter-evans/create-pull-request@v4
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          add-paths: |
            cmake/targets/BuildLshpack.cmake
          commit-message: "deps: update lshpack to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
          title: "deps: update lshpack to ${{ steps.check-version.outputs.tag }}"
          delete-branch: true
          branch: deps/update-lshpack-${{ github.run_number }}
          body: |
            ## What does this PR do?
            Updates lshpack to version ${{ steps.check-version.outputs.tag }}
            Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-lshpack.yml)

109
.github/workflows/update-sqlite3.yml vendored Normal file

@@ -0,0 +1,109 @@
name: Update SQLite3
on:
schedule:
- cron: "0 6 * * 0" # Run weekly
workflow_dispatch:
jobs:
check-update:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- uses: actions/checkout@v4
- name: Check SQLite version
id: check-version
run: |
set -euo pipefail
# Get current version from the header file using SQLITE_VERSION_NUMBER
CURRENT_VERSION_NUM=$(grep -o '#define SQLITE_VERSION_NUMBER [0-9]\+' src/bun.js/bindings/sqlite/sqlite3_local.h | awk '{print $3}' | tr -d '\n\r')
if [ -z "$CURRENT_VERSION_NUM" ]; then
echo "Error: Could not find SQLITE_VERSION_NUMBER in sqlite3_local.h"
exit 1
fi
# Convert numeric version to semantic version for display
CURRENT_MAJOR=$((CURRENT_VERSION_NUM / 1000000))
CURRENT_MINOR=$((($CURRENT_VERSION_NUM / 1000) % 1000))
CURRENT_PATCH=$((CURRENT_VERSION_NUM % 1000))
CURRENT_VERSION="$CURRENT_MAJOR.$CURRENT_MINOR.$CURRENT_PATCH"
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
echo "current_num=$CURRENT_VERSION_NUM" >> $GITHUB_OUTPUT
# Fetch SQLite download page
DOWNLOAD_PAGE=$(curl -sL https://sqlite.org/download.html)
if [ -z "$DOWNLOAD_PAGE" ]; then
echo "Error: Failed to fetch SQLite download page"
exit 1
fi
# Extract latest version and year from the amalgamation link
LATEST_INFO=$(echo "$DOWNLOAD_PAGE" | grep -o 'sqlite-amalgamation-[0-9]\{7\}.zip' | head -n1)
LATEST_YEAR=$(echo "$DOWNLOAD_PAGE" | grep -o '[0-9]\{4\}/sqlite-amalgamation-[0-9]\{7\}.zip' | head -n1 | cut -d'/' -f1 | tr -d '\n\r')
LATEST_VERSION_NUM=$(echo "$LATEST_INFO" | grep -o '[0-9]\{7\}' | tr -d '\n\r')
if [ -z "$LATEST_VERSION_NUM" ] || [ -z "$LATEST_YEAR" ]; then
echo "Error: Could not extract latest version info"
exit 1
fi
# Convert numeric version to semantic version for display
LATEST_MAJOR=$((10#$LATEST_VERSION_NUM / 1000000))
LATEST_MINOR=$((($LATEST_VERSION_NUM / 1000) % 1000))
LATEST_PATCH=$((10#$LATEST_VERSION_NUM % 1000))
LATEST_VERSION="$LATEST_MAJOR.$LATEST_MINOR.$LATEST_PATCH"
echo "latest=$LATEST_VERSION" >> $GITHUB_OUTPUT
echo "latest_year=$LATEST_YEAR" >> $GITHUB_OUTPUT
echo "latest_num=$LATEST_VERSION_NUM" >> $GITHUB_OUTPUT
# Debug output
echo "Current version: $CURRENT_VERSION ($CURRENT_VERSION_NUM)"
echo "Latest version: $LATEST_VERSION ($LATEST_VERSION_NUM)"
- name: Update SQLite if needed
if: success() && steps.check-version.outputs.current_num < steps.check-version.outputs.latest_num
run: |
set -euo pipefail
TEMP_DIR=$(mktemp -d)
cd $TEMP_DIR
echo "Downloading from: https://sqlite.org/${{ steps.check-version.outputs.latest_year }}/sqlite-amalgamation-${{ steps.check-version.outputs.latest_num }}.zip"
# Download and extract latest version
wget "https://sqlite.org/${{ steps.check-version.outputs.latest_year }}/sqlite-amalgamation-${{ steps.check-version.outputs.latest_num }}.zip"
unzip "sqlite-amalgamation-${{ steps.check-version.outputs.latest_num }}.zip"
cd "sqlite-amalgamation-${{ steps.check-version.outputs.latest_num }}"
# Add header comment and copy files
echo "// clang-format off" > $GITHUB_WORKSPACE/src/bun.js/bindings/sqlite/sqlite3.c
cat sqlite3.c >> $GITHUB_WORKSPACE/src/bun.js/bindings/sqlite/sqlite3.c
echo "// clang-format off" > $GITHUB_WORKSPACE/src/bun.js/bindings/sqlite/sqlite3_local.h
cat sqlite3.h >> $GITHUB_WORKSPACE/src/bun.js/bindings/sqlite/sqlite3_local.h
- name: Create Pull Request
if: success() && steps.check-version.outputs.current_num < steps.check-version.outputs.latest_num
uses: peter-evans/create-pull-request@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
add-paths: |
src/bun.js/bindings/sqlite/sqlite3.c
src/bun.js/bindings/sqlite/sqlite3_local.h
commit-message: "deps: update sqlite to ${{ steps.check-version.outputs.latest }}"
title: "deps: update sqlite to ${{ steps.check-version.outputs.latest }}"
delete-branch: true
branch: deps/update-sqlite-${{ steps.check-version.outputs.latest }}
body: |
## What does this PR do?
Updates SQLite to version ${{ steps.check-version.outputs.latest }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-sqlite3.yml)
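For context, `SQLITE_VERSION_NUMBER` packs the version as `major*1000000 + minor*1000 + patch`, which is what the arithmetic in the check step undoes. A small TypeScript sketch of that conversion (the helper name is illustrative, not part of the workflow):

```ts
// SQLITE_VERSION_NUMBER encodes the version as M * 1_000_000 + m * 1_000 + p.
function decodeSqliteVersion(num: number): string {
  const major = Math.floor(num / 1_000_000);
  const minor = Math.floor(num / 1_000) % 1_000;
  const patch = num % 1_000;
  return `${major}.${minor}.${patch}`;
}

console.log(decodeSqliteVersion(3047000)); // "3.47.0"
```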

19
.gitignore vendored
View File

@@ -26,6 +26,7 @@
*.db
*.dmg
*.dSYM
*.generated.ts
*.jsb
*.lib
*.log
@@ -53,8 +54,8 @@
/test-report.md
/test.js
/test.ts
/testdir
/test.zig
/testdir
build
build.ninja
bun-binary
@@ -111,8 +112,10 @@ pnpm-lock.yaml
profile.json
README.md.template
release/
scripts/env.local
sign.*.json
sign.json
src/bake/generated.ts
src/bun.js/bindings-obj
src/bun.js/bindings/GeneratedJS2Native.zig
src/bun.js/debug-bindings-obj
@@ -131,6 +134,7 @@ src/runtime.version
src/tests.zig
test.txt
test/js/bun/glob/fixtures
test/node.js/upstream
tsconfig.tsbuildinfo
txt.js
x64
@@ -142,6 +146,9 @@ test/node.js/upstream
scripts/env.local
*.generated.ts
src/bake/generated.ts
test/cli/install/registry/packages/publish-pkg-*
test/cli/install/registry/packages/@secret/publish-pkg-8
test/js/third_party/prisma/prisma/sqlite/dev.db-journal
# Dependencies
/vendor
@@ -149,22 +156,24 @@ src/bake/generated.ts
# Dependencies (before CMake)
# These can be removed in the far future
/src/bun.js/WebKit
/src/deps/WebKit
/src/deps/boringssl
/src/deps/brotli
/src/deps/c*ares
/src/deps/lol*html
/src/deps/libarchive
/src/deps/libdeflate
/src/deps/libuv
/src/deps/lol*html
/src/deps/ls*hpack
/src/deps/mimalloc
/src/deps/picohttpparser
/src/deps/tinycc
/src/deps/zstd
/src/deps/zlib
/src/deps/WebKit
/src/deps/zig
/src/deps/zlib
/src/deps/zstd
# Generated files
.buildkite/ci.yml
*.sock
scratch*.{js,ts,tsx,cjs,mjs}

3
.vscode/launch.json generated vendored
View File

@@ -224,8 +224,11 @@
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1",
// "BUN_DEBUG_DEBUGGER": "1",
// "BUN_DEBUG_INTERNAL_DEBUGGER": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
// "BUN_INSPECT": "ws+unix:///var/folders/jk/8fzl9l5119598vsqrmphsw7m0000gn/T/tl15npi7qtf.sock?report=1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.

View File

@@ -78,7 +78,7 @@
"prettier.prettierPath": "./node_modules/prettier",
// TypeScript
"typescript.tsdk": "${workspaceFolder}/node_modules/typescript/lib",
"typescript.tsdk": "node_modules/typescript/lib",
"[typescript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
},

91
.vscode/tasks.json vendored
View File

@@ -2,50 +2,57 @@
"version": "2.0.0",
"tasks": [
{
"type": "process",
"label": "Install Dependencies",
"command": "scripts/all-dependencies.sh",
"windows": {
"command": "scripts/all-dependencies.ps1",
},
"icon": {
"id": "arrow-down",
},
"options": {
"cwd": "${workspaceFolder}",
},
},
{
"type": "process",
"label": "Setup Environment",
"dependsOn": ["Install Dependencies"],
"command": "scripts/setup.sh",
"windows": {
"command": "scripts/setup.ps1",
},
"icon": {
"id": "check",
},
"options": {
"cwd": "${workspaceFolder}",
},
},
{
"type": "process",
"label": "Build Bun",
"dependsOn": ["Setup Environment"],
"command": "bun",
"args": ["run", "build"],
"icon": {
"id": "gear",
"type": "shell",
"command": "bun run build",
"group": {
"kind": "build",
"isDefault": true,
},
"options": {
"cwd": "${workspaceFolder}",
},
"isBuildCommand": true,
"runOptions": {
"instanceLimit": 1,
"reevaluateOnRerun": true,
"problemMatcher": [
{
"owner": "zig",
"fileLocation": ["relative", "${workspaceFolder}"],
"pattern": [
{
"regexp": "^(.+?):(\\d+):(\\d+): (error|warning|note): (.+)$",
"file": 1,
"line": 2,
"column": 3,
"severity": 4,
"message": 5,
},
{
"regexp": "^\\s+(.+)$",
"message": 1,
"loop": true,
},
],
},
{
"owner": "clang",
"fileLocation": ["relative", "${workspaceFolder}"],
"pattern": [
{
"regexp": "^([^:]+):(\\d+):(\\d+):\\s+(warning|error|note|remark):\\s+(.*)$",
"file": 1,
"line": 2,
"column": 3,
"severity": 4,
"message": 5,
},
{
"regexp": "^\\s*(.*)$",
"message": 1,
"loop": true,
},
],
},
],
"presentation": {
"reveal": "always",
"panel": "shared",
"clear": true,
},
},
],

View File

@@ -1,6 +1,6 @@
Configuring a development environment for Bun can take 10-30 minutes depending on your internet connection and computer speed. You will need ~10GB of free disk space for the repository and build artifacts.
If you are using Windows, please refer to [this guide](/docs/project/building-windows)
If you are using Windows, please refer to [this guide](/docs/project/building-windows.md)
{% details summary="For Ubuntu users" %}
TL;DR: Ubuntu 22.04 is suggested.
@@ -11,7 +11,7 @@ Bun currently requires `glibc >=2.32` in development which means if you're on Ub
Using your system's package manager, install Bun's dependencies:
{% codetabs %}
{% codetabs group="os" %}
```bash#macOS (Homebrew)
$ brew install automake ccache cmake coreutils gnu-sed go icu4c libiconv libtool ninja pkg-config rust ruby
@@ -60,7 +60,7 @@ $ brew install bun
Bun requires LLVM 16 (`clang` is part of LLVM). This version requirement is to match WebKit (precompiled), as mismatching versions will cause memory allocation failures at runtime. In most cases, you can install LLVM through your system package manager:
{% codetabs %}
{% codetabs group="os" %}
```bash#macOS (Homebrew)
$ brew install llvm@18
@@ -97,7 +97,7 @@ $ which clang-16
If not, run this to manually add it:
{% codetabs %}
{% codetabs group="os" %}
```bash#macOS (Homebrew)
# use fish_add_path if you're using fish
@@ -285,7 +285,7 @@ If you see this error when compiling, run:
$ xcode-select --install
```
## Cannot find `libatomic.a`
### Cannot find `libatomic.a`
Bun defaults to linking `libatomic` statically, as not all systems have it. If you are building on a distro that does not have a static libatomic available, you can run the following command to enable dynamic linking:
@@ -295,7 +295,7 @@ $ bun run build -DUSE_STATIC_LIBATOMIC=OFF
The built version of Bun may not work on other systems if compiled this way.
## ccache conflicts with building TinyCC on macOS
### ccache conflicts with building TinyCC on macOS
If you run into issues with `ccache` when building TinyCC, try reinstalling ccache
@@ -303,3 +303,9 @@ If you run into issues with `ccache` when building TinyCC, try reinstalling ccac
brew uninstall ccache
brew install ccache
```
## Using bun-debug
- Disable logging: `BUN_DEBUG_QUIET_LOGS=1 bun-debug ...` (to disable all debug logging)
- Enable logging for a specific zig scope: `BUN_DEBUG_EventLoop=1 bun-debug ...` (to allow `std.log.scoped(.EventLoop)`)
- Bun transpiles every file it runs, to see the actual executed source in a debug build find it in `/tmp/bun-debug-src/...path/to/file`, for example the transpiled version of `/home/bun/index.ts` would be in `/tmp/bun-debug-src/home/bun/index.ts`

2
LATEST
View File

@@ -1 +1 @@
1.1.34
1.1.38

18
ci/linux/Dockerfile Normal file
View File

@@ -0,0 +1,18 @@
ARG IMAGE=debian:11
FROM $IMAGE
COPY ./scripts/bootstrap.sh /tmp/bootstrap.sh
ENV CI=true
RUN sh /tmp/bootstrap.sh && rm -rf /tmp/*
WORKDIR /workspace/bun
COPY bunfig.toml bunfig.toml
COPY package.json package.json
COPY CMakeLists.txt CMakeLists.txt
COPY cmake/ cmake/
COPY scripts/ scripts/
COPY patches/ patches/
COPY *.zig ./
COPY src/ src/
COPY packages/ packages/
COPY test/ test/
RUN bun i
RUN bun run build:ci

View File

@@ -0,0 +1,27 @@
#!/bin/sh
# This script sets the hostname of the current machine.
execute() {
echo "$ $@" >&2
if ! "$@"; then
echo "Command failed: $@" >&2
exit 1
fi
}
main() {
if [ "$#" -ne 1 ]; then
echo "Usage: $0 <hostname>" >&2
exit 1
fi
if [ -f "$(which hostnamectl)" ]; then
execute hostnamectl set-hostname "$1"
else
echo "Error: hostnamectl is not installed." >&2
exit 1
fi
}
main "$@"

View File

@@ -0,0 +1,22 @@
#!/bin/sh
# This script starts tailscale on the current machine.
execute() {
echo "$ $@" >&2
if ! "$@"; then
echo "Command failed: $@" >&2
exit 1
fi
}
main() {
if [ "$#" -ne 1 ]; then
echo "Usage: $0 <auth-key>" >&2
exit 1
fi
execute tailscale up --reset --ssh --accept-risk=lose-ssh --auth-key="$1"
}
main "$@"

View File

@@ -2,7 +2,7 @@
"private": true,
"scripts": {
"bootstrap": "brew install gh jq cirruslabs/cli/tart cirruslabs/cli/sshpass hashicorp/tap/packer && packer init darwin",
"login": "gh auth token | tart login ghcr.io --username $(gh api user --jq .login) --password-stdin",
"login": "token=$(gh auth token); username=$(gh api user --jq .login); echo \"Login as $username...\"; echo \"$token\" | tart login ghcr.io --username \"$username\" --password-stdin; echo \"$token\" | docker login ghcr.io --username \"$username\" --password-stdin",
"fetch:image-name": "echo ghcr.io/oven-sh/bun-vm",
"fetch:darwin-version": "echo 1",
"fetch:macos-version": "sw_vers -productVersion | cut -d. -f1",

View File

@@ -265,7 +265,7 @@ if(ENABLE_LTO)
endif()
# --- Remapping ---
if(UNIX)
if(UNIX AND CI)
register_compiler_flags(
DESCRIPTION "Remap source files"
-ffile-prefix-map=${CWD}=.

View File

@@ -105,14 +105,6 @@ else()
unsupported(CMAKE_HOST_SYSTEM_NAME)
endif()
if(EXISTS "/lib/ld-musl-aarch64.so.1")
set(IS_MUSL ON)
elseif(EXISTS "/lib/ld-musl-x86_64.so.1")
set(IS_MUSL ON)
else()
set(IS_MUSL OFF)
endif()
if(CMAKE_HOST_SYSTEM_PROCESSOR MATCHES "arm64|ARM64|aarch64|AARCH64")
set(HOST_OS "aarch64")
elseif(CMAKE_HOST_SYSTEM_PROCESSOR MATCHES "x86_64|X86_64|x64|X64|amd64|AMD64")

View File

@@ -10,7 +10,6 @@ optionx(GITHUB_ACTIONS BOOL "If GitHub Actions is enabled" DEFAULT OFF)
if(BUILDKITE)
optionx(BUILDKITE_COMMIT STRING "The commit hash")
optionx(BUILDKITE_MESSAGE STRING "The commit message")
endif()
optionx(CMAKE_BUILD_TYPE "Debug|Release|RelWithDebInfo|MinSizeRel" "The build type to use" REQUIRED)
@@ -49,6 +48,16 @@ else()
message(FATAL_ERROR "Unsupported architecture: ${CMAKE_SYSTEM_PROCESSOR}")
endif()
if(LINUX)
if(EXISTS "/etc/alpine-release")
set(DEFAULT_ABI "musl")
else()
set(DEFAULT_ABI "gnu")
endif()
optionx(ABI "musl|gnu" "The ABI to use (e.g. musl, gnu)" DEFAULT ${DEFAULT_ABI})
endif()
if(ARCH STREQUAL "x64")
optionx(ENABLE_BASELINE BOOL "If baseline features should be used for older CPUs (e.g. disables AVX, AVX2)" DEFAULT OFF)
endif()
@@ -56,14 +65,7 @@ endif()
optionx(ENABLE_LOGS BOOL "If debug logs should be enabled" DEFAULT ${DEBUG})
optionx(ENABLE_ASSERTIONS BOOL "If debug assertions should be enabled" DEFAULT ${DEBUG})
if(BUILDKITE_MESSAGE AND BUILDKITE_MESSAGE MATCHES "\\[release build\\]")
message(STATUS "Switched to release build, since commit message contains: \"[release build]\"")
set(DEFAULT_CANARY OFF)
else()
set(DEFAULT_CANARY ON)
endif()
optionx(ENABLE_CANARY BOOL "If canary features should be enabled" DEFAULT ${DEFAULT_CANARY})
optionx(ENABLE_CANARY BOOL "If canary features should be enabled" DEFAULT ON)
if(ENABLE_CANARY AND BUILDKITE)
execute_process(

View File

@@ -484,14 +484,12 @@ set(BUN_ZIG_OUTPUT ${BUILD_PATH}/bun-zig.o)
if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm|ARM|arm64|ARM64|aarch64|AARCH64")
set(IS_ARM64 ON)
if(APPLE)
set(ZIG_CPU "apple_m1")
else()
set(ZIG_CPU "native")
endif()
elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "x86_64|X86_64|x64|X64|amd64|AMD64")
set(IS_X86_64 ON)
if(ENABLE_BASELINE)
set(ZIG_CPU "nehalem")
else()
@@ -528,6 +526,7 @@ register_command(
-Dcanary=${CANARY_REVISION}
-Dcodegen_path=${CODEGEN_PATH}
-Dcodegen_embed=$<IF:$<BOOL:${CODEGEN_EMBED}>,true,false>
--prominent-compile-errors
${ZIG_FLAGS_BUN}
ARTIFACTS
${BUN_ZIG_OUTPUT}
@@ -760,8 +759,8 @@ if(NOT WIN32)
)
if(DEBUG)
# TODO: this shouldn't be necessary long term
if (NOT IS_MUSL)
set(ABI_PUBLIC_FLAGS
if (NOT ABI STREQUAL "musl")
target_compile_options(${bun} PUBLIC
-fsanitize=null
-fsanitize-recover=all
-fsanitize=bounds
@@ -772,14 +771,9 @@ if(NOT WIN32)
-fsanitize=returns-nonnull-attribute
-fsanitize=unreachable
)
set(ABI_PRIVATE_FLAGS
target_link_libraries(${bun} PRIVATE
-fsanitize=null
)
else()
set(ABI_PUBLIC_FLAGS
)
set(ABI_PRIVATE_FLAGS
)
endif()
target_compile_options(${bun} PUBLIC
@@ -797,10 +791,6 @@ if(NOT WIN32)
-Wno-unused-function
-Wno-nullability-completeness
-Werror
${ABI_PUBLIC_FLAGS}
)
target_link_libraries(${bun} PRIVATE
${ABI_PRIVATE_FLAGS}
)
else()
# Leave -Werror=unused off in release builds so we avoid errors from being used in ASSERT
@@ -845,65 +835,48 @@ if(WIN32)
/delayload:IPHLPAPI.dll
)
endif()
elseif(APPLE)
endif()
if(APPLE)
target_link_options(${bun} PUBLIC
-dead_strip
-dead_strip_dylibs
-Wl,-ld_new
-Wl,-no_compact_unwind
-Wl,-stack_size,0x1200000
-fno-keep-static-consts
-Wl,-map,${bun}.linker-map
)
else()
# Try to use lld-16 if available, otherwise fallback to lld
# Cache it so we don't have to re-run CMake to pick it up
if((NOT DEFINED LLD_NAME) AND (NOT CI OR BUN_LINK_ONLY))
find_program(LLD_EXECUTABLE_NAME lld-${LLVM_VERSION_MAJOR})
endif()
if(NOT LLD_EXECUTABLE_NAME)
if(CI)
# Ensure we don't use a differing version of lld in CI vs clang
message(FATAL_ERROR "lld-${LLVM_VERSION_MAJOR} not found. Please make sure you have LLVM ${LLVM_VERSION_MAJOR}.x installed and set to lld-${LLVM_VERSION_MAJOR}")
endif()
# To make it easier for contributors, allow differing versions of lld vs clang/cmake
find_program(LLD_EXECUTABLE_NAME lld)
if(LINUX)
if(NOT ABI STREQUAL "musl")
if(ARCH STREQUAL "aarch64")
target_link_options(${bun} PUBLIC
-Wl,--wrap=fcntl64
-Wl,--wrap=statx
)
endif()
if(ARCH STREQUAL "x64")
target_link_options(${bun} PUBLIC
-Wl,--wrap=fcntl
-Wl,--wrap=fcntl64
-Wl,--wrap=fstat
-Wl,--wrap=fstat64
-Wl,--wrap=fstatat
-Wl,--wrap=fstatat64
-Wl,--wrap=lstat
-Wl,--wrap=lstat64
-Wl,--wrap=mknod
-Wl,--wrap=mknodat
-Wl,--wrap=stat
-Wl,--wrap=stat64
-Wl,--wrap=statx
)
endif()
if(NOT LLD_EXECUTABLE_NAME)
message(FATAL_ERROR "LLD not found. Please make sure you have LLVM ${LLVM_VERSION_MAJOR}.x installed and lld is available in your PATH as lld-${LLVM_VERSION_MAJOR}")
endif()
# normalize to basename so it can be used with -fuse-ld
get_filename_component(LLD_NAME ${LLD_EXECUTABLE_NAME} NAME CACHE)
message(STATUS "Using linker: ${LLD_NAME} (${LLD_EXECUTABLE_NAME})")
elseif(NOT DEFINED LLD_NAME)
set(LLD_NAME lld-${LLVM_VERSION_MAJOR})
endif()
if (IS_ARM64)
set(ARCH_WRAP_FLAGS
-Wl,--wrap=fcntl64
-Wl,--wrap=statx
)
elseif(IS_X86_64)
set(ARCH_WRAP_FLAGS
-Wl,--wrap=fcntl
-Wl,--wrap=fcntl64
-Wl,--wrap=fstat
-Wl,--wrap=fstat64
-Wl,--wrap=fstatat
-Wl,--wrap=fstatat64
-Wl,--wrap=lstat
-Wl,--wrap=lstat64
-Wl,--wrap=mknod
-Wl,--wrap=mknodat
-Wl,--wrap=stat
-Wl,--wrap=stat64
-Wl,--wrap=statx
)
endif()
if (NOT IS_MUSL)
set(ABI_WRAP_FLAGS
target_link_options(${bun} PUBLIC
-Wl,--wrap=cosf
-Wl,--wrap=exp
-Wl,--wrap=expf
@@ -920,26 +893,37 @@ else()
-Wl,--wrap=sinf
-Wl,--wrap=tanf
)
endif()
if(NOT ABI STREQUAL "musl")
target_link_options(${bun} PUBLIC
-static-libstdc++
-static-libgcc
)
else()
set(ABI_WRAP_FLAGS
target_link_options(${bun} PUBLIC
-lstdc++
-lgcc
)
endif()
target_link_options(${bun} PUBLIC
-fuse-ld=${LLD_NAME}
--ld-path=${LLD_PROGRAM}
-fno-pic
-static-libstdc++
-static-libgcc
-Wl,-no-pie
-Wl,-icf=safe
-Wl,--as-needed
-Wl,--gc-sections
-Wl,-z,stack-size=12800000
${ARCH_WRAP_FLAGS}
${ABI_WRAP_FLAGS}
-Wl,--compress-debug-sections=zlib
-Wl,-z,lazy
-Wl,-z,norelro
-Wl,-z,combreloc
-Wl,--no-eh-frame-hdr
-Wl,--sort-section=name
-Wl,--hash-style=gnu
-Wl,--build-id=sha1 # Better for debugging than default
-Wl,-Map=${bun}.linker-map
)
endif()
@@ -1079,6 +1063,18 @@ endif()
# --- Packaging ---
if(NOT BUN_CPP_ONLY)
set(CMAKE_STRIP_FLAGS "")
if(APPLE)
# We do not build with exceptions enabled. These are generated by lolhtml
# and other dependencies. We build lolhtml with abort on panic, so it
# shouldn't be including these in the first place.
set(CMAKE_STRIP_FLAGS --remove-section=__TEXT,__eh_frame --remove-section=__TEXT,__unwind_info --remove-section=__TEXT,__gcc_except_tab)
elseif(LINUX AND NOT ABI STREQUAL "musl")
# When you use llvm-strip to do this, it doesn't delete it from the binary and instead keeps it as [LOAD #2 [R]]
# So, we must use GNU strip to do this.
set(CMAKE_STRIP_FLAGS -R .eh_frame -R .gcc_except_table)
endif()
if(bunStrip)
register_command(
TARGET
@@ -1090,6 +1086,7 @@ if(NOT BUN_CPP_ONLY)
COMMAND
${CMAKE_STRIP}
${bunExe}
${CMAKE_STRIP_FLAGS}
--strip-all
--strip-debug
--discard-all
@@ -1165,10 +1162,12 @@ if(NOT BUN_CPP_ONLY)
endif()
if(CI)
set(bunTriplet bun-${OS}-${ARCH})
if(LINUX AND ABI STREQUAL "musl")
set(bunTriplet ${bunTriplet}-musl)
endif()
if(ENABLE_BASELINE)
set(bunTriplet bun-${OS}-${ARCH}-baseline)
else()
set(bunTriplet bun-${OS}-${ARCH})
set(bunTriplet ${bunTriplet}-baseline)
endif()
string(REPLACE bun ${bunTriplet} bunPath ${bun})
set(bunFiles ${bunExe} features.json)
@@ -1177,6 +1176,12 @@ if(NOT BUN_CPP_ONLY)
elseif(APPLE)
list(APPEND bunFiles ${bun}.dSYM)
endif()
if(APPLE OR LINUX)
list(APPEND bunFiles ${bun}.linker-map)
endif()
register_command(
TARGET
${bun}

View File

@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
c-ares/c-ares
COMMIT
d1722e6e8acaf10eb73fa995798a9cd421d9f85e
41ee334af3e3d0027dca5e477855d0244936bd49
)
register_cmake_command(

View File

@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
ebiggers/libdeflate
COMMIT
dc76454a39e7e83b68c3704b6e3784654f8d5ac5
9d624d1d8ba82c690d6d6be1d0a961fc5a983ea4
)
register_cmake_command(

View File

@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
cloudflare/lol-html
COMMIT
8d4c273ded322193d017042d1f48df2766b0f88b
4f8becea13a0021c8b71abd2dcc5899384973b66
)
set(LOLHTML_CWD ${VENDOR_PATH}/lolhtml/c-api)
@@ -26,6 +26,13 @@ if(RELEASE)
list(APPEND LOLHTML_BUILD_ARGS --release)
endif()
# Windows requires unwind tables, apparently.
if (NOT WIN32)
# The encoded escape sequences are intentional. They're how you delimit multiple arguments in a single environment variable.
# Also add a Rust optimization flag for a smaller binary size without a large speed penalty.
set(RUSTFLAGS "-Cpanic=abort-Cdebuginfo=0-Cforce-unwind-tables=no-Copt-level=s")
endif()
register_command(
TARGET
lolhtml
@@ -37,6 +44,11 @@ register_command(
${LOLHTML_BUILD_ARGS}
ARTIFACTS
${LOLHTML_LIBRARY}
ENVIRONMENT
CARGO_TERM_COLOR=always
CARGO_TERM_VERBOSE=true
CARGO_TERM_DIAGNOSTIC=true
CARGO_ENCODED_RUSTFLAGS=${RUSTFLAGS}
)
target_link_libraries(${bun} PRIVATE ${LOLHTML_LIBRARY})
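For what the "encoded escape sequences" comment refers to: Cargo's `CARGO_ENCODED_RUSTFLAGS` variable delimits individual flags with the ASCII unit separator (0x1f). A small TypeScript illustration of building such a value, not part of the CMake change above:

```ts
// CARGO_ENCODED_RUSTFLAGS delimits flags with the 0x1f unit separator,
// so individual flags can contain spaces without being split.
const rustflags = [
  "-Cpanic=abort",
  "-Cdebuginfo=0",
  "-Cforce-unwind-tables=no",
  "-Copt-level=s",
].join("\x1f");

process.env.CARGO_ENCODED_RUSTFLAGS = rustflags;
```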

View File

@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
litespeedtech/ls-hpack
COMMIT
3d0f1fc1d6e66a642e7a98c55deb38aa986eb4b0
32e96f10593c7cb8553cd8c9c12721100ae9e924
)
if(WIN32)

View File

@@ -0,0 +1,6 @@
set(CMAKE_SYSTEM_NAME Linux)
set(CMAKE_SYSTEM_PROCESSOR aarch64)
set(ABI musl)
set(CMAKE_C_COMPILER_WORKS ON)
set(CMAKE_CXX_COMPILER_WORKS ON)

View File

@@ -1,5 +1,6 @@
set(CMAKE_SYSTEM_NAME Linux)
set(CMAKE_SYSTEM_PROCESSOR aarch64)
set(ABI gnu)
set(CMAKE_C_COMPILER_WORKS ON)
set(CMAKE_CXX_COMPILER_WORKS ON)

View File

@@ -1,6 +1,7 @@
set(CMAKE_SYSTEM_NAME Linux)
set(CMAKE_SYSTEM_PROCESSOR x64)
set(ENABLE_BASELINE ON)
set(ABI gnu)
set(CMAKE_C_COMPILER_WORKS ON)
set(CMAKE_CXX_COMPILER_WORKS ON)

View File

@@ -0,0 +1,7 @@
set(CMAKE_SYSTEM_NAME Linux)
set(CMAKE_SYSTEM_PROCESSOR x64)
set(ENABLE_BASELINE ON)
set(ABI musl)
set(CMAKE_C_COMPILER_WORKS ON)
set(CMAKE_CXX_COMPILER_WORKS ON)

View File

@@ -0,0 +1,6 @@
set(CMAKE_SYSTEM_NAME Linux)
set(CMAKE_SYSTEM_PROCESSOR x64)
set(ABI musl)
set(CMAKE_C_COMPILER_WORKS ON)
set(CMAKE_CXX_COMPILER_WORKS ON)

View File

@@ -1,5 +1,6 @@
set(CMAKE_SYSTEM_NAME Linux)
set(CMAKE_SYSTEM_PROCESSOR x64)
set(ABI gnu)
set(CMAKE_C_COMPILER_WORKS ON)
set(CMAKE_CXX_COMPILER_WORKS ON)

View File

@@ -29,7 +29,7 @@ execute_process(
)
if(NOT GIT_DIFF_RESULT EQUAL 0)
message(${WARNING} "Command failed: ${GIT_DIFF_COMMAND} ${GIT_DIFF_ERROR}")
message(WARNING "Command failed: ${GIT_DIFF_COMMAND} ${GIT_DIFF_ERROR}")
return()
endif()

View File

@@ -4,7 +4,7 @@ if(NOT ENABLE_LLVM)
return()
endif()
if(CMAKE_HOST_WIN32 OR CMAKE_HOST_APPLE OR IS_MUSL)
if(CMAKE_HOST_WIN32 OR CMAKE_HOST_APPLE OR EXISTS "/etc/alpine-release")
set(DEFAULT_LLVM_VERSION "18.1.8")
else()
set(DEFAULT_LLVM_VERSION "16.0.6")
@@ -52,6 +52,7 @@ if(UNIX)
/usr/lib/llvm-${LLVM_VERSION_MAJOR}.${LLVM_VERSION_MINOR}.${LLVM_VERSION_PATCH}/bin
/usr/lib/llvm-${LLVM_VERSION_MAJOR}.${LLVM_VERSION_MINOR}/bin
/usr/lib/llvm-${LLVM_VERSION_MAJOR}/bin
/usr/lib/llvm${LLVM_VERSION_MAJOR}/bin
)
endif()
endif()
@@ -108,8 +109,23 @@ else()
find_llvm_command(CMAKE_CXX_COMPILER clang++)
find_llvm_command(CMAKE_LINKER llvm-link)
find_llvm_command(CMAKE_AR llvm-ar)
find_llvm_command(CMAKE_STRIP llvm-strip)
if (LINUX)
# On Linux, strip ends up being more useful for us.
find_command(
VARIABLE
CMAKE_STRIP
COMMAND
strip
REQUIRED
ON
)
else()
find_llvm_command(CMAKE_STRIP llvm-strip)
endif()
find_llvm_command(CMAKE_RANLIB llvm-ranlib)
if(LINUX)
find_llvm_command(LLD_PROGRAM ld.lld)
endif()
if(APPLE)
find_llvm_command(CMAKE_DSYMUTIL dsymutil)
endif()

View File

@@ -2,7 +2,7 @@ option(WEBKIT_VERSION "The version of WebKit to use")
option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading")
if(NOT WEBKIT_VERSION)
set(WEBKIT_VERSION 73b551e25d97e463e8e2c86cb819b8639fcbda06)
set(WEBKIT_VERSION 8f9ae4f01a047c666ef548864294e01df731d4ea)
endif()
if(WEBKIT_LOCAL)
@@ -63,7 +63,7 @@ else()
message(FATAL_ERROR "Unsupported architecture: ${CMAKE_SYSTEM_PROCESSOR}")
endif()
if(IS_MUSL)
if(LINUX AND ABI STREQUAL "musl")
set(WEBKIT_SUFFIX "-musl")
endif()

View File

@@ -11,7 +11,7 @@ if(APPLE)
elseif(WIN32)
set(DEFAULT_ZIG_TARGET ${DEFAULT_ZIG_ARCH}-windows-msvc)
elseif(LINUX)
if(IS_MUSL)
if(ABI STREQUAL "musl")
set(DEFAULT_ZIG_TARGET ${DEFAULT_ZIG_ARCH}-linux-musl)
else()
set(DEFAULT_ZIG_TARGET ${DEFAULT_ZIG_ARCH}-linux-gnu)

View File

@@ -1,30 +1,13 @@
FROM alpine:3.18 AS build
FROM alpine:3.20 AS build
# https://github.com/oven-sh/bun/releases
ARG BUN_VERSION=latest
# TODO: Instead of downloading glibc from a third-party source, we should
# build it from source. This is a temporary solution.
# See: https://github.com/sgerrand/alpine-pkg-glibc
# https://github.com/sgerrand/alpine-pkg-glibc/releases
# https://github.com/sgerrand/alpine-pkg-glibc/issues/176
ARG GLIBC_VERSION=2.34-r0
# https://github.com/oven-sh/bun/issues/5545#issuecomment-1722461083
ARG GLIBC_VERSION_AARCH64=2.26-r1
RUN apk --no-cache add \
ca-certificates \
curl \
dirmngr \
gpg \
gpg-agent \
unzip \
RUN apk --no-cache add ca-certificates curl dirmngr gpg gpg-agent unzip \
&& arch="$(apk --print-arch)" \
&& case "${arch##*-}" in \
x86_64) build="x64-baseline";; \
aarch64) build="aarch64";; \
x86_64) build="x64-musl-baseline";; \
aarch64) build="aarch64-musl";; \
*) echo "error: unsupported architecture: $arch"; exit 1 ;; \
esac \
&& version="$BUN_VERSION" \
@@ -59,37 +42,9 @@ RUN apk --no-cache add \
&& unzip "bun-linux-$build.zip" \
&& mv "bun-linux-$build/bun" /usr/local/bin/bun \
&& rm -f "bun-linux-$build.zip" SHASUMS256.txt.asc SHASUMS256.txt \
&& chmod +x /usr/local/bin/bun \
&& cd /tmp \
&& case "${arch##*-}" in \
x86_64) curl "https://github.com/sgerrand/alpine-pkg-glibc/releases/download/${GLIBC_VERSION}/glibc-${GLIBC_VERSION}.apk" \
-fsSLO \
--compressed \
--retry 5 \
|| (echo "error: failed to download: glibc v${GLIBC_VERSION}" && exit 1) \
&& mv "glibc-${GLIBC_VERSION}.apk" glibc.apk \
&& curl "https://github.com/sgerrand/alpine-pkg-glibc/releases/download/${GLIBC_VERSION}/glibc-bin-${GLIBC_VERSION}.apk" \
-fsSLO \
--compressed \
--retry 5 \
|| (echo "error: failed to download: glibc-bin v${GLIBC_VERSION}" && exit 1) \
&& mv "glibc-bin-${GLIBC_VERSION}.apk" glibc-bin.apk ;; \
aarch64) curl "https://raw.githubusercontent.com/squishyu/alpine-pkg-glibc-aarch64-bin/master/glibc-${GLIBC_VERSION_AARCH64}.apk" \
-fsSLO \
--compressed \
--retry 5 \
|| (echo "error: failed to download: glibc v${GLIBC_VERSION_AARCH64}" && exit 1) \
&& mv "glibc-${GLIBC_VERSION_AARCH64}.apk" glibc.apk \
&& curl "https://raw.githubusercontent.com/squishyu/alpine-pkg-glibc-aarch64-bin/master/glibc-bin-${GLIBC_VERSION_AARCH64}.apk" \
-fsSLO \
--compressed \
--retry 5 \
|| (echo "error: failed to download: glibc-bin v${GLIBC_VERSION_AARCH64}" && exit 1) \
&& mv "glibc-bin-${GLIBC_VERSION_AARCH64}.apk" glibc-bin.apk ;; \
*) echo "error: unsupported architecture '$arch'"; exit 1 ;; \
esac
&& chmod +x /usr/local/bin/bun
FROM alpine:3.18
FROM alpine:3.20
# Disable the runtime transpiler cache by default inside Docker containers.
# On ephemeral containers, the cache is not useful
@@ -107,10 +62,8 @@ COPY docker-entrypoint.sh /usr/local/bin/
RUN --mount=type=bind,from=build,source=/tmp,target=/tmp \
addgroup -g 1000 bun \
&& adduser -u 1000 -G bun -s /bin/sh -D bun \
&& apk --no-cache --force-overwrite --allow-untrusted add \
/tmp/glibc.apk \
/tmp/glibc-bin.apk \
&& ln -s /usr/local/bin/bun /usr/local/bin/bunx \
&& apk add libgcc libstdc++ \
&& which bun \
&& which bunx \
&& bun --version

View File

@@ -75,7 +75,7 @@ To instead throw an error when a parameter is missing and allow binding without
import { Database } from "bun:sqlite";
const strict = new Database(
":memory:",
":memory:",
{ strict: true }
);
@@ -177,7 +177,7 @@ const query = db.prepare("SELECT * FROM foo WHERE bar = ?");
## WAL mode
SQLite supports [write-ahead log mode](https://www.sqlite.org/wal.html) (WAL) which dramatically improves performance, especially in situations with many concurrent writes. It's broadly recommended to enable WAL mode for most typical applications.
SQLite supports [write-ahead log mode](https://www.sqlite.org/wal.html) (WAL) which dramatically improves performance, especially in situations with many concurrent readers and a single writer. It's broadly recommended to enable WAL mode for most typical applications.
To enable WAL mode, run this pragma query at the beginning of your application:
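The pragma itself falls outside this hunk; a minimal sketch of what enabling WAL looks like with `bun:sqlite` (illustrative, not the file's literal snippet):

```ts
import { Database } from "bun:sqlite";

const db = new Database("mydb.sqlite");
// Switch the database file to write-ahead logging; the setting persists on disk.
db.run("PRAGMA journal_mode = WAL;");
```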

View File

@@ -55,6 +55,51 @@ $ bun test ./test/specific-file.test.ts
The test runner runs all tests in a single process. It loads all `--preload` scripts (see [Lifecycle](https://bun.sh/docs/test/lifecycle) for details), then runs all tests. If a test fails, the test runner will exit with a non-zero exit code.
## CI/CD integration
`bun test` supports a variety of CI/CD integrations.
### GitHub Actions
`bun test` automatically detects if it's running inside GitHub Actions and will emit GitHub Actions annotations to the console directly.
No configuration is needed, other than installing `bun` in the workflow and running `bun test`.
#### How to install `bun` in a GitHub Actions workflow
To use `bun test` in a GitHub Actions workflow, add the following step:
```yaml
jobs:
build:
name: build-app
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Install bun
uses: oven-sh/setup-bun@v2
- name: Install dependencies # (assuming your project has dependencies)
run: bun install # You can use npm/yarn/pnpm instead if you prefer
- name: Run tests
run: bun test
```
From there, you'll get GitHub Actions annotations.
### JUnit XML reports (GitLab, etc.)
To use `bun test` with a JUnit XML reporter, you can use the `--reporter=junit` in combination with `--reporter-outfile`.
```sh
$ bun test --reporter=junit --reporter-outfile=./bun.xml
```
This will continue to output to stdout/stderr as usual, and also write a JUnit
XML report to the given path at the very end of the test run.
JUnit XML is a popular format for reporting test results in CI/CD pipelines.
## Timeouts
Use the `--timeout` flag to specify a _per-test_ timeout in milliseconds. If a test times out, it will be marked as failed. The default value is `5000`.
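Alongside the CLI flag, recent versions of `bun:test` also accept a Jest-style per-test timeout as a third argument; a small sketch, assuming the default 5000 ms is too short for one test:

```ts
import { expect, test } from "bun:test";

// Give this slow test 10 seconds instead of the default 5000 ms.
test("slow fetch", async () => {
  const res = await fetch("https://example.com");
  expect(res.ok).toBe(true);
}, 10_000);
```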
@@ -81,7 +126,7 @@ Use the `--bail` flag to abort the test run early after a pre-determined number
$ bun test --bail
# bail after 10 failures
$ bun test --bail 10
$ bun test --bail=10
```
## Watch mode

View File

@@ -1,11 +0,0 @@
- pages
- auto-bundle dependencies
- pages is function that returns a list of pages?
- plugins for svelte and vue
- custom loaders
- HMR
- server endpoints
```ts
Bun.serve({});
```

View File

@@ -1,31 +0,0 @@
To create a new React app:
```bash
$ bun create react ./app
$ cd app
$ bun dev # start dev server
```
To use an existing React app:
```bash
$ bun add -d react-refresh # install React Fast Refresh
$ bun bun ./src/index.js # generate a bundle for your entry point(s)
$ bun dev # start the dev server
```
From there, Bun relies on the filesystem for mapping dev server paths to source files. All URL paths are relative to the project root (where `package.json` is located).
Here are examples of routing source code file paths:
| Dev Server URL | File Path (relative to cwd) |
| -------------------------- | --------------------------- |
| /src/components/Button.tsx | src/components/Button.tsx |
| /src/index.tsx | src/index.tsx |
| /pages/index.js | pages/index.js |
You do not need to include file extensions in `import` paths. CommonJS-style import paths without the file extension work.
You can override the public directory by passing `--public-dir="path-to-folder"`.
If no directory is specified and `./public/` doesn't exist, Bun will try `./static/`. If `./static/` does not exist either, Bun won't serve from a public directory. If you pass `--public-dir=./` Bun will serve from the current directory, but it will check the current directory last instead of first.

View File

@@ -1,77 +0,0 @@
## With `bun dev`
When importing CSS in JavaScript-like loaders, CSS is treated special.
By default, Bun will transform a statement like this:
```js
import "../styles/global.css";
```
### When `platform` is `browser`
```js
globalThis.document?.dispatchEvent(
new CustomEvent("onimportcss", {
detail: "http://localhost:3000/styles/globals.css",
}),
);
```
An event handler for turning that into a `<link>` is automatically registered when HMR is enabled. That event handler can be turned off either in a framework's `package.json` or by setting `globalThis["Bun_disableCSSImports"] = true;` in client-side code. Additionally, you can get a list of every .css file imported this way via `globalThis["__BUN"].allImportedStyles`.
### When `platform` is `bun`
```js
//@import url("http://localhost:3000/styles/globals.css");
```
Additionally, Bun exposes an API for SSR/SSG that returns a flat list of URLs to css files imported. That function is `Bun.getImportedStyles()`.
```ts
// This specifically is for "framework" in package.json when loaded via `bun dev`
// This API needs to be changed somewhat to work more generally with Bun.js
// Initially, you could only use Bun.js through `bun dev`
// and this API was created at that time
addEventListener("fetch", async (event: FetchEvent) => {
let route = Bun.match(event);
const App = await import("pages/_app");
// This returns all .css files that were imported in the line above.
// It's recursive, so any file that imports a CSS file will be included.
const appStylesheets = bun.getImportedStyles();
// ...rest of code
});
```
This is useful for preventing flash of unstyled content.
## With `bun bun`
Bun bundles `.css` files imported via `@import` into a single file. It doesn't auto-prefix or minify CSS today. Multiple `.css` files imported in one JavaScript file will _not_ be bundled into one file. You'll have to import those from a `.css` file.
This input:
```css
@import url("./hi.css");
@import url("./hello.css");
@import url("./yo.css");
```
Becomes:
```css
/* hi.css */
/* ...contents of hi.css */
/* hello.css */
/* ...contents of hello.css */
/* yo.css */
/* ...contents of yo.css */
```
## CSS runtime
To support hot CSS reloading, Bun inserts `@supports` annotations into CSS that tag which files a stylesheet is composed of. Browsers ignore this, so it doesn't impact styles.
By default, Bun's runtime code automatically listens to `onimportcss` and will insert the `event.detail` into a `<link rel="stylesheet" href={${event.detail}}>` if there is no existing `link` tag with that stylesheet. That's how Bun's equivalent of `style-loader` works.

View File

@@ -1,26 +0,0 @@
## Creating a Discord bot with Bun
Discord bots perform actions in response to _application commands_. There are 3 types of commands accessible in different interfaces: the chat input, a message's context menu (top-right menu or right-clicking in a message), and a user's context menu (right-clicking on a user).
To get started you can use the interactions template:
```bash
bun create discord-interactions my-interactions-bot
cd my-interactions-bot
```
If you don't have a Discord bot/application yet, you can create one [here (https://discord.com/developers/applications/me)](https://discord.com/developers/applications/me).
Invite bot to your server by visiting `https://discord.com/api/oauth2/authorize?client_id=<your_application_id>&scope=bot%20applications.commands`
Afterwards you will need to get your bot's token, public key, and application id from the application page and put them into `.env.example` file
Then you can run the http server that will handle your interactions:
```bash
$ bun install
$ mv .env.example .env
$ bun run.js # listening on port 1337
```
Discord does not accept an insecure HTTP server, so you will need to provide an SSL certificate or put the interactions server behind a secure reverse proxy. For development, you can use ngrok/cloudflare tunnel to expose local ports as secure URL.

View File

@@ -63,4 +63,4 @@ process.on("exit", kill);
---
At the time of writing, Bun hasn't implemented the `node:cluster` module yet, but this is a faster, simple, and limited alternative. We will also implement `node:cluster` in the future.
Bun has also implemented the `node:cluster` module, but this is a faster, simple, and limited alternative.

View File

@@ -14,7 +14,7 @@ To bail after a certain threshold of failures, optionally specify a number after
```sh
# bail after 10 failures
$ bun test --bail 10
$ bun test --bail=10
```
---

View File

@@ -57,7 +57,7 @@ Replace `bail` in your Jest config with the `--bail` CLI flag.
``` -->
```sh
$ bun test --bail 3
$ bun test --bail=3
```
---

View File

@@ -44,10 +44,17 @@ test.todo("unimplemented feature", () => {
---
If an implementation is provided, it will be executed and _expected to fail_ by the test runner! If a todo test passes, the `bun test` run will return a non-zero exit code to signal the failure.
If an implementation is provided, it will not be run unless the `--todo` flag is passed. If the `--todo` flag is passed, the test will be executed and _expected to fail_ by the test runner! If a todo test passes, the `bun test` run will return a non-zero exit code to signal the failure.
```sh
$ bun test
$ bun test --todo
my.test.ts:
✗ unimplemented feature
^ this test is marked as todo but passes. Remove `.todo` or check that test is correct.
0 pass
1 fail
1 expect() calls
$ echo $?
1 # this is the exit code of the previous command
```

View File

@@ -73,8 +73,7 @@ There are also image variants for different operating systems.
$ docker pull oven/bun:debian
$ docker pull oven/bun:slim
$ docker pull oven/bun:distroless
# alpine not recommended until #918 is fixed
# $ docker pull oven/bun:alpine
$ docker pull oven/bun:alpine
```
## Checking installation
@@ -190,14 +189,19 @@ For convenience, here are download links for the latest version:
- [`bun-linux-x64.zip`](https://github.com/oven-sh/bun/releases/latest/download/bun-linux-x64.zip)
- [`bun-linux-x64-baseline.zip`](https://github.com/oven-sh/bun/releases/latest/download/bun-linux-x64-baseline.zip)
- [`bun-linux-x64-musl.zip`](https://github.com/oven-sh/bun/releases/latest/download/bun-linux-x64-musl.zip)
- [`bun-linux-x64-musl-baseline.zip`](https://github.com/oven-sh/bun/releases/latest/download/bun-linux-x64-musl-baseline.zip)
- [`bun-windows-x64.zip`](https://github.com/oven-sh/bun/releases/latest/download/bun-windows-x64.zip)
- [`bun-windows-x64-baseline.zip`](https://github.com/oven-sh/bun/releases/latest/download/bun-windows-x64-baseline.zip)
- [`bun-darwin-aarch64.zip`](https://github.com/oven-sh/bun/releases/latest/download/bun-darwin-aarch64.zip)
- [`bun-linux-aarch64.zip`](https://github.com/oven-sh/bun/releases/latest/download/bun-linux-aarch64.zip)
- [`bun-linux-aarch64-musl.zip`](https://github.com/oven-sh/bun/releases/latest/download/bun-linux-aarch64-musl.zip)
- [`bun-darwin-x64.zip`](https://github.com/oven-sh/bun/releases/latest/download/bun-darwin-x64.zip)
- [`bun-darwin-x64-baseline.zip`](https://github.com/oven-sh/bun/releases/latest/download/bun-darwin-x64-baseline.zip)
The `baseline` binaries are built for older CPUs which may not support AVX2 instructions. If you run into an "Illegal Instruction" error when running Bun, try using the `baseline` binaries instead. Bun's install scripts automatically choose the correct binary for your system which helps avoid this issue. Baseline builds are slower than regular builds, so use them only if necessary.
The `musl` binaries are built for distributions that do not ship with the glibc libraries by default, instead relying on musl. The two most popular distros are Void Linux and Alpine Linux, with the latter used heavily in Docker containers. If you encounter an error like the following: `bun: /lib/x86_64-linux-gnu/libm.so.6: version GLIBC_2.29' not found (required by bun)`, try using the musl binary. Bun's install script automatically chooses the correct binary for your system.
The `baseline` binaries are built for older CPUs which may not support AVX2 instructions. If you run into an "Illegal Instruction" error when running Bun, try using the `baseline` binaries instead. Bun's install scripts automatically chooses the correct binary for your system which helps avoid this issue. Baseline builds are slower than regular builds, so use them only if necessary.
<!--
## Native

View File

@@ -73,15 +73,10 @@ After Visual Studio, you need the following:
**Note** The Zig compiler is automatically downloaded, installed, and updated by the building process.
{% /callout %}
[WinGet](https://learn.microsoft.com/windows/package-manager/winget) or [Scoop](https://scoop.sh) can be used to install these remaining tools easily:
[Scoop](https://scoop.sh) can be used to install these remaining tools easily.
{% codetabs group="a" %}
```ps1#WinGet
## Select "Add LLVM to the system PATH for all users" in the LLVM installer
> winget install -i LLVM.LLVM -v 18.1.8 && winget install GoLang.Go Rustlang.Rustup NASM.NASM StrawberryPerl.StrawberryPerl RubyInstallerTeam.Ruby.3.2 OpenJS.NodeJS.LTS Ccache.Ccache
```
```ps1#Scoop
> irm https://get.scoop.sh | iex
> scoop install nodejs-lts go rust nasm ruby perl ccache
@@ -91,20 +86,16 @@ After Visual Studio, you need the following:
{% /codetabs %}
{% callout %}
Please do not use WinGet/other package manager for these, as you will likely install Strawberry Perl instead of a more minimal installation of Perl. Strawberry Perl includes many other utilities that get installed into `$Env:PATH` that will conflict with MSVC and break the build.
{% /callout %}
If you intend on building WebKit locally (optional), you should install these packages:
{% codetabs group="a" %}
```ps1#WinGet
> winget install ezwinports.make Cygwin.Cygwin Python.Python.3.12
```
```ps1#Scoop
> scoop install make cygwin python
```
{% /codetabs %}
From here on out, it is **expected you use a PowerShell Terminal with `.\scripts\vs-shell.ps1` sourced**. This script is available in the Bun repository and can be loaded by executing it:
```ps1

View File

@@ -1,4 +0,0 @@
# RFCs
| Number | Name | Issue |
| ------ | ---- | ----- |

View File

@@ -97,7 +97,7 @@ test.skip("wat", () => {
## `test.todo`
Mark a test as a todo with `test.todo`. These tests _will_ be run, and the test runner will expect them to fail. If they pass, you will be prompted to mark it as a regular test.
Mark a test as a todo with `test.todo`. These tests will not be run.
```ts
import { expect, test } from "bun:test";
@@ -107,12 +107,22 @@ test.todo("fix this", () => {
});
```
To exclusively run tests marked as _todo_, use `bun test --todo`.
To run todo tests and find any which are passing, use `bun test --todo`.
```sh
$ bun test --todo
my.test.ts:
✗ unimplemented feature
^ this test is marked as todo but passes. Remove `.todo` or check that test is correct.
0 pass
1 fail
1 expect() calls
```
With this flag, failing todo tests will not cause an error, but todo tests which pass will be marked as failing so you can remove the todo mark or
fix the test.
## `test.only`
To run a particular test or suite of tests use `test.only()` or `describe.only()`. Once declared, running `bun test --only` will only execute tests/suites that have been marked with `.only()`. Running `bun test` without the `--only` option with `test.only()` declared will result in all tests in the given suite being executed _up to_ the test with `.only()`. `describe.only()` functions the same in both execution scenarios.

View File

@@ -1,67 +0,0 @@
## Troubleshooting
### Bun not running on an M1 (or Apple Silicon)
If you see a message like this
> [1] 28447 killed bun create next ./test
It most likely means you're running Bun's x64 version on Apple Silicon. This happens if Bun is running via Rosetta. Rosetta is unable to emulate AVX2 instructions, which Bun indirectly uses.
The fix is to ensure you installed a version of Bun built for Apple Silicon.
### error: Unexpected
If you see an error like this:
![image](https://user-images.githubusercontent.com/709451/141210854-89434678-d21b-42f4-b65a-7df3b785f7b9.png)
It usually means the max number of open file descriptors is being explicitly set to a low number. By default, Bun requests the max number of file descriptors available (which on macOS, is something like 32,000). But, if you previously ran into ulimit issues with, e.g., Chokidar, someone on The Internet may have advised you to run `ulimit -n 8192`.
That advice unfortunately **lowers** the hard limit to `8192`. This can be a problem in large repositories or projects with lots of dependencies. Chokidar (and other watchers) don't seem to call `setrlimit`, which means they're reliant on the (much lower) soft limit.
To fix this issue:
1. Remove any scripts that call `ulimit -n` and restart your shell.
2. Try again, and if the error still occurs, try setting `ulimit -n` to an absurdly high number, such as `ulimit -n 2147483646`
3. Try again, and if that still doesn't fix it, open an issue
### Unzip is required
Unzip is required to install Bun on Linux. You can use one of the following commands to install `unzip`:
#### Debian / Ubuntu / Mint
```sh
$ sudo apt install unzip
```
#### RedHat / CentOS / Fedora
```sh
$ sudo dnf install unzip
```
#### Arch / Manjaro
```sh
$ sudo pacman -S unzip
```
#### OpenSUSE
```sh
$ sudo zypper install unzip
```
### bun install is stuck
Please run `bun install --verbose 2> logs.txt` and send them to me in Bun's discord. If you're on Linux, it would also be helpful if you run `sudo perf trace bun install --silent` and attach the logs.
### Uninstalling
Bun's binary and install cache is located in `~/.bun` by default. To uninstall bun, delete this directory and edit your shell config (`.bashrc`, `.zshrc`, or similar) to remove `~/.bun/bin` from the `$PATH` variable.
```sh
$ rm -rf ~/.bun # make sure to remove ~/.bun/bin from $PATH
```

View File

@@ -1,7 +1,7 @@
{
"private": true,
"name": "bun",
"version": "1.1.35",
"version": "1.1.39",
"workspaces": [
"./packages/bun-types"
],

View File

@@ -1,6 +1,7 @@
{
"name": "bun-debug-adapter-protocol",
"version": "0.0.1",
"type": "module",
"dependencies": {
"semver": "^7.5.4",
"source-map-js": "^1.0.2"

View File

@@ -1,19 +1,19 @@
import type { InspectorEventMap } from "../../../bun-inspector-protocol/src/inspector";
import type { JSC } from "../../../bun-inspector-protocol/src/protocol";
import type { DAP } from "../protocol";
// @ts-ignore
import { ChildProcess, spawn } from "node:child_process";
import { EventEmitter } from "node:events";
import { AddressInfo, createServer } from "node:net";
import { AddressInfo, createServer, Socket } from "node:net";
import * as path from "node:path";
import { remoteObjectToString, WebSocketInspector } from "../../../bun-inspector-protocol/index";
import { randomUnixPath, TCPSocketSignal, UnixSignal } from "./signal";
import { Location, SourceMap } from "./sourcemap";
import { remoteObjectToString, WebSocketInspector } from "../../../bun-inspector-protocol/index.ts";
import type { Inspector, InspectorEventMap } from "../../../bun-inspector-protocol/src/inspector/index.d.ts";
import { NodeSocketInspector } from "../../../bun-inspector-protocol/src/inspector/node-socket.ts";
import type { JSC } from "../../../bun-inspector-protocol/src/protocol/index.d.ts";
import type { DAP } from "../protocol/index.d.ts";
import { randomUnixPath, TCPSocketSignal, UnixSignal } from "./signal.ts";
import { Location, SourceMap } from "./sourcemap.ts";
export async function getAvailablePort(): Promise<number> {
const server = createServer();
server.listen(0);
return new Promise((resolve, reject) => {
return new Promise(resolve => {
server.on("listening", () => {
const { port } = server.address() as AddressInfo;
server.close(() => {
@@ -105,7 +105,18 @@ const capabilities: DAP.Capabilities = {
type InitializeRequest = DAP.InitializeRequest & {
supportsConfigurationDoneRequest?: boolean;
};
enableControlFlowProfiler?: boolean;
enableDebugger?: boolean;
} & (
| {
enableLifecycleAgentReporter?: false;
sendImmediatePreventExit?: false;
}
| {
enableLifecycleAgentReporter: true;
sendImmediatePreventExit?: boolean;
}
);
type LaunchRequest = DAP.LaunchRequest & {
runtime?: string;
@@ -231,10 +242,14 @@ function normalizeSourcePath(sourcePath: string, untitledDocPath?: string, bunEv
return path.normalize(sourcePath);
}
export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements IDebugAdapter {
export abstract class BaseDebugAdapter<T extends Inspector = Inspector>
extends EventEmitter<DebugAdapterEventMap>
implements IDebugAdapter
{
protected readonly inspector: T;
protected options?: DebuggerOptions;
#threadId: number;
#inspector: WebSocketInspector;
#process?: ChildProcess;
#sourceId: number;
#pendingSources: Map<string, ((source: Source) => void)[]>;
#sources: Map<string | number, Source>;
@@ -247,20 +262,21 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
#targets: Map<number, Target>;
#variableId: number;
#variables: Map<number, Variable>;
#initialized?: InitializeRequest;
#options?: DebuggerOptions;
#untitledDocPath?: string;
#bunEvalPath?: string;
#initialized?: InitializeRequest;
constructor(url?: string | URL, untitledDocPath?: string, bunEvalPath?: string) {
protected constructor(inspector: T, untitledDocPath?: string, bunEvalPath?: string) {
super();
this.#untitledDocPath = untitledDocPath;
this.#bunEvalPath = bunEvalPath;
this.#threadId = threadId++;
this.#inspector = new WebSocketInspector(url);
const emit = this.#inspector.emit.bind(this.#inspector);
this.#inspector.emit = (event, ...args) => {
this.inspector = inspector;
const emit = this.inspector.emit.bind(this.inspector);
this.inspector.emit = (event, ...args) => {
let sent = false;
sent ||= emit(event, ...args);
sent ||= this.emit(event, ...(args as any));
sent ||= this.emit(event as keyof JSC.EventMap, ...(args as any));
return sent;
};
this.#sourceId = 1;
@@ -274,26 +290,27 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
this.#targets = new Map();
this.#variableId = 1;
this.#variables = new Map();
this.#untitledDocPath = untitledDocPath;
this.#bunEvalPath = bunEvalPath;
}
/**
* Gets the inspector url.
* Gets the inspector url. This is deprecated and exists for compat.
* @deprecated You should get the inspector directly (with .getInspector()), and if it's a WebSocketInspector you can access `.url` directly.
*/
get url(): string {
return this.#inspector.url;
// This code has been migrated from a time when the inspector was always a WebSocketInspector.
if (this.inspector instanceof WebSocketInspector) {
return this.inspector.url;
}
throw new Error("Inspector does not offer a URL");
}
/**
* Starts the inspector.
* @param url the inspector url
* @returns if the inspector was able to connect
*/
start(url?: string): Promise<boolean> {
return this.#attach({ url });
public getInspector() {
return this.inspector;
}
abstract start(...args: unknown[]): Promise<boolean>;
/**
* Sends a request to the JavaScript inspector.
* @param method the method name
@@ -306,7 +323,7 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
* console.log(result.value); // 2
*/
async send<M extends keyof JSC.ResponseMap>(method: M, params?: JSC.RequestMap[M]): Promise<JSC.ResponseMap[M]> {
return this.#inspector.send(method, params);
return this.inspector.send(method, params);
}
/**
@@ -347,7 +364,7 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
return sent;
}
#emit<E extends keyof DAP.EventMap>(event: E, body?: DAP.EventMap[E]): void {
protected emitAdapterEvent<E extends keyof DAP.EventMap>(event: E, body?: DAP.EventMap[E]): void {
this.emit("Adapter.event", {
type: "event",
seq: 0,
@@ -359,7 +376,7 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
#emitAfterResponse<E extends keyof DAP.EventMap>(event: E, body?: DAP.EventMap[E]): void {
this.once("Adapter.response", () => {
process.nextTick(() => {
this.#emit(event, body);
this.emitAdapterEvent(event, body);
});
});
}
@@ -437,19 +454,37 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
this.emit(`Adapter.${name}` as keyof DebugAdapterEventMap, body);
}
initialize(request: InitializeRequest): DAP.InitializeResponse {
public initialize(request: InitializeRequest): DAP.InitializeResponse {
this.#initialized = request;
this.send("Inspector.enable");
this.send("Runtime.enable");
this.send("Console.enable");
this.send("Debugger.enable").catch(error => {
const { message } = unknownToError(error);
if (message !== "Debugger domain already enabled") {
throw error;
if (request.enableControlFlowProfiler) {
this.send("Runtime.enableControlFlowProfiler");
}
if (request.enableLifecycleAgentReporter) {
this.send("LifecycleReporter.enable");
if (request.sendImmediatePreventExit) {
this.send("LifecycleReporter.preventExit");
}
});
this.send("Debugger.setAsyncStackTraceDepth", { depth: 200 });
}
// use !== false because by default if unspecified we want to enable the debugger
// and this option didn't exist beforehand, so we can't make it non-optional
if (request.enableDebugger !== false) {
this.send("Debugger.enable").catch(error => {
const { message } = unknownToError(error);
if (message !== "Debugger domain already enabled") {
throw error;
}
});
this.send("Debugger.setAsyncStackTraceDepth", { depth: 200 });
}
const { clientID, supportsConfigurationDoneRequest } = request;
if (!supportsConfigurationDoneRequest && clientID !== "vscode") {
@@ -463,248 +498,20 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
configurationDone(): void {
// If the client requested that `noDebug` mode be enabled,
// then we need to disable all breakpoints and pause on statements.
const active = !this.#options?.noDebug;
const active = !this.options?.noDebug;
this.send("Debugger.setBreakpointsActive", { active });
// Tell the debugger that it's ready to start execution.
this.send("Inspector.initialized");
}
async launch(request: DAP.LaunchRequest): Promise<void> {
this.#options = { ...request, type: "launch" };
try {
await this.#launch(request);
} catch (error) {
// Some clients, like VSCode, will show a system-level popup when a `launch` request fails.
// Instead, we want to show the error as a sidebar notification.
const { message } = unknownToError(error);
this.#emit("output", {
category: "stderr",
output: `Failed to start debugger.\n${message}`,
});
this.terminate();
}
}
async #launch(request: LaunchRequest): Promise<void> {
const {
runtime = "bun",
runtimeArgs = [],
program,
args = [],
cwd,
env = {},
strictEnv = false,
watchMode = false,
stopOnEntry = false,
__skipValidation = false,
stdin,
} = request;
if (!__skipValidation && !program) {
throw new Error("No program specified");
}
const processArgs = [...runtimeArgs];
if (program === "-" && stdin) {
processArgs.push("--eval", stdin);
} else if (program) {
processArgs.push(program);
}
processArgs.push(...args);
if (program && isTestJavaScript(program) && !runtimeArgs.includes("test")) {
processArgs.unshift("test");
}
if (watchMode && !runtimeArgs.includes("--watch") && !runtimeArgs.includes("--hot")) {
processArgs.unshift(watchMode === "hot" ? "--hot" : "--watch");
}
const processEnv = strictEnv
? {
...env,
}
: {
...process.env,
...env,
};
if (process.platform !== "win32") {
// we're on unix
const url = `ws+unix://${randomUnixPath()}`;
const signal = new UnixSignal();
signal.on("Signal.received", () => {
this.#attach({ url });
});
this.once("Adapter.terminated", () => {
signal.close();
});
const query = stopOnEntry ? "break=1" : "wait=1";
processEnv["BUN_INSPECT"] = `${url}?${query}`;
processEnv["BUN_INSPECT_NOTIFY"] = signal.url;
// This is probably not correct, but it's the best we can do for now.
processEnv["FORCE_COLOR"] = "1";
processEnv["BUN_QUIET_DEBUG_LOGS"] = "1";
processEnv["BUN_DEBUG_QUIET_LOGS"] = "1";
const started = await this.#spawn({
command: runtime,
args: processArgs,
env: processEnv,
cwd,
isDebugee: true,
});
if (!started) {
throw new Error("Program could not be started.");
}
} else {
// we're on windows
// Create TCPSocketSignal
const url = `ws://127.0.0.1:${await getAvailablePort()}/${getRandomId()}`; // 127.0.0.1 so it resolves correctly on windows
const signal = new TCPSocketSignal(await getAvailablePort());
signal.on("Signal.received", async () => {
this.#attach({ url });
});
this.once("Adapter.terminated", () => {
signal.close();
});
const query = stopOnEntry ? "break=1" : "wait=1";
processEnv["BUN_INSPECT"] = `${url}?${query}`;
processEnv["BUN_INSPECT_NOTIFY"] = signal.url; // 127.0.0.1 so it resolves correctly on windows
// This is probably not correct, but it's the best we can do for now.
processEnv["FORCE_COLOR"] = "1";
processEnv["BUN_QUIET_DEBUG_LOGS"] = "1";
processEnv["BUN_DEBUG_QUIET_LOGS"] = "1";
const started = await this.#spawn({
command: runtime,
args: processArgs,
env: processEnv,
cwd,
isDebugee: true,
});
if (!started) {
throw new Error("Program could not be started.");
}
}
}
async #spawn(options: {
command: string;
args?: string[];
cwd?: string;
env?: Record<string, string | undefined>;
isDebugee?: boolean;
}): Promise<boolean> {
const { command, args = [], cwd, env, isDebugee } = options;
const request = { command, args, cwd, env };
this.emit("Process.requested", request);
let subprocess: ChildProcess;
try {
subprocess = spawn(command, args, {
...request,
stdio: ["ignore", "pipe", "pipe"],
});
} catch (cause) {
this.emit("Process.exited", new Error("Failed to spawn process", { cause }), null);
return false;
}
subprocess.on("spawn", () => {
this.emit("Process.spawned", subprocess);
if (isDebugee) {
this.#process = subprocess;
this.#emit("process", {
name: `${command} ${args.join(" ")}`,
systemProcessId: subprocess.pid,
isLocalProcess: true,
startMethod: "launch",
});
}
});
subprocess.on("exit", (code, signal) => {
this.emit("Process.exited", code, signal);
if (isDebugee) {
this.#process = undefined;
this.#emit("exited", {
exitCode: code ?? -1,
});
this.#emit("terminated");
}
});
subprocess.stdout?.on("data", data => {
this.emit("Process.stdout", data.toString());
});
subprocess.stderr?.on("data", data => {
this.emit("Process.stderr", data.toString());
});
return new Promise(resolve => {
subprocess.on("spawn", () => resolve(true));
subprocess.on("exit", () => resolve(false));
subprocess.on("error", () => resolve(false));
});
}
async attach(request: AttachRequest): Promise<void> {
this.#options = { ...request, type: "attach" };
try {
await this.#attach(request);
} catch (error) {
// Some clients, like VSCode, will show a system-level popup when a `launch` request fails.
// Instead, we want to show the error as a sidebar notification.
const { message } = unknownToError(error);
this.#emit("output", {
category: "stderr",
output: `Failed to start debugger.\n${message}`,
});
this.terminate();
}
}
async #attach(request: AttachRequest): Promise<boolean> {
const { url } = request;
for (let i = 0; i < 3; i++) {
const ok = await this.#inspector.start(url);
if (ok) {
return true;
}
await new Promise(resolve => setTimeout(resolve, 100 * i));
}
return false;
}
// Required so all implementations have a method that .terminate() always calls.
// This is useful because we don't want any implementors to forget to handle exiting the JS process.
protected abstract exitJSProcess(): void;
terminate(): void {
if (!this.#process?.kill()) {
this.#evaluate({
expression: "process.exit(0)",
});
}
this.#emit("terminated");
this.exitJSProcess();
this.emitAdapterEvent("terminated");
}
disconnect(request: DAP.DisconnectRequest): void {
@@ -1077,7 +884,7 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
}
for (const breakpoint of breakpoints) {
this.#emit("breakpoint", {
this.emitAdapterEvent("breakpoint", {
reason: "removed",
breakpoint,
});
@@ -1316,7 +1123,7 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
const callFrameId = this.#getCallFrameId(frameId);
const objectGroup = callFrameId ? "debugger" : context;
const { result, wasThrown } = await this.#evaluate({
const { result, wasThrown } = await this.evaluateInternal({
expression,
objectGroup,
callFrameId,
@@ -1337,7 +1144,7 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
};
}
async #evaluate(options: {
protected async evaluateInternal(options: {
expression: string;
objectGroup?: string;
callFrameId?: string;
@@ -1361,7 +1168,7 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
const callFrameId = this.#getCallFrameId(frameId);
const { expression, hint } = completionToExpression(text);
const { result, wasThrown } = await this.#evaluate({
const { result, wasThrown } = await this.evaluateInternal({
expression: expression || "this",
callFrameId,
objectGroup: "repl",
@@ -1393,33 +1200,29 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
}
["Inspector.connected"](): void {
this.#emit("output", {
this.emitAdapterEvent("output", {
category: "debug console",
output: "Debugger attached.\n",
});
this.#emit("initialized");
this.emitAdapterEvent("initialized");
}
async ["Inspector.disconnected"](error?: Error): Promise<void> {
this.#emit("output", {
this.emitAdapterEvent("output", {
category: "debug console",
output: "Debugger detached.\n",
});
if (error) {
const { message } = error;
this.#emit("output", {
this.emitAdapterEvent("output", {
category: "stderr",
output: `${message}\n`,
});
}
this.#reset();
if (this.#process?.exitCode !== null) {
this.#emit("terminated");
}
this.resetInternal();
}
async ["Debugger.scriptParsed"](event: JSC.Debugger.ScriptParsedEvent): Promise<void> {
@@ -1470,7 +1273,7 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
return;
}
this.#emit("output", {
this.emitAdapterEvent("output", {
category: "stderr",
output: errorMessage,
line: this.#lineFrom0BasedLine(errorLine),
@@ -1498,7 +1301,7 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
const breakpoint = breakpoints[i];
const oldBreakpoint = oldBreakpoints[i];
this.#emit("breakpoint", {
this.emitAdapterEvent("breakpoint", {
reason: "changed",
breakpoint: {
...breakpoint,
@@ -1581,7 +1384,7 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
}
}
this.#emit("stopped", {
this.emitAdapterEvent("stopped", {
threadId: this.#threadId,
reason: this.#stopped,
hitBreakpointIds,
@@ -1598,20 +1401,20 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
}
}
this.#emit("continued", {
this.emitAdapterEvent("continued", {
threadId: this.#threadId,
});
}
["Process.stdout"](output: string): void {
this.#emit("output", {
this.emitAdapterEvent("output", {
category: "debug console",
output,
});
}
["Process.stderr"](output: string): void {
this.#emit("output", {
this.emitAdapterEvent("output", {
category: "debug console",
output,
});
@@ -1695,8 +1498,8 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
// If the path changed or the source has a source reference,
// the old source should be marked as removed.
if (path !== oldPath || sourceReference) {
this.#emit("loadedSource", {
if (path !== oldPath /*|| sourceReference*/) {
this.emitAdapterEvent("loadedSource", {
reason: "removed",
source: oldSource,
});
@@ -1706,7 +1509,7 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
this.#sources.set(sourceId, source);
this.#sources.set(scriptId, source);
this.#emit("loadedSource", {
this.emitAdapterEvent("loadedSource", {
// If the reason is "changed", the source will be retrieved using
// the `source` command, which is why it cannot be set when `path` is present.
reason: oldSource && !path ? "changed" : "new",
@@ -1762,9 +1565,9 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
}
// If the source is not present, it may not have been loaded yet.
let resolves = this.#pendingSources.get(sourceId);
let resolves = this.#pendingSources.get(sourceId.toString());
if (!resolves) {
this.#pendingSources.set(sourceId, (resolves = []));
this.#pendingSources.set(sourceId.toString(), (resolves = []));
}
return new Promise(resolve => {
@@ -2016,7 +1819,7 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
const callFrameId = this.#getCallFrameId(frameId);
const objectGroup = callFrameId ? "debugger" : "repl";
const { result, wasThrown } = await this.#evaluate({
const { result, wasThrown } = await this.evaluateInternal({
expression: `${expression} = (${value});`,
objectGroup: "repl",
callFrameId,
@@ -2216,12 +2019,11 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
}
close(): void {
this.#process?.kill();
this.#inspector.close();
this.#reset();
this.inspector.close();
this.resetInternal();
}
#reset(): void {
protected resetInternal(): void {
this.#pendingSources.clear();
this.#sources.clear();
this.#stackFrames.length = 0;
@@ -2232,10 +2034,309 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
this.#functionBreakpoints.clear();
this.#targets.clear();
this.#variables.clear();
this.#options = undefined;
this.options = undefined;
}
}
/**
* Create a debug adapter that connects over a unix/tcp socket. Usually
* in the case of a reverse connection. This is used by the vscode extension.
*
* @warning This will gracefully handle socket closure; you don't need to add extra handling.
*/
export class NodeSocketDebugAdapter extends BaseDebugAdapter<NodeSocketInspector> {
public constructor(socket: Socket, untitledDocPath?: string, bunEvalPath?: string) {
super(new NodeSocketInspector(socket), untitledDocPath, bunEvalPath);
socket.once("close", () => {
this.resetInternal();
});
}
protected exitJSProcess(): void {
this.evaluateInternal({
expression: "process.exit(0)",
});
}
public async start() {
const ok = await this.inspector.start();
return ok;
}
}
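/*
 * Sketch (not part of this diff): how an extension host might hand a reverse
 * connection to this adapter. The socket path and event wiring are assumptions
 * for illustration, not the extension's actual implementation.
 *
 *   import { createServer } from "node:net";
 *
 *   const server = createServer(socket => {
 *     // Each incoming connection is a Bun process dialing back to the editor.
 *     const adapter = new NodeSocketDebugAdapter(socket);
 *     adapter.start().then(ok => {
 *       if (!ok) socket.destroy();
 *     });
 *   });
 *   server.listen("/tmp/bun-debug-adapter.sock");
 */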
/**
* The default debug adapter. Connects via WebSocket
*/
export class WebSocketDebugAdapter extends BaseDebugAdapter<WebSocketInspector> {
#process?: ChildProcess;
public constructor(url?: string | URL, untitledDocPath?: string, bunEvalPath?: string) {
super(new WebSocketInspector(url), untitledDocPath, bunEvalPath);
}
async ["Inspector.disconnected"](error?: Error): Promise<void> {
await super["Inspector.disconnected"](error);
if (this.#process?.exitCode !== null) {
this.emitAdapterEvent("terminated");
}
}
protected exitJSProcess() {
if (!this.#process?.kill()) {
this.evaluateInternal({
expression: "process.exit(0)",
});
}
}
/**
* Starts the inspector.
* @param url the inspector URL; defaults to the one provided in the constructor (if any). If neither is provided, the connection attempt will fail.
* @returns whether the inspector was able to connect
*/
start(url?: string): Promise<boolean> {
return this.#attach({ url });
}
close() {
this.#process?.kill();
super.close();
}
async launch(request: DAP.LaunchRequest): Promise<void> {
this.options = { ...request, type: "launch" };
try {
await this.#launch(request);
} catch (error) {
// Some clients, like VSCode, will show a system-level popup when a `launch` request fails.
// Instead, we want to show the error as a sidebar notification.
const { message } = unknownToError(error);
this.emitAdapterEvent("output", {
category: "stderr",
output: `Failed to start debugger.\n${message}`,
});
this.terminate();
}
}
async #launch(request: LaunchRequest): Promise<void> {
const {
runtime = "bun",
runtimeArgs = [],
program,
args = [],
cwd,
env = {},
strictEnv = false,
watchMode = false,
stopOnEntry = false,
__skipValidation = false,
stdin,
} = request;
if (!__skipValidation && !program) {
throw new Error("No program specified");
}
const processArgs = [...runtimeArgs];
if (program === "-" && stdin) {
processArgs.push("--eval", stdin);
} else if (program) {
processArgs.push(program);
}
processArgs.push(...args);
if (program && isTestJavaScript(program) && !runtimeArgs.includes("test")) {
processArgs.unshift("test");
}
if (watchMode && !runtimeArgs.includes("--watch") && !runtimeArgs.includes("--hot")) {
processArgs.unshift(watchMode === "hot" ? "--hot" : "--watch");
}
const processEnv = strictEnv
? {
...env,
}
: {
...process.env,
...env,
};
if (process.platform !== "win32") {
// we're on unix
const url = `ws+unix://${randomUnixPath()}`;
const signal = new UnixSignal();
signal.on("Signal.received", () => {
this.#attach({ url });
});
this.once("Adapter.terminated", () => {
signal.close();
});
const query = stopOnEntry ? "break=1" : "wait=1";
processEnv["BUN_INSPECT"] = `${url}?${query}`;
processEnv["BUN_INSPECT_NOTIFY"] = signal.url;
// This is probably not correct, but it's the best we can do for now.
processEnv["FORCE_COLOR"] = "1";
processEnv["BUN_QUIET_DEBUG_LOGS"] = "1";
processEnv["BUN_DEBUG_QUIET_LOGS"] = "1";
const started = await this.#spawn({
command: runtime,
args: processArgs,
env: processEnv,
cwd,
isDebugee: true,
});
if (!started) {
throw new Error("Program could not be started.");
}
} else {
// we're on windows
// Create TCPSocketSignal
const url = `ws://127.0.0.1:${await getAvailablePort()}/${getRandomId()}`; // 127.0.0.1 so it resolves correctly on windows
const signal = new TCPSocketSignal(await getAvailablePort());
signal.on("Signal.received", async () => {
this.#attach({ url });
});
this.once("Adapter.terminated", () => {
signal.close();
});
const query = stopOnEntry ? "break=1" : "wait=1";
processEnv["BUN_INSPECT"] = `${url}?${query}`;
processEnv["BUN_INSPECT_NOTIFY"] = signal.url; // 127.0.0.1 so it resolves correctly on windows
// This is probably not correct, but it's the best we can do for now.
processEnv["FORCE_COLOR"] = "1";
processEnv["BUN_QUIET_DEBUG_LOGS"] = "1";
processEnv["BUN_DEBUG_QUIET_LOGS"] = "1";
const started = await this.#spawn({
command: runtime,
args: processArgs,
env: processEnv,
cwd,
isDebugee: true,
});
if (!started) {
throw new Error("Program could not be started.");
}
}
}
async #spawn(options: {
command: string;
args?: string[];
cwd?: string;
env?: Record<string, string | undefined>;
isDebugee?: boolean;
}): Promise<boolean> {
const { command, args = [], cwd, env, isDebugee } = options;
const request = { command, args, cwd, env };
this.emit("Process.requested", request);
let subprocess: ChildProcess;
try {
subprocess = spawn(command, args, {
...request,
stdio: ["ignore", "pipe", "pipe"],
});
} catch (cause) {
this.emit("Process.exited", new Error("Failed to spawn process", { cause }), null);
return false;
}
subprocess.on("spawn", () => {
this.emit("Process.spawned", subprocess);
if (isDebugee) {
this.#process = subprocess;
this.emitAdapterEvent("process", {
name: `${command} ${args.join(" ")}`,
systemProcessId: subprocess.pid,
isLocalProcess: true,
startMethod: "launch",
});
}
});
subprocess.on("exit", (code, signal) => {
this.emit("Process.exited", code, signal);
if (isDebugee) {
this.#process = undefined;
this.emitAdapterEvent("exited", {
exitCode: code ?? -1,
});
this.emitAdapterEvent("terminated");
}
});
subprocess.stdout?.on("data", data => {
this.emit("Process.stdout", data.toString());
});
subprocess.stderr?.on("data", data => {
this.emit("Process.stderr", data.toString());
});
return new Promise(resolve => {
subprocess.on("spawn", () => resolve(true));
subprocess.on("exit", () => resolve(false));
subprocess.on("error", () => resolve(false));
});
}
async attach(request: AttachRequest): Promise<void> {
this.options = { ...request, type: "attach" };
try {
await this.#attach(request);
} catch (error) {
// Some clients, like VSCode, will show a system-level popup when a `launch` request fails.
// Instead, we want to show the error as a sidebar notification.
const { message } = unknownToError(error);
this.emitAdapterEvent("output", {
category: "stderr",
output: `Failed to start debugger.\n${message}`,
});
this.terminate();
}
}
async #attach(request: AttachRequest): Promise<boolean> {
const { url } = request;
for (let i = 0; i < 3; i++) {
const ok = await this.inspector.start(url);
if (ok) {
return true;
}
await new Promise(resolve => setTimeout(resolve, 100 * i));
}
return false;
}
}
export const DebugAdapter = WebSocketDebugAdapter;
function stoppedReason(reason: JSC.Debugger.PausedEvent["reason"]): DAP.StoppedEvent["reason"] {
switch (reason) {
case "Breakpoint":

View File

@@ -0,0 +1,117 @@
import type { Socket } from "node:net";
const enum FramerState {
WaitingForLength,
WaitingForMessage,
}
let socketFramerMessageLengthBuffer: Buffer;
export class SocketFramer {
state: FramerState = FramerState.WaitingForLength;
pendingLength: number = 0;
sizeBuffer: Buffer = Buffer.alloc(4);
sizeBufferIndex: number = 0;
bufferedData: Buffer = Buffer.alloc(0);
socket: Socket;
private onMessage: (message: string | string[]) => void;
constructor(socket: Socket, onMessage: (message: string | string[]) => void) {
this.socket = socket;
this.onMessage = onMessage;
if (!socketFramerMessageLengthBuffer) {
socketFramerMessageLengthBuffer = Buffer.alloc(4);
}
this.reset();
}
reset(): void {
this.state = FramerState.WaitingForLength;
this.bufferedData = Buffer.alloc(0);
this.sizeBufferIndex = 0;
this.sizeBuffer = Buffer.alloc(4);
}
send(data: string): void {
socketFramerMessageLengthBuffer.writeUInt32BE(data.length, 0);
this.socket.write(socketFramerMessageLengthBuffer);
this.socket.write(data);
}
onData(data: Buffer): void {
this.bufferedData = this.bufferedData.length > 0 ? Buffer.concat([this.bufferedData, data]) : data;
let messagesToDeliver: string[] = [];
let position = 0;
while (position < this.bufferedData.length) {
// Need 4 bytes for the length
if (this.bufferedData.length - position < 4) {
break;
}
// Read the length prefix
const messageLength = this.bufferedData.readUInt32BE(position);
// Validate message length
if (messageLength <= 0 || messageLength > 1024 * 1024) {
// 1MB max
// Try to resync by looking for the next valid message
let newPosition = position + 1;
let found = false;
while (newPosition < this.bufferedData.length - 4) {
const testLength = this.bufferedData.readUInt32BE(newPosition);
if (testLength > 0 && testLength <= 1024 * 1024) {
// Verify we can read the full message
if (this.bufferedData.length - newPosition - 4 >= testLength) {
const testMessage = this.bufferedData.toString("utf-8", newPosition + 4, newPosition + 4 + testLength);
if (testMessage.startsWith('{"')) {
position = newPosition;
found = true;
break;
}
}
}
newPosition++;
}
if (!found) {
// Couldn't find a valid message, discard buffer up to this point
this.bufferedData = this.bufferedData.slice(position + 4);
return;
}
continue;
}
// Check if we have the complete message
if (this.bufferedData.length - position - 4 < messageLength) {
break;
}
const message = this.bufferedData.toString("utf-8", position + 4, position + 4 + messageLength);
if (message.startsWith('{"')) {
messagesToDeliver.push(message);
}
position += 4 + messageLength;
}
if (position > 0) {
this.bufferedData =
position < this.bufferedData.length ? this.bufferedData.slice(position) : SocketFramer.emptyBuffer;
}
if (messagesToDeliver.length === 1) {
this.onMessage(messagesToDeliver[0]);
} else if (messagesToDeliver.length > 1) {
this.onMessage(messagesToDeliver);
}
}
private static emptyBuffer = Buffer.from([]);
}
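/*
 * Wire-format sketch (not part of this diff): each frame is a 4-byte big-endian
 * length prefix followed by the UTF-8 payload. `encodeFrame` is a hypothetical
 * helper shown only to document the framing; note that it uses the byte length,
 * whereas SocketFramer.send() above uses the string length, which only matches
 * for ASCII-only payloads.
 *
 *   function encodeFrame(message: string): Buffer {
 *     const payload = Buffer.from(message, "utf-8");
 *     const header = Buffer.alloc(4);
 *     header.writeUInt32BE(payload.length, 0);
 *     return Buffer.concat([header, payload]);
 *   }
 *
 *   // A frame for '{"id":1}' is the 4 bytes 00 00 00 08 followed by the JSON.
 *   encodeFrame('{"id":1}');
 */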

View File

@@ -11,6 +11,8 @@ export type UnixSignalEventMap = {
"Signal.error": [Error];
"Signal.received": [string];
"Signal.closed": [];
"Signal.Socket.closed": [socket: Socket];
"Signal.Socket.connect": [socket: Socket];
};
/**
@@ -21,7 +23,7 @@ export class UnixSignal extends EventEmitter<UnixSignalEventMap> {
#server: Server;
#ready: Promise<void>;
constructor(path?: string | URL) {
constructor(path?: string | URL | undefined) {
super();
this.#path = path ? parseUnixPath(path) : randomUnixPath();
this.#server = createServer();
@@ -29,9 +31,13 @@ export class UnixSignal extends EventEmitter<UnixSignalEventMap> {
this.#server.on("error", error => this.emit("Signal.error", error));
this.#server.on("close", () => this.emit("Signal.closed"));
this.#server.on("connection", socket => {
this.emit("Signal.Socket.connect", socket);
socket.on("data", data => {
this.emit("Signal.received", data.toString());
});
socket.on("close", () => {
this.emit("Signal.Socket.closed", socket);
});
});
this.#ready = new Promise((resolve, reject) => {
this.#server.on("listening", resolve);
@@ -45,7 +51,7 @@ export class UnixSignal extends EventEmitter<UnixSignalEventMap> {
console.log(event, ...args);
}
return super.emit(event, ...args);
return super.emit(event, ...(args as never));
}
/**
@@ -91,6 +97,8 @@ export type TCPSocketSignalEventMap = {
"Signal.error": [Error];
"Signal.closed": [];
"Signal.received": [string];
"Signal.Socket.closed": [socket: Socket];
"Signal.Socket.connect": [socket: Socket];
};
export class TCPSocketSignal extends EventEmitter {
@@ -103,6 +111,8 @@ export class TCPSocketSignal extends EventEmitter {
this.#port = port;
this.#server = createServer((socket: Socket) => {
this.emit("Signal.Socket.connect", socket);
socket.on("data", data => {
this.emit("Signal.received", data.toString());
});
@@ -112,10 +122,14 @@ export class TCPSocketSignal extends EventEmitter {
});
socket.on("close", () => {
this.emit("Signal.closed");
this.emit("Signal.Socket.closed", socket);
});
});
this.#server.on("close", () => {
this.emit("Signal.closed");
});
this.#ready = new Promise((resolve, reject) => {
this.#server.listen(this.#port, () => {
this.emit("Signal.listening");

View File

@@ -1,6 +1,6 @@
import { expect, test } from "bun:test";
import { readFileSync } from "node:fs";
import { SourceMap } from "./sourcemap";
import { SourceMap } from "./sourcemap.js";
test("works without source map", () => {
const sourceMap = getSourceMap("without-sourcemap.js");

View File

@@ -21,7 +21,15 @@ export type Location = {
);
export interface SourceMap {
/**
* Converts a location in the original source to a location in the generated source.
* @param request The location in the original source to convert.
*/
generatedLocation(request: LocationRequest): Location;
/**
* Converts a location in the generated source to a location in the original source.
* @param request The location in the generated source to convert.
*/
originalLocation(request: LocationRequest): Location;
}

View File

@@ -1,13 +1,13 @@
{
"compilerOptions": {
"lib": ["ESNext"],
"module": "esnext",
"module": "NodeNext",
"target": "esnext",
"moduleResolution": "nodenext",
"moduleDetection": "force",
"allowImportingTsExtensions": true,
"noEmit": true,
"composite": true,
// "composite": true,
"strict": true,
"downlevelIteration": true,
"skipLibCheck": true,
@@ -15,7 +15,7 @@
"forceConsistentCasingInFileNames": true,
"inlineSourceMap": true,
"allowJs": true,
"outDir": "dist",
"outDir": "dist"
},
"include": ["src", "scripts", "../bun-types/index.d.ts", "../bun-inspector-protocol/src"]
"include": ["src", "scripts", "../bun-types/index.d.ts", "../bun-inspector-protocol/**/*.ts"]
}

View File

@@ -1,4 +1,4 @@
export type * from "./src/inspector";
export * from "./src/inspector/websocket";
export type * from "./src/protocol";
export * from "./src/util/preview";
export type * from "./src/inspector/index.js";
export * from "./src/inspector/websocket.js";
export type * from "./src/protocol/index.js";
export * from "./src/util/preview.js";

View File

@@ -1,26 +1,7 @@
import { spawnSync } from "node:child_process";
import { readFileSync, writeFileSync } from "node:fs";
import { readFileSync, writeFileSync, realpathSync } from "node:fs";
import type { Domain, Property, Protocol } from "../src/protocol/schema";
run().catch(console.error);
async function run() {
const cwd = new URL("../src/protocol/", import.meta.url);
const runner = "Bun" in globalThis ? "bunx" : "npx";
const write = (name: string, data: string) => {
const path = new URL(name, cwd);
writeFileSync(path, data);
spawnSync(runner, ["prettier", "--write", path.pathname], { cwd, stdio: "ignore" });
};
const base = readFileSync(new URL("protocol.d.ts", cwd), "utf-8");
const baseNoComments = base.replace(/\/\/.*/g, "");
const jsc = await downloadJsc();
write("jsc/protocol.json", JSON.stringify(jsc));
write("jsc/index.d.ts", "// GENERATED - DO NOT EDIT\n" + formatProtocol(jsc, baseNoComments));
const v8 = await downloadV8();
write("v8/protocol.json", JSON.stringify(v8));
write("v8/index.d.ts", "// GENERATED - DO NOT EDIT\n" + formatProtocol(v8, baseNoComments));
}
import path from "node:path";
function formatProtocol(protocol: Protocol, extraTs?: string): string {
const { name, domains } = protocol;
@@ -29,6 +10,7 @@ function formatProtocol(protocol: Protocol, extraTs?: string): string {
let body = `export namespace ${name} {`;
for (const { domain, types = [], events = [], commands = [] } of domains) {
body += `export namespace ${domain} {`;
for (const type of types) {
body += formatProperty(type);
}
@@ -153,32 +135,12 @@ async function downloadV8(): Promise<Protocol> {
}));
}
/**
* @link https://github.com/WebKit/WebKit/tree/main/Source/JavaScriptCore/inspector/protocol
*/
async function downloadJsc(): Promise<Protocol> {
const baseUrl = "https://raw.githubusercontent.com/WebKit/WebKit/main/Source/JavaScriptCore/inspector/protocol";
const domains = [
"Runtime",
"Console",
"Debugger",
"Heap",
"ScriptProfiler",
"CPUProfiler",
"GenericTypes",
"Network",
"Inspector",
];
return {
name: "JSC",
version: {
major: 1,
minor: 3,
},
domains: await Promise.all(domains.map(domain => download<Domain>(`${baseUrl}/${domain}.json`))).then(domains =>
domains.sort((a, b) => a.domain.localeCompare(b.domain)),
),
};
async function getJSC(): Promise<Protocol> {
let bunExecutable = Bun.which("bun-debug") || process.execPath;
if (!bunExecutable) {
throw new Error("bun-debug not found");
}
bunExecutable = realpathSync(bunExecutable);
}
async function download<V>(url: string): Promise<V> {
@@ -200,3 +162,39 @@ function toComment(description?: string): string {
const lines = ["/**", ...description.split("\n").map(line => ` * ${line.trim()}`), "*/"];
return lines.join("\n");
}
const cwd = new URL("../src/protocol/", import.meta.url);
const runner = "Bun" in globalThis ? "bunx" : "npx";
const write = (name: string, data: string) => {
const filePath = path.resolve(__dirname, "..", "src", "protocol", name);
writeFileSync(filePath, data);
spawnSync(runner, ["prettier", "--write", filePath], { cwd, stdio: "ignore" });
};
const base = readFileSync(new URL("protocol.d.ts", cwd), "utf-8");
const baseNoComments = base.replace(/\/\/.*/g, "");
const jscJsonFile = path.resolve(__dirname, process.argv.at(-1) ?? "");
let jscJSONFile;
try {
jscJSONFile = await Bun.file(jscJsonFile).json();
} catch (error) {
console.warn("Failed to read CombinedDomains.json from WebKit build. Is this a WebKit build from Bun?");
console.error(error);
process.exit(1);
}
const jsc = {
name: "JSC",
version: {
major: 1,
minor: 4,
},
domains: jscJSONFile.domains
.filter(a => a.debuggableTypes?.includes?.("javascript"))
.sort((a, b) => a.domain.localeCompare(b.domain)),
};
write("jsc/protocol.json", JSON.stringify(jsc, null, 2));
write("jsc/index.d.ts", "// GENERATED - DO NOT EDIT\n" + formatProtocol(jsc, baseNoComments));
const v8 = await downloadV8();
write("v8/protocol.json", JSON.stringify(v8));
write("v8/index.d.ts", "// GENERATED - DO NOT EDIT\n" + formatProtocol(v8, baseNoComments));

View File

@@ -0,0 +1,235 @@
import { EventEmitter } from "node:events";
import { Socket } from "node:net";
import { SocketFramer } from "../../../bun-debug-adapter-protocol/src/debugger/node-socket-framer.js";
import type { JSC } from "../protocol";
import type { Inspector, InspectorEventMap } from "./index";
/**
* An inspector that communicates with a debugger over a (unix) socket.
* This is used in the extension as follows:
*
* 1. The extension sets the environment variable `BUN_INSPECT_NOTIFY` inside all VSCode terminals.
* This is a path to a unix socket that the extension will listen on.
* 2. Bun reads it and connects to the socket, setting up a reverse connection for sending DAP
* messages.
*/
export class NodeSocketInspector extends EventEmitter<InspectorEventMap> implements Inspector {
#ready: Promise<boolean> | undefined;
#socket: Socket;
#requestId: number;
#pendingRequests: JSC.Request[];
#pendingResponses: Map<
number,
{
request: JSC.Request;
done: (result: unknown) => void;
}
>;
#framer: SocketFramer;
constructor(socket: Socket) {
super();
this.#socket = socket;
this.#requestId = 1;
this.#pendingRequests = [];
this.#pendingResponses = new Map();
this.#framer = new SocketFramer(socket, message => {
if (Array.isArray(message)) {
for (const m of message) {
this.#accept(m);
}
} else {
this.#accept(message);
}
});
}
private onConnectOrImmediately(cb: () => void) {
const isAlreadyConnected = this.#socket.connecting === false;
if (isAlreadyConnected) {
cb();
} else {
this.#socket.once("connect", cb);
}
}
async start(): Promise<boolean> {
if (this.#ready) {
return this.#ready;
}
if (this.closed) {
this.close();
const addressWithPort = this.#socket.remoteAddress + ":" + this.#socket.remotePort;
this.emit("Inspector.connecting", addressWithPort);
}
const socket = this.#socket;
this.onConnectOrImmediately(() => {
this.emit("Inspector.connected");
for (let i = 0; i < this.#pendingRequests.length; i++) {
const request = this.#pendingRequests[i];
if (this.#send(request)) {
this.emit("Inspector.request", request);
} else {
this.#pendingRequests = this.#pendingRequests.slice(i);
break;
}
}
});
socket.on("data", data => this.#framer.onData(data));
socket.on("error", error => {
this.#close(unknownToError(error));
});
socket.on("close", hadError => {
if (hadError) {
this.#close(new Error("Socket closed due to a transmission error"));
} else {
this.#close();
}
});
const ready = new Promise<boolean>(resolve => {
if (socket.connecting) {
socket.on("connect", () => resolve(true));
} else {
resolve(true);
}
socket.on("close", () => resolve(false));
socket.on("error", () => resolve(false));
}).finally(() => {
this.#ready = undefined;
});
this.#ready = ready;
return ready;
}
send<M extends keyof JSC.RequestMap & keyof JSC.ResponseMap>(
method: M,
params?: JSC.RequestMap[M] | undefined,
): Promise<JSC.ResponseMap[M]> {
const id = this.#requestId++;
const request = {
id,
method,
params: params ?? {},
};
return new Promise((resolve, reject) => {
let timerId: number | undefined;
const done = (result: any) => {
this.#pendingResponses.delete(id);
if (timerId) {
clearTimeout(timerId);
}
if (result instanceof Error) {
reject(result);
} else {
resolve(result);
}
};
this.#pendingResponses.set(id, {
request: request,
done: done,
});
if (this.#send(request)) {
timerId = +setTimeout(() => done(new Error(`Timed out: ${method}`)), 10_000);
this.emit("Inspector.request", request);
} else {
this.emit("Inspector.pendingRequest", request);
}
});
}
#send(request: JSC.Request): boolean {
this.#framer.send(JSON.stringify(request));
if (!this.#pendingRequests.includes(request)) {
this.#pendingRequests.push(request);
}
return false;
}
#accept(message: string): void {
let data: JSC.Event | JSC.Response;
try {
data = JSON.parse(message);
} catch (cause) {
this.emit("Inspector.error", new Error(`Failed to parse message: ${message}`, { cause }));
return;
}
if (!("id" in data)) {
this.emit("Inspector.event", data);
const { method, params } = data;
this.emit(method, params);
return;
}
this.emit("Inspector.response", data);
const { id } = data;
const handle = this.#pendingResponses.get(id);
if (!handle) {
this.emit("Inspector.error", new Error(`Failed to find matching request for ID: ${id}`));
return;
}
if ("error" in data) {
const { error } = data;
const { message } = error;
handle.done(new Error(message));
} else {
const { result } = data;
handle.done(result);
}
}
get closed(): boolean {
return !this.#socket.writable;
}
close(): void {
this.#socket?.end();
}
#close(error?: Error): void {
for (const handle of this.#pendingResponses.values()) {
handle.done(error ?? new Error("Socket closed while waiting for: " + handle.request.method));
}
this.#pendingResponses.clear();
if (error) {
this.emit("Inspector.error", error);
}
this.emit("Inspector.disconnected", error);
}
}
function unknownToError(input: unknown): Error {
if (input instanceof Error) {
return input;
}
if (typeof input === "object" && input !== null && "message" in input) {
const { message } = input;
return new Error(`${message}`);
}
return new Error(`${input}`);
}
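/*
 * Usage sketch (not part of this diff): once a Bun process has dialed back,
 * requests are plain JSON-RPC over the framed socket. The `socket` variable and
 * the logged shape are assumptions for illustration.
 *
 *   const inspector = new NodeSocketInspector(socket);
 *   await inspector.start();
 *   // Resolves with the response result, or rejects after the 10s timeout above.
 *   const { result } = await inspector.send("Runtime.evaluate", { expression: "1 + 1" });
 *   console.log(result); // e.g. a RemoteObject such as { type: "number", value: 2 }
 */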

View File

@@ -1,7 +1,7 @@
import { EventEmitter } from "node:events";
import { WebSocket } from "ws";
import type { Inspector, InspectorEventMap } from ".";
import type { JSC } from "../protocol";
import type { Inspector, InspectorEventMap } from "./index";
/**
* An inspector that communicates with a debugger over a WebSocket.
@@ -170,6 +170,7 @@ export class WebSocketInspector extends EventEmitter<InspectorEventMap> implemen
#accept(message: string): void {
let data: JSC.Event | JSC.Response;
try {
data = JSON.parse(message);
} catch (cause) {

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -3,7 +3,7 @@
"lib": ["ESNext"],
"module": "ESNext",
"target": "ESNext",
"moduleResolution": "NodeNext",
"moduleResolution": "Bundler",
"moduleDetection": "force",
"strict": true,
"downlevelIteration": true,
@@ -12,7 +12,7 @@
"forceConsistentCasingInFileNames": true,
"inlineSourceMap": true,
"allowJs": true,
"outDir": "dist",
"outDir": "dist"
},
"include": [".", "../bun-types/index.d.ts"]
}

View File

@@ -1,2 +1,4 @@
node_modules/
dist/
dist/
docs/
*.tgz

View File

@@ -3873,7 +3873,6 @@ declare module "bun" {
* The default loader for this file extension
*/
loader: Loader;
/**
* Defer the execution of this callback until all other modules have been parsed.
*
@@ -3899,6 +3898,10 @@ declare module "bun" {
* The namespace of the importer.
*/
namespace: string;
/**
* The directory to perform file-based resolutions in.
*/
resolveDir: string;
/**
* The kind of import this resolve is for.
*/
@@ -4534,6 +4537,11 @@ declare module "bun" {
unix: string;
}
interface FdSocketOptions<Data = undefined> extends SocketOptions<Data> {
tls?: TLSOptions;
fd: number;
}
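/*
 * Sketch (not part of this diff): connecting over an already-open file
 * descriptor, assuming this options shape is accepted by `Bun.connect`.
 * The descriptor number is an arbitrary example.
 *
 *   const socket = await Bun.connect({
 *     fd: 3,
 *     socket: {
 *       data(_socket, chunk) {
 *         console.log(chunk.toString());
 *       },
 *     },
 *   });
 */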
/**
* Create a TCP client that connects to a server
*

View File

@@ -3,14 +3,16 @@
"license": "MIT",
"main": "",
"types": "index.d.ts",
"description": "Type definitions for Bun, an incredibly fast JavaScript runtime",
"description": "Type definitions and documentation for Bun, an incredibly fast JavaScript runtime",
"repository": {
"type": "git",
"url": "https://github.com/oven-sh/bun",
"directory": "packages/bun-types"
},
"files": [
"*.d.ts"
"*.d.ts",
"docs/**/*.md",
"docs/*.md"
],
"homepage": "https://bun.sh",
"dependencies": {
@@ -25,7 +27,8 @@
},
"scripts": {
"prebuild": "echo $(pwd)",
"build": "bun scripts/build.ts && bun run fmt",
"copy-docs": "rm -rf docs && cp -r ../../docs/ ./docs",
"build": "bun run copy-docs && bun scripts/build.ts && bun run fmt",
"test": "tsc",
"fmt": "echo $(which biome) && biome format --write ."
},

View File

@@ -387,9 +387,9 @@ declare module "bun:test" {
/**
* Marks this test as to be written or to be fixed.
*
* When a test function is passed, it will be marked as `todo` in the test results
* as long the test does not pass. When the test passes, the test will be marked as
* `fail` in the results; you will have to remove the `.todo` or check that your test
* These tests will not be executed unless the `--todo` flag is passed. With the flag,
* if the test passes, the test will be marked as `fail` in the results; you will have to
* remove the `.todo` or check that your test
* is implemented correctly.
*
* @param label the label for the test
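* @example
* // Sketch (not from this diff): kept in the suite as "todo" until implemented;
* // per the description above, it only runs when the `--todo` flag is passed.
* test.todo("normalizes trailing slashes", () => {
*   expect(new URL("https://bun.sh/docs/").pathname).toBe("/docs");
* });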

View File

@@ -18,8 +18,7 @@ const __filename = fileURLToPath(import.meta.url);
const now = new Date();
const formatDate = d => {
const iso = d.toISOString();
return iso.substring(0, iso.indexOf("T"));
return d;
};
const getCertdataURL = version => {
@@ -146,26 +145,35 @@ if (values.help) {
process.exit(0);
}
const scheduleURL = "https://wiki.mozilla.org/NSS:Release_Versions";
if (values.verbose) {
console.log(`Fetching NSS release schedule from ${scheduleURL}`);
}
const schedule = await fetch(scheduleURL);
if (!schedule.ok) {
console.error(`Failed to fetch ${scheduleURL}: ${schedule.status}: ${schedule.statusText}`);
process.exit(-1);
}
const scheduleText = await schedule.text();
const nssReleases = getReleases(scheduleText);
const versions = await fetch("https://nucleus.mozilla.org/rna/all-releases.json").then(res => res.json());
// Retrieve metadata for the NSS release being updated to.
const version = positionals[0] ?? (await getLatestVersion(nssReleases));
const release = nssReleases.find(r => {
return new RegExp(`^${version.replace(".", "\\.")}\\b`).test(r[kNSSVersion]);
});
if (!pastRelease(release)) {
console.warn(`Warning: NSS ${version} is not due to be released until ${formatDate(release[kNSSDate])}`);
const today = new Date().toISOString().split("T")[0].trim();
const releases = versions
.filter(
version =>
version.channel == "Release" &&
version.product === "Firefox" &&
version.is_public &&
version.release_date <= today,
)
.sort((a, b) => (a > b ? (a == b ? 0 : -1) : 1));
const latest = releases[0];
const release_tag = `FIREFOX_${latest.version.replaceAll(".", "_")}_RELEASE`;
if (values.verbose) {
console.log(`Fetching NSS release from ${release_tag}`);
}
const version = await fetch(
`https://hg.mozilla.org/releases/mozilla-release/raw-file/${release_tag}/security/nss/TAG-INFO`,
)
.then(res => res.text())
.then(txt => txt.trim().split("NSS_")[1].split("_RTM").join("").split("_").join(".").trim());
const release = {
version: version,
firefoxVersion: latest.version,
firefoxDate: latest.release_date,
date: latest.release_date,
};
if (values.verbose) {
console.log("Found NSS version:");
console.log(release);

View File

@@ -623,18 +623,34 @@ inline __attribute__((always_inline)) LIBUS_SOCKET_DESCRIPTOR bsd_bind_listen_fd
setsockopt(listenFd, SOL_SOCKET, SO_EXCLUSIVEADDRUSE, (void *) &optval2, sizeof(optval2));
#endif
} else {
#if defined(SO_REUSEPORT)
int optval2 = 1;
setsockopt(listenFd, SOL_SOCKET, SO_REUSEPORT, (void *) &optval2, sizeof(optval2));
#endif
#if defined(SO_REUSEPORT)
if((options & LIBUS_LISTEN_REUSE_PORT)) {
int optval2 = 1;
setsockopt(listenFd, SOL_SOCKET, SO_REUSEPORT, (void *) &optval2, sizeof(optval2));
}
#endif
}
#if defined(SO_REUSEADDR)
#ifndef _WIN32
// Unlike on Unix, here we don't set SO_REUSEADDR, because it doesn't just
// allow binding to addresses that are in use by sockets in TIME_WAIT, it
// effectively allows 'stealing' a port which is in use by another application.
// See libuv issue #1360.
int optval3 = 1;
setsockopt(listenFd, SOL_SOCKET, SO_REUSEADDR, (void *) &optval3, sizeof(optval3));
#endif
#endif
#ifdef IPV6_V6ONLY
// TODO: revise support to match node.js
// if (listenAddr->ai_family == AF_INET6) {
// int disabled = (options & LIBUS_SOCKET_IPV6_ONLY) != 0;
// setsockopt(listenFd, IPPROTO_IPV6, IPV6_V6ONLY, (void *) &disabled, sizeof(disabled));
// }
int disabled = 0;
setsockopt(listenFd, IPPROTO_IPV6, IPV6_V6ONLY, (void *) &disabled, sizeof(disabled));
#endif

View File

@@ -201,7 +201,7 @@ struct loop_ssl_data * us_internal_set_loop_ssl_data(struct us_internal_ssl_sock
struct us_internal_ssl_socket_t *ssl_on_open(struct us_internal_ssl_socket_t *s,
int is_client, char *ip,
int ip_length) {
int ip_length, const char* sni) {
struct us_internal_ssl_socket_context_t *context =
(struct us_internal_ssl_socket_context_t *)us_socket_context(0, &s->s);
@@ -231,6 +231,10 @@ struct us_internal_ssl_socket_t *ssl_on_open(struct us_internal_ssl_socket_t *s,
if (is_client) {
SSL_set_renegotiate_mode(s->ssl, ssl_renegotiate_explicit);
SSL_set_connect_state(s->ssl);
if (sni) {
SSL_set_tlsext_host_name(s->ssl, sni);
}
} else {
SSL_set_accept_state(s->ssl);
// we do not allow renegotiation on the server side (should be the default for BoringSSL, but we set to make openssl compatible)
@@ -1603,6 +1607,10 @@ struct us_internal_ssl_socket_t *us_internal_ssl_socket_context_connect_unix(
socket_ext_size);
}
static void ssl_on_open_without_sni(struct us_internal_ssl_socket_t *s, int is_client, char *ip, int ip_length) {
ssl_on_open(s, is_client, ip, ip_length, NULL);
}
void us_internal_ssl_socket_context_on_open(
struct us_internal_ssl_socket_context_t *context,
struct us_internal_ssl_socket_t *(*on_open)(
@@ -1611,7 +1619,7 @@ void us_internal_ssl_socket_context_on_open(
us_socket_context_on_open(
0, &context->sc,
(struct us_socket_t * (*)(struct us_socket_t *, int, char *, int))
ssl_on_open);
ssl_on_open_without_sni);
context->on_open = on_open;
}
@@ -1850,9 +1858,6 @@ ssl_wrapped_context_on_close(struct us_internal_ssl_socket_t *s, int code,
(struct us_wrapped_socket_context_t *)us_internal_ssl_socket_context_ext(
context);
if (wrapped_context->events.on_close) {
wrapped_context->events.on_close((struct us_socket_t *)s, code, reason);
}
// writing here can cause the context to not be writable anymore, but it's the
// user's responsibility to check for that
@@ -1860,6 +1865,10 @@ ssl_wrapped_context_on_close(struct us_internal_ssl_socket_t *s, int code,
wrapped_context->old_events.on_close((struct us_socket_t *)s, code, reason);
}
if (wrapped_context->events.on_close) {
wrapped_context->events.on_close((struct us_socket_t *)s, code, reason);
}
us_socket_context_unref(0, wrapped_context->tcp_context);
return s;
}
@@ -1872,9 +1881,6 @@ ssl_wrapped_context_on_writable(struct us_internal_ssl_socket_t *s) {
(struct us_wrapped_socket_context_t *)us_internal_ssl_socket_context_ext(
context);
if (wrapped_context->events.on_writable) {
wrapped_context->events.on_writable((struct us_socket_t *)s);
}
// writing here can cause the context to not be writable anymore, but it's the
// user's responsibility to check for that
@@ -1882,6 +1888,10 @@ ssl_wrapped_context_on_writable(struct us_internal_ssl_socket_t *s) {
wrapped_context->old_events.on_writable((struct us_socket_t *)s);
}
if (wrapped_context->events.on_writable) {
wrapped_context->events.on_writable((struct us_socket_t *)s);
}
return s;
}
@@ -1908,14 +1918,14 @@ ssl_wrapped_context_on_timeout(struct us_internal_ssl_socket_t *s) {
struct us_wrapped_socket_context_t *wrapped_context =
(struct us_wrapped_socket_context_t *)us_internal_ssl_socket_context_ext(
context);
if (wrapped_context->old_events.on_timeout) {
wrapped_context->old_events.on_timeout((struct us_socket_t *)s);
}
if (wrapped_context->events.on_timeout) {
wrapped_context->events.on_timeout((struct us_socket_t *)s);
}
if (wrapped_context->old_events.on_timeout) {
wrapped_context->old_events.on_timeout((struct us_socket_t *)s);
}
return s;
}
@@ -1927,15 +1937,14 @@ ssl_wrapped_context_on_long_timeout(struct us_internal_ssl_socket_t *s) {
struct us_wrapped_socket_context_t *wrapped_context =
(struct us_wrapped_socket_context_t *)us_internal_ssl_socket_context_ext(
context);
if (wrapped_context->old_events.on_long_timeout) {
wrapped_context->old_events.on_long_timeout((struct us_socket_t *)s);
}
if (wrapped_context->events.on_long_timeout) {
wrapped_context->events.on_long_timeout((struct us_socket_t *)s);
}
if (wrapped_context->old_events.on_long_timeout) {
wrapped_context->old_events.on_long_timeout((struct us_socket_t *)s);
}
return s;
}
@@ -1946,14 +1955,13 @@ ssl_wrapped_context_on_end(struct us_internal_ssl_socket_t *s) {
struct us_wrapped_socket_context_t *wrapped_context =
(struct us_wrapped_socket_context_t *)us_internal_ssl_socket_context_ext(
context);
if (wrapped_context->events.on_end) {
wrapped_context->events.on_end((struct us_socket_t *)s);
}
if (wrapped_context->old_events.on_end) {
wrapped_context->old_events.on_end((struct us_socket_t *)s);
}
if (wrapped_context->events.on_end) {
wrapped_context->events.on_end((struct us_socket_t *)s);
}
return s;
}
@@ -1965,13 +1973,13 @@ ssl_wrapped_on_connect_error(struct us_internal_ssl_socket_t *s, int code) {
(struct us_wrapped_socket_context_t *)us_internal_ssl_socket_context_ext(
context);
if (wrapped_context->old_events.on_connect_error) {
wrapped_context->old_events.on_connect_error((struct us_connecting_socket_t *)s, code);
}
if (wrapped_context->events.on_connect_error) {
wrapped_context->events.on_connect_error((struct us_connecting_socket_t *)s, code);
}
if (wrapped_context->old_events.on_connect_error) {
wrapped_context->old_events.on_connect_error((struct us_connecting_socket_t *)s, code);
}
return s;
}
@@ -1982,14 +1990,14 @@ ssl_wrapped_on_socket_connect_error(struct us_internal_ssl_socket_t *s, int code
struct us_wrapped_socket_context_t *wrapped_context =
(struct us_wrapped_socket_context_t *)us_internal_ssl_socket_context_ext(
context);
if (wrapped_context->old_events.on_connecting_socket_error) {
wrapped_context->old_events.on_connecting_socket_error((struct us_socket_t *)s, code);
}
if (wrapped_context->events.on_connecting_socket_error) {
wrapped_context->events.on_connecting_socket_error((struct us_socket_t *)s, code);
}
if (wrapped_context->old_events.on_connecting_socket_error) {
wrapped_context->old_events.on_connecting_socket_error((struct us_socket_t *)s, code);
}
return s;
}
@@ -2005,7 +2013,30 @@ us_internal_ssl_socket_open(struct us_internal_ssl_socket_t *s, int is_client,
return s;
// start SSL open
return ssl_on_open(s, is_client, ip, ip_length);
return ssl_on_open(s, is_client, ip, ip_length, NULL);
}
struct us_socket_t *us_socket_upgrade_to_tls(us_socket_r s, us_socket_context_r new_context, const char *sni) {
// Resize to tls + ext size
void** prev_ext_ptr = (void**)us_socket_ext(0, s);
void* prev_ext = *prev_ext_ptr;
struct us_internal_ssl_socket_t *socket =
(struct us_internal_ssl_socket_t *)us_socket_context_adopt_socket(
0, new_context, s,
(sizeof(struct us_internal_ssl_socket_t) - sizeof(struct us_socket_t)) + sizeof(void*));
socket->ssl = NULL;
socket->ssl_write_wants_read = 0;
socket->ssl_read_wants_write = 0;
socket->fatal_error = 0;
socket->handshake_state = HANDSHAKE_PENDING;
void** new_ext_ptr = (void**)us_socket_ext(1, (struct us_socket_t *)socket);
*new_ext_ptr = prev_ext;
ssl_on_open(socket, 1, NULL, 0, sni);
return (struct us_socket_t *)socket;
}
struct us_internal_ssl_socket_t *us_internal_ssl_socket_wrap_with_tls(

View File

@@ -7,12 +7,6 @@
#include <openssl/x509.h>
#include <string.h>
static const int root_certs_size = sizeof(root_certs) / sizeof(root_certs[0]);
static X509 *root_cert_instances[sizeof(root_certs) / sizeof(root_certs[0])] = {
NULL};
static X509 *root_extra_cert_instances = {NULL};
static std::atomic_flag root_cert_instances_lock = ATOMIC_FLAG_INIT;
static std::atomic_bool root_cert_instances_initialized = 0;
// This callback is used to avoid the default passphrase callback in OpenSSL
// which will typically prompt for the passphrase. The prompting is designed
@@ -78,7 +72,9 @@ end:
return NULL;
}
static void us_internal_init_root_certs() {
static void us_internal_init_root_certs(X509 *root_cert_instances[sizeof(root_certs) / sizeof(root_certs[0])], X509 *&root_extra_cert_instances) {
static std::atomic_flag root_cert_instances_lock = ATOMIC_FLAG_INIT;
static std::atomic_bool root_cert_instances_initialized = 0;
if (std::atomic_load(&root_cert_instances_initialized) == 1)
return;
@@ -123,7 +119,11 @@ extern "C" X509_STORE *us_get_default_ca_store() {
return NULL;
}
us_internal_init_root_certs();
static X509 *root_cert_instances[sizeof(root_certs) / sizeof(root_certs[0])] = {
NULL};
static X509 *root_extra_cert_instances = NULL;
us_internal_init_root_certs(root_cert_instances, root_extra_cert_instances);
// load all root_cert_instances on the default ca store
for (size_t i = 0; i < root_certs_size; i++) {

View File

@@ -96,6 +96,10 @@ enum {
LIBUS_LISTEN_EXCLUSIVE_PORT = 1,
/* Allow socket to keep writing after readable side closes */
LIBUS_SOCKET_ALLOW_HALF_OPEN = 2,
/* Setting reusePort allows multiple sockets on the same host to bind to the same port. Incoming connections are distributed by the operating system to listening sockets. This option is available only on some platforms, such as Linux 3.9+, DragonFlyBSD 3.6+, FreeBSD 12.0+, Solaris 11.4, and AIX 7.2.5+ */
LIBUS_LISTEN_REUSE_PORT = 4,
/* Setting ipv6Only will disable dual-stack support, i.e., binding to host :: won't make 0.0.0.0 be bound. */
LIBUS_SOCKET_IPV6_ONLY = 8,
};
/* Library types publicly available */
@@ -190,7 +194,7 @@ struct us_socket_context_options_t {
};
struct us_bun_verify_error_t {
long error;
int error;
const char* code;
const char* reason;
};
@@ -338,6 +342,8 @@ struct us_loop_t *us_socket_context_loop(int ssl, us_socket_context_r context) n
* Used mainly for "socket upgrades" such as when transitioning from HTTP to WebSocket. */
struct us_socket_t *us_socket_context_adopt_socket(int ssl, us_socket_context_r context, us_socket_r s, int ext_size);
struct us_socket_t *us_socket_upgrade_to_tls(us_socket_r s, us_socket_context_r new_context, const char *sni);
/* Create a child socket context which acts much like its own socket context with its own callbacks yet still relies on the
* parent socket context for some shared resources. Child socket contexts should be used together with socket adoptions and nothing else. */
struct us_socket_context_t *us_create_child_socket_context(int ssl, us_socket_context_r context, int context_ext_size);

View File

@@ -22,6 +22,7 @@
#include <sys/ioctl.h>
#endif
/* The loop has 2 fallthrough polls */
void us_internal_loop_data_init(struct us_loop_t *loop, void (*wakeup_cb)(struct us_loop_t *loop),
void (*pre_cb)(struct us_loop_t *loop), void (*post_cb)(struct us_loop_t *loop)) {

View File

@@ -613,7 +613,9 @@ namespace uWS
* ought to be handled as an error. */
std::string_view transferEncodingString = req->getHeader("transfer-encoding");
std::string_view contentLengthString = req->getHeader("content-length");
if (transferEncodingString.length() && contentLengthString.length()) {
auto transferEncodingStringLen = transferEncodingString.length();
auto contentLengthStringLen = contentLengthString.length();
if (transferEncodingStringLen && contentLengthStringLen) {
/* Returning fullptr is the same as calling the errorHandler */
/* We could be smart and set an error in the context along with this, to indicate what
* http error response we might want to return */
@@ -623,6 +625,15 @@ namespace uWS
/* Parse query */
const char *querySeparatorPtr = (const char *) memchr(req->headers->value.data(), '?', req->headers->value.length());
req->querySeparator = (unsigned int) ((querySeparatorPtr ? querySeparatorPtr : req->headers->value.data() + req->headers->value.length()) - req->headers->value.data());
// let's check if the content-length is valid before calling requestHandler
if(contentLengthStringLen) {
remainingStreamingBytes = toUnsignedInteger(contentLengthString);
if (remainingStreamingBytes == UINT64_MAX) {
/* Parser error */
return {HTTP_ERROR_400_BAD_REQUEST, FULLPTR};
}
}
/* If returned socket is not what we put in we need
* to break here as we either have upgraded to
@@ -642,7 +653,7 @@ namespace uWS
/* RFC 9112 6.3
* If a message is received with both a Transfer-Encoding and a Content-Length header field,
* the Transfer-Encoding overrides the Content-Length. */
if (transferEncodingString.length()) {
if (transferEncodingStringLen) {
/* If a proxy sent us the transfer-encoding header that 100% means it must be chunked or else the proxy is
* not RFC 9112 compliant. Therefore it is always better to assume this is the case, since that entirely eliminates
@@ -665,6 +676,7 @@ namespace uWS
dataHandler(user, chunk, chunk.length() == 0);
}
if (isParsingInvalidChunkedEncoding(remainingStreamingBytes)) {
// TODO: what happens if we already responded?
return {HTTP_ERROR_400_BAD_REQUEST, FULLPTR};
}
unsigned int consumed = (length - (unsigned int) dataToConsume.length());
@@ -672,13 +684,8 @@ namespace uWS
length = (unsigned int) dataToConsume.length();
consumedTotal += consumed;
}
} else if (contentLengthString.length()) {
remainingStreamingBytes = toUnsignedInteger(contentLengthString);
if (remainingStreamingBytes == UINT64_MAX) {
/* Parser error */
return {HTTP_ERROR_400_BAD_REQUEST, FULLPTR};
}
} else if (contentLengthStringLen) {
if (!CONSUME_MINIMALLY) {
unsigned int emittable = (unsigned int) std::min<uint64_t>(remainingStreamingBytes, length);
dataHandler(user, std::string_view(data, emittable), emittable == remainingStreamingBytes);

View File

@@ -19,6 +19,26 @@ At its core is the _Bun runtime_, a fast JavaScript runtime designed as a drop-i
<br/>
</div>
## Features:
- Live in-editor error messages (gif below)
- Test runner codelens
- Debugger support
- Run scripts from package.json
- Visual lockfile viewer (`bun.lockb`)
## In-editor error messages
When running programs with Bun from a Visual Studio Code terminal, Bun will connect to the extension and report errors as they happen, at the exact location where they occurred. We recommend pairing this feature with `bun --watch` so you see errors on every save.
![Error messages example](./error-messages.gif)
<div align="center">
<sup>In the example above, VSCode is saving on every keypress. Under normal configuration you'd only see errors on each save.</sup>
</div>
Errors are cleared whenever you start typing, or whenever the extension detects that Bun just started running (or reloading) a new program.
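For example, a file like the one below (the file name is just an illustration), run with `bun --watch`, will surface the runtime `TypeError` inline at the line where it is thrown:

```ts
// oops.ts — run with: bun --watch oops.ts
const user: { name?: string } = {};

// This throws at runtime, and the extension reports the error at this exact line.
console.log(user.name!.toUpperCase());
```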
## Configuration
### `.vscode/launch.json`
@@ -75,8 +95,8 @@ You can use the following configurations to debug JavaScript and TypeScript file
// The URL of the WebSocket inspector to attach to.
// This value can be retrieved by using `bun --inspect`.
"url": "ws://localhost:6499/",
}
]
},
],
}
```
@@ -91,8 +111,11 @@ You can use the following configurations to customize the behavior of the Bun ex
// If support for Bun should be added to the default "JavaScript Debug Terminal".
"bun.debugTerminal.enabled": true,
// If the debugger should stop on the first line of the program.
"bun.debugTerminal.stopOnEntry": false,
// Glob pattern to find test files. Defaults to the value shown below.
"bun.test.filePattern": "**/*{.test.,.spec.,_test_,_spec_}{js,ts,tsx,jsx,mts,cts}",
}
```
```

Binary file not shown.

After

Width:  |  Height:  |  Size: 462 KiB

View File

@@ -0,0 +1,3 @@
.bake-debug
dist
node_modules

View File

@@ -0,0 +1,6 @@
export default {
port: 3000,
app: {
framework: "react",
},
};

View File

@@ -0,0 +1,10 @@
import { PropsWithChildren } from "react";
export default function Layout({ children }: PropsWithChildren) {
return (
<div>
{children}
<footer>some rights reserved - {new Date().toString()}</footer>
</div>
);
}

View File

@@ -0,0 +1,17 @@
"use client";
import { useState } from "react";
function App() {
const [count, setCount] = useState(null);
return (
<>
{/* @ts-expect-error */}
<button onClick={() => setCount(count => count.charAt(0))}>count is {count}</button>
</>
);
}
export default App;

View File

@@ -0,0 +1,3 @@
export default function Two() {
return <p>Wow a second page! Bake is groundbreaking</p>;
}

View File

@@ -0,0 +1 @@
Math.max = undefined;

View File

@@ -3,10 +3,13 @@ import { describe, expect, test } from "bun:test";
describe("example", () => {
test("it works", () => {
expect(1).toBe(1);
expect(1).not.toBe(2);
expect(10).toBe(10);
expect(() => {
throw new TypeError("Oops! I did it again.");
}).toThrow();
expect(() => {
throw new Error("Parent error.", {
cause: new TypeError("Child error."),

View File

@@ -1,7 +1,7 @@
type OS = "Windows";
import * as os from "node:os";
Bun.serve({
fetch(req: Request) {
return new Response(`Hello, ${"Windows" as OS}!`);
return new Response(`Hello from ${os.arch()}!`);
},
});

View File

@@ -2,10 +2,16 @@
"private": true,
"name": "example",
"dependencies": {
"axios": "^1.7.7",
"elysia": "^0.6.3",
"express": "^4.18.2",
"mime": "^3.0.0",
"mime-db": "^1.52.0"
"mime-db": "^1.52.0",
"react": "^0.0.0-experimental-380f5d67-20241113",
"react-dom": "^0.0.0-experimental-380f5d67-20241113",
"react-refresh": "^0.0.0-experimental-380f5d67-20241113",
"react-server-dom-bun": "^0.0.0-experimental-603e6108-20241029",
"react-server-dom-webpack": "^0.0.0-experimental-380f5d67-20241113"
},
"scripts": {
"run": "node hello.js",

View File

@@ -0,0 +1,7 @@
function getOldestPersonInBooking(ages: number[]): number {
console.log("ok");
throw new Error("TODO! Perhaps we can use Math.max() for this?");
}
const ticketAges = [5, 25, 30];
console.log(getOldestPersonInBooking(ticketAges));

View File

@@ -0,0 +1,9 @@
import axios from "axios";
async function foo() {
const res = await axios.get("http://example.com");
throw new Error("potato");
}
console.log(await foo());

View File

@@ -14,9 +14,6 @@
"jsx": "preserve",
"allowSyntheticDefaultImports": true,
"forceConsistentCasingInFileNames": true,
"allowJs": true,
"types": [
"bun-types" // add Bun global
]
"allowJs": true
}
}

View File

@@ -0,0 +1,13 @@
// await Bun.sleep(100);
interface User {
name: string;
}
const user = {
name: "Alistair",
} as User;
console.log(`First letter us '${user.name.charAt(0)}'`);
await Bun.sleep(100);

View File

@@ -1,6 +1,6 @@
{
"name": "bun-vscode",
"version": "0.0.15",
"version": "0.0.22",
"author": "oven",
"repository": {
"type": "git",
@@ -18,48 +18,9 @@
"esbuild": "^0.19.2",
"typescript": "^5.0.0"
},
"description": "The Visual Studio Code extension for Bun.",
"displayName": "Bun for Visual Studio Code",
"engines": {
"vscode": "^1.60.0"
},
"extensionKind": [
"workspace"
],
"galleryBanner": {
"color": "#3B3738",
"theme": "dark"
},
"homepage": "https://bun.sh/",
"icon": "assets/icon.png",
"keywords": [
"bun",
"node.js",
"javascript",
"typescript",
"vscode"
],
"license": "MIT",
"publisher": "oven",
"scripts": {
"build": "node scripts/build.mjs",
"pretest": "bun run build",
"test": "node scripts/test.mjs",
"dev": "vscode-test --config scripts/dev.mjs",
"prepublish": "npm version patch && bun run build",
"publish": "cd extension && bunx vsce publish"
},
"workspaceTrust": {
"request": "never"
},
"workspaces": [
"../bun-debug-adapter-protocol",
"../bun-inspector-protocol"
],
"activationEvents": [
"onStartupFinished"
],
"browser": "dist/web-extension.js",
"bugs": {
"url": "https://github.com/oven-sh/bun/issues"
},
@@ -84,6 +45,18 @@
"scope": "window",
"default": null
},
"bun.diagnosticsSocket.enabled": {
"type": "boolean",
"description": "If Bun extension should communicate with Bun over a socket to show errors in editor.",
"scope": "window",
"default": true
},
"bun.bunlockb.enabled": {
"type": "boolean",
"description": "If visual lockfile viewer (`bun.lockb`) should be enabled ",
"scope": "window",
"default": true
},
"bun.debugTerminal.enabled": {
"type": "boolean",
"description": "If Bun should be added to the JavaScript Debug Terminal.",
@@ -95,6 +68,21 @@
"description": "If the debugger should stop on the first line when used in the JavaScript Debug Terminal.",
"scope": "window",
"default": false
},
"bun.test.filePattern": {
"type": "string",
"default": "**/*{.test.,.spec.,_test_,_spec_}{js,ts,tsx,jsx,mts,cts}",
"description": "Glob pattern to find test files"
},
"bun.test.customFlag": {
"type": "string",
"default": "",
"description": "Custom flag added to the end of test command"
},
"bun.test.customScript": {
"type": "string",
"default": "",
"description": "Custom script to use instead of `bun test`, for example script from `package.json`"
}
}
},
@@ -122,6 +110,20 @@
"category": "Bun",
"enablement": "!inDebugMode && resourceLangId =~ /^(javascript|typescript|javascriptreact|typescriptreact)$/ && !isInDiffEditor && resourceScheme == 'untitled'",
"icon": "$(play-circle)"
},
{
"command": "extension.bun.runTest",
"title": "Run all tests",
"shortTitle": "Run Test",
"category": "Bun",
"icon": "$(play)"
},
{
"command": "extension.bun.watchTest",
"title": "Run all tests in watch mode",
"shortTitle": "Run Test Watch",
"category": "Bun",
"icon": "$(sync)"
}
],
"menus": {
@@ -328,5 +330,43 @@
}
}
]
}
}
},
"description": "The Visual Studio Code extension for Bun.",
"displayName": "Bun",
"engines": {
"vscode": "^1.60.0"
},
"extensionKind": [
"workspace"
],
"galleryBanner": {
"color": "#3B3738",
"theme": "dark"
},
"homepage": "https://bun.sh/",
"icon": "assets/icon.png",
"keywords": [
"bun",
"node.js",
"javascript",
"typescript",
"vscode"
],
"license": "MIT",
"publisher": "oven",
"scripts": {
"build": "node scripts/build.mjs",
"pretest": "bun run build",
"test": "node scripts/test.mjs",
"dev": "vscode-test --config scripts/dev.mjs",
"prepublish": "npm version patch && bun run build",
"publish": "cd extension && bunx vsce publish"
},
"workspaceTrust": {
"request": "never"
},
"workspaces": [
"../bun-debug-adapter-protocol",
"../bun-inspector-protocol"
]
}

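The manifest above contributes two new test commands, extension.bun.runTest and extension.bun.watchTest. A hedged sketch of how a keybinding handler or another extension could invoke them programmatically; only the command IDs come from the manifest, the wrapper function is illustrative:

import * as vscode from "vscode";

// Illustrative helper: run Bun tests via the commands contributed above.
// Only the command IDs are taken from the manifest; the rest is an assumption.
export async function runAllBunTests(watch = false): Promise<void> {
  const command = watch ? "extension.bun.watchTest" : "extension.bun.runTest";
  await vscode.commands.executeCommand(command);
}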
View File

@@ -1,8 +1,10 @@
import * as vscode from "vscode";
import { registerDebugger, debugCommand } from "./features/debug";
import { registerDebugger } from "./features/debug";
import { registerDiagnosticsSocket } from "./features/diagnostics/diagnostics";
import { registerBunlockEditor } from "./features/lockfile";
import { registerPackageJsonProviders } from "./features/tasks/package.json";
import { registerTaskProvider } from "./features/tasks/tasks";
import { registerTestCodeLens, registerTestRunner } from "./features/tests";
async function runUnsavedCode() {
const editor = vscode.window.activeTextEditor;
@@ -44,9 +46,14 @@ export function activate(context: vscode.ExtensionContext) {
registerDebugger(context);
registerTaskProvider(context);
registerPackageJsonProviders(context);
registerDiagnosticsSocket(context);
registerTestRunner(context);
registerTestCodeLens(context);
// Only register for text editors
context.subscriptions.push(vscode.commands.registerTextEditorCommand("extension.bun.runUnsavedCode", runUnsavedCode));
}
export function deactivate() {}
export function getConfig<T>(path: string, scope?: vscode.ConfigurationScope) {
return vscode.workspace.getConfiguration("bun", scope).get<T>(path);
}

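The newly exported getConfig helper reads values under the bun.* settings namespace. A small usage sketch, assuming it is imported from this module (the import path is an assumption; the fallback literal is copied from the manifest default above):

import { getConfig } from "./extension"; // path is an assumption

// Resolve the test file glob, falling back to the manifest default when the user has not set it.
const filePattern =
  getConfig<string>("test.filePattern") ?? "**/*{.test.,.spec.,_test_,_spec_}{js,ts,tsx,jsx,mts,cts}";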
View File

@@ -4,12 +4,13 @@ import { join } from "node:path";
import * as vscode from "vscode";
import {
type DAP,
DebugAdapter,
getAvailablePort,
getRandomId,
TCPSocketSignal,
UnixSignal,
WebSocketDebugAdapter,
} from "../../../bun-debug-adapter-protocol";
import { getConfig } from "../extension";
export const DEBUG_CONFIGURATION: vscode.DebugConfiguration = {
type: "bun",
@@ -101,16 +102,18 @@ async function injectDebugTerminal(terminal: vscode.Terminal): Promise<void> {
}
const { env } = creationOptions as vscode.TerminalOptions;
if (env["BUN_INSPECT"]) {
if (env && env["BUN_INSPECT"]) {
return;
}
const session = new TerminalDebugSession();
await session.initialize();
const { adapter, signal } = session;
const stopOnEntry = getConfig("debugTerminal.stopOnEntry") === true;
const query = stopOnEntry ? "break=1" : "wait=1";
const debugSession = new TerminalDebugSession();
await debugSession.initialize();
const { adapter, signal } = debugSession;
const debug = vscode.window.createTerminal({
...creationOptions,
name: "JavaScript Debug Terminal",
@@ -118,6 +121,7 @@ async function injectDebugTerminal(terminal: vscode.Terminal): Promise<void> {
...env,
"BUN_INSPECT": `${adapter.url}?${query}`,
"BUN_INSPECT_NOTIFY": signal.url,
BUN_INSPECT_CONNECT_TO: "",
},
});
@@ -234,7 +238,10 @@ interface RuntimeExceptionThrownEvent {
}
class FileDebugSession extends DebugSession {
adapter: DebugAdapter;
// If these classes are moved/published, we should make sure
// we remove these non-null assertions so consumers of
// this lib are not running into these hard-to-debug errors.
adapter!: WebSocketDebugAdapter;
sessionId?: string;
untitledDocPath?: string;
bunEvalPath?: string;
@@ -258,7 +265,7 @@ class FileDebugSession extends DebugSession {
: `ws+unix://${tmpdir()}/${uniqueId}.sock`;
const { untitledDocPath, bunEvalPath } = this;
this.adapter = new DebugAdapter(url, untitledDocPath, bunEvalPath);
this.adapter = new WebSocketDebugAdapter(url, untitledDocPath, bunEvalPath);
if (untitledDocPath) {
this.adapter.on("Adapter.response", (response: DebugProtocolResponse) => {
@@ -319,7 +326,7 @@ class FileDebugSession extends DebugSession {
}
class TerminalDebugSession extends FileDebugSession {
signal: TCPSocketSignal | UnixSignal;
signal!: TCPSocketSignal | UnixSignal;
constructor() {
super();
@@ -346,6 +353,7 @@ class TerminalDebugSession extends FileDebugSession {
env: {
"BUN_INSPECT": `${this.adapter.url}?wait=1`,
"BUN_INSPECT_NOTIFY": this.signal.url,
BUN_INSPECT_CONNECT_TO: "",
},
isTransient: true,
iconPath: new vscode.ThemeIcon("debug-console"),
@@ -365,10 +373,6 @@ function getRuntime(scope?: vscode.ConfigurationScope): string {
return "bun";
}
function getConfig<T>(path: string, scope?: vscode.ConfigurationScope) {
return vscode.workspace.getConfiguration("bun", scope).get<T>(path);
}
export async function runUnsavedCode() {
const editor = vscode.window.activeTextEditor;
if (!editor || !editor.document.isUntitled) return;

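The stopOnEntry branch above decides which query string is appended to the adapter URL before it is injected into the debug terminal. A standalone sketch of the resulting environment, using placeholder socket URLs rather than the real adapter and signal addresses:

// Placeholder values; at runtime these come from adapter.url and signal.url.
const adapterUrl = "ws+unix:///tmp/example-adapter.sock";
const signalUrl = "unix:///tmp/example-signal.sock";
const stopOnEntry = false;

const query = stopOnEntry ? "break=1" : "wait=1";
const terminalEnv = {
  "BUN_INSPECT": `${adapterUrl}?${query}`, // e.g. "ws+unix:///tmp/example-adapter.sock?wait=1"
  "BUN_INSPECT_NOTIFY": signalUrl,         // Bun pings this signal when the program starts
  "BUN_INSPECT_CONNECT_TO": "",            // presumably cleared so the debug terminal does not also attach to the diagnostics socket
};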
View File

@@ -0,0 +1,279 @@
import * as fs from "node:fs/promises";
import { Socket } from "node:net";
import * as os from "node:os";
import { inspect } from "node:util";
import * as vscode from "vscode";
import {
getAvailablePort,
NodeSocketDebugAdapter,
TCPSocketSignal,
UnixSignal,
} from "../../../../bun-debug-adapter-protocol";
import type { JSC } from "../../../../bun-inspector-protocol";
import { typedGlobalState } from "../../global-state";
import { getConfig } from "../../extension";
const output = vscode.window.createOutputChannel("Bun - Diagnostics");
const ansiRegex = (() => {
const ST = "(?:\\u0007|\\u001B\\u005C|\\u009C)";
const pattern = [
`[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]+)*|[a-zA-Z\\d]+(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?${ST})`,
"(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-nq-uy=><~]))",
].join("|");
return new RegExp(pattern, "g");
})();
function stripAnsi(str: string) {
return str.replace(ansiRegex, "");
}
class EditorStateManager {
private diagnosticCollection: vscode.DiagnosticCollection;
private disposables: vscode.Disposable[] = [];
public constructor() {
this.diagnosticCollection = vscode.languages.createDiagnosticCollection("BunDiagnostics");
}
getVisibleEditorsWithErrors() {
return vscode.window.visibleTextEditors.filter(editor => {
const diagnostics = this.diagnosticCollection.get(editor.document.uri);
return diagnostics && diagnostics.length > 0;
});
}
clearInFile(uri: vscode.Uri) {
if (this.diagnosticCollection.has(uri)) {
output.appendLine(`Clearing diagnostics for ${uri.toString()}`);
this.diagnosticCollection.delete(uri);
}
}
clearAll(reason: string) {
output.appendLine("Clearing all because: " + reason);
this.diagnosticCollection.clear();
}
set(uri: vscode.Uri, diagnostic: vscode.Diagnostic) {
this.diagnosticCollection.set(uri, [diagnostic]);
}
dispose() {
this.clearAll("Editor state was disposed");
this.disposables.forEach(d => d.dispose());
}
}
class BunDiagnosticsManager {
private readonly editorState: EditorStateManager;
private readonly signal: UnixSignal | TCPSocketSignal;
private readonly context: vscode.ExtensionContext;
public get signalUrl() {
return this.signal.url;
}
private static async getOrRecreateSignal(context: vscode.ExtensionContext) {
const globalState = typedGlobalState(context.globalState);
const existing = globalState.get("BUN_INSPECT_CONNECT_TO");
const isWin = os.platform() === "win32";
if (existing) {
if (existing.type === "unix") {
output.appendLine(`Reusing existing unix socket: ${existing.url}`);
if ("url" in existing) {
await fs.unlink(existing.url).catch(() => {
// Ignore errors: the socket file may already be gone.
});
}
return new UnixSignal(existing.url);
} else {
output.appendLine(`Reusing existing tcp socket on: ${existing.port}`);
return new TCPSocketSignal(existing.port);
}
}
if (isWin) {
const port = await getAvailablePort();
await globalState.update("BUN_INSPECT_CONNECT_TO", {
type: "tcp",
port,
});
output.appendLine(`Created new tcp socket on: ${port}`);
return new TCPSocketSignal(port);
} else {
const signal = new UnixSignal();
await globalState.update("BUN_INSPECT_CONNECT_TO", {
type: "unix",
url: signal.url,
});
output.appendLine(`Created new unix socket: ${signal.url}`);
return signal;
}
}
// private static getOrCreateOldVersionInspectURL = createGlobalStateGenerationFn(
// "DIAGNOSTICS_BUN_INSPECT",
// async () => {
// const url =
// process.platform === "win32"
// ? `ws://127.0.0.1:${await getAvailablePort()}/${getRandomId()}`
// : `ws+unix://${os.tmpdir()}/${getRandomId()}.sock`;
// return url;
// },
// );
public static async initialize(context: vscode.ExtensionContext) {
const signal = await BunDiagnosticsManager.getOrRecreateSignal(context);
return new BunDiagnosticsManager(context, signal);
}
/**
* Called when Bun pings BUN_INSPECT_NOTIFY (indicating a program has started).
*/
private async handleSocketConnection(socket: Socket) {
const debugAdapter = new NodeSocketDebugAdapter(socket);
this.editorState.clearAll("A new socket connected");
debugAdapter.on("LifecycleReporter.reload", async () => {
this.editorState.clearAll("LifecycleReporter reported a reload event");
});
debugAdapter.on("Inspector.event", e => {
output.appendLine(`Received inspector event: ${e.method}`);
});
debugAdapter.on("Inspector.error", e => {
output.appendLine(inspect(e, true, null));
});
debugAdapter.on("LifecycleReporter.error", event => this.handleLifecycleError(event));
const ok = await debugAdapter.start();
if (!ok) {
await vscode.window.showErrorMessage("Failed to start debug adapter");
debugAdapter.removeAllListeners();
return;
}
debugAdapter.initialize({
adapterID: "bun-vsc-terminal-debug-adapter",
enableControlFlowProfiler: false,
enableLifecycleAgentReporter: true,
sendImmediatePreventExit: false,
enableDebugger: false, // Performance overhead when debugger is enabled
});
}
private handleLifecycleError(event: JSC.LifecycleReporter.ErrorEvent) {
const message = stripAnsi(event.message).trim() || event.name || "Error";
output.appendLine(
`Received error event: '{name:${event.name}} ${message.split("\n")[0].trim().substring(0, 100)}'`,
);
const [url = null] = event.urls;
const [line = null, col = null] = event.lineColumns;
if (url === null || url.length === 0 || line === null || col === null) {
output.appendLine("No valid url or line/column found in error event");
output.appendLine(JSON.stringify(event));
return;
}
const uri = vscode.Uri.file(url);
// range is really just 1 character here..
const range = new vscode.Range(new vscode.Position(line - 1, col - 1), new vscode.Position(line - 1, col));
const document = vscode.workspace.textDocuments.find(doc => doc.uri.toString() === uri.toString());
// ...but we want to highlight the entire word at (and including) that character
const rangeOfWord = document?.getWordRangeAtPosition(range.start) ?? range; // Fallback to just the character if no editor or no word range is found
const diagnostic = new vscode.Diagnostic(rangeOfWord, message, vscode.DiagnosticSeverity.Error);
diagnostic.source = "Bun";
const relatedInformation = event.urls.flatMap((url, i) => {
if (i === 0 || url === "") {
return [];
}
const [line = null, col = null] = event.lineColumns.slice(i * 2, i * 2 + 2);
if (line === null || col === null) {
return [];
}
return [
new vscode.DiagnosticRelatedInformation(
new vscode.Location(vscode.Uri.file(url), new vscode.Position(line - 1, col - 1)),
message,
),
];
});
diagnostic.relatedInformation = relatedInformation;
this.editorState.set(uri, diagnostic);
}
public dispose() {
return vscode.Disposable.from(this.editorState, {
dispose: () => {
this.signal.close();
this.signal.removeAllListeners();
},
});
}
private constructor(context: vscode.ExtensionContext, signal: UnixSignal | TCPSocketSignal) {
this.editorState = new EditorStateManager();
this.signal = signal;
this.context = context;
this.context.subscriptions.push(
// on did type
vscode.workspace.onDidChangeTextDocument(e => {
this.editorState.clearInFile(e.document.uri);
}),
);
this.signal.on("Signal.Socket.connect", this.handleSocketConnection.bind(this));
}
}
const description = new vscode.MarkdownString(
"Bun's VSCode extension communicates with Bun over a socket. We set the url in your terminal with the `BUN_INSPECT_NOTIFY` environment variable",
);
export async function registerDiagnosticsSocket(context: vscode.ExtensionContext) {
context.environmentVariableCollection.clear();
context.environmentVariableCollection.description = description;
if (!getConfig("diagnosticsSocket.enabled")) return;
const manager = await BunDiagnosticsManager.initialize(context);
context.environmentVariableCollection.replace("BUN_INSPECT_CONNECT_TO", manager.signalUrl);
context.subscriptions.push(manager);
}

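LifecycleReporter.error events carry parallel arrays: urls[i] pairs with the flattened entries lineColumns[2*i] and lineColumns[2*i + 1], which is why handleLifecycleError slices in steps of two. A standalone sketch of that unpacking with made-up event data (the payload shape follows the code above, not a published schema):

// Hypothetical payload shaped like the JSC.LifecycleReporter.ErrorEvent used above.
const event = {
  name: "TypeError",
  message: "Oops! I did it again.",
  urls: ["/app/index.ts", "/app/util.ts"],
  lineColumns: [3, 15, 10, 1], // [line0, col0, line1, col1], 1-based
};

for (let i = 0; i < event.urls.length; i++) {
  const [line = null, col = null] = event.lineColumns.slice(i * 2, i * 2 + 2);
  if (line === null || col === null) continue;
  console.log(`${event.urls[i]}:${line}:${col}`); // "/app/index.ts:3:15", "/app/util.ts:10:1"
}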
View File

@@ -1,6 +1,7 @@
import { spawn } from "node:child_process";
import * as vscode from "vscode";
import { styleLockfile } from "./lockfile.style";
import { getConfig } from "../../extension";
export type BunLockfile = vscode.CustomDocument & {
readonly preview: string;
@@ -36,6 +37,11 @@ export class BunLockfileEditorProvider implements vscode.CustomReadonlyEditorPro
}
function renderLockfile({ webview }: vscode.WebviewPanel, preview: string, extensionUri: vscode.Uri): void {
if (!getConfig("bunlockb.enabled")) {
webview.html = "<code>bun.bunlockb</code> config option is disabled."
return
}
const styleVSCodeUri = webview.asWebviewUri(vscode.Uri.joinPath(extensionUri, "assets", "vscode.css"));
const lockfileContent = styleLockfile(preview);
@@ -49,7 +55,7 @@ function renderLockfile({ webview }: vscode.WebviewPanel, preview: string, exten
<html lang="en">
<head>
<meta charset="UTF-8">
<meta http-equiv="Content-Security-Policy" content="default-src 'none'; style-src ${webview.cspSource};">
<meta http-equiv="Content-Security-Policy" content="default-src 'none'; style-src ${webview.cspSource};">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<link href="${styleVSCodeUri}" rel="stylesheet" />
</head>

Some files were not shown because too many files have changed in this diff.