Compare commits

...

133 Commits

Author SHA1 Message Date
Meghan Denny
7b566e2cfc Revert "yay"
This reverts commit 6ae4158762.
2025-03-21 13:52:15 -07:00
chloe caruso
6ae4158762 yay 2025-03-21 13:40:51 -07:00
190n
8dc95b041a Fix bun --inspect-brk hanging (#18362) 2025-03-21 13:35:39 -07:00
Meghan Denny
211fd4fa06 deps: bump WebKit (#18349) 2025-03-21 04:40:45 -07:00
Dylan Conway
10665821c4 node:crypto: move Cipheriv and Decipheriv to native code (#18342) 2025-03-21 02:52:52 -07:00
Dylan Conway
a3585ff961 node:crypto: implement hkdf and hkdfSync (#18312) 2025-03-21 01:03:01 -07:00
Meghan Denny
2aeff10a85 [publish images] 2025-03-20 21:46:15 -07:00
Meghan Denny
f2c8e63ae1 update to llvm 19 and c++ 23 (#18317)
Co-authored-by: nektro <5464072+nektro@users.noreply.github.com>
2025-03-20 21:44:19 -07:00
pfg
40bfda0f87 Remove debug assertion failure for missing error code in switch case (#18345) 2025-03-20 21:29:12 -07:00
Jarred Sumner
9888570456 Introduce Bun.Cookie & Bun.CookieMap & request.cookies (in BunRequest) (#18073)
Co-authored-by: Jarred-Sumner <709451+Jarred-Sumner@users.noreply.github.com>
Co-authored-by: pfg <pfg@pfg.pw>
2025-03-20 21:29:00 -07:00
Vincent (Wen Yu) Ge
a1690cd708 Trivial formatting fix in S3 docs (#18346) 2025-03-20 20:15:04 -07:00
Meghan Denny
e602e2b887 Revert "disallow test() within test()" (#18338) 2025-03-20 20:12:20 -07:00
Don Isaac
eae2d61f12 fix(node): add all already-passing tests (#18299) 2025-03-20 20:04:08 -07:00
Jarred Sumner
8e246e1e67 Add precompiled header (#18321) 2025-03-20 19:27:46 -07:00
Kai Tamkun
f30ca39242 More node:http compatibility (#18339)
Co-authored-by: Ciro Spaciari <ciro.spaciari@gmail.com>
2025-03-20 19:16:35 -07:00
Zack Radisic
9f68db4818 Implement vm.compileFunction and fix some node:vm tests (#18285) 2025-03-20 19:08:07 -07:00
Meghan Denny
f1cd5abfaa ci: compress libbun-profile.a before uploading (#18322) 2025-03-20 14:13:45 -07:00
Don Isaac
a8a7da3466 fix(spawn): memory leak in "pipe"d stdout/stderr (#18316)
Co-authored-by: DonIsaac <22823424+DonIsaac@users.noreply.github.com>
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2025-03-20 12:15:32 -07:00
Jarred Sumner
da9c980d26 Fix shell crash in load test (#18320) 2025-03-20 11:36:43 -07:00
nmarks
27cf0d5eaf Remove references to developers former name (#18319)
Co-authored-by: chloe caruso <git@paperclover.net>
2025-03-20 00:53:46 -07:00
chloe caruso
b5cbf16cb8 module pr 2 (#18266) 2025-03-20 00:45:44 -07:00
Meghan Denny
2024fa09d7 js: fix many typescript errors (#18272)
Co-authored-by: Dylan Conway <35280289+dylan-conway@users.noreply.github.com>
2025-03-19 22:39:24 -07:00
Jarred Sumner
46b2a58c25 Small improvements to internal types 2025-03-19 19:26:13 -07:00
Don Isaac
d871b2ebdc fix(node): fix several whatwg tests (#18296) 2025-03-19 18:27:30 -07:00
Jarred Sumner
dc51dab7bc Sort some arrays 2025-03-19 15:45:38 -07:00
Jarred Sumner
e39305dd91 Remove deprecated shim wrapper for zig <> c++ fns (#18269)
Co-authored-by: Jarred-Sumner <709451+Jarred-Sumner@users.noreply.github.com>
Co-authored-by: Dylan Conway <dylan.conway567@gmail.com>
2025-03-19 15:40:08 -07:00
Ashcon Partovi
6e1f1c4da7 Initial support for node:test (#18140) 2025-03-19 11:49:00 -07:00
Alistair Smith
21a42a0dee types: update Uint8Array methods (#18305) 2025-03-19 10:59:33 -07:00
Don Isaac
982083b3e9 fix(node/http): misc fixes (#18294) 2025-03-18 21:22:39 -07:00
Jarred Sumner
40e222c43b Reduce binary size by 400 KB (#18280)
Co-authored-by: Jarred-Sumner <709451+Jarred-Sumner@users.noreply.github.com>
Co-authored-by: Dylan Conway <35280289+dylan-conway@users.noreply.github.com>
2025-03-18 21:02:01 -07:00
Kai Tamkun
c8634668e7 Fix connection event being emitted multiple times per socket in node:http (#18295) 2025-03-18 21:01:07 -07:00
Don Isaac
fa9bb75ad3 fix(uws): make Socket bindings safer (#18286)
Co-authored-by: DonIsaac <22823424+DonIsaac@users.noreply.github.com>
Co-authored-by: chloe caruso <git@paperclover.net>
2025-03-18 19:38:15 -07:00
Don Isaac
c47e402025 fix: crash in Bun.inspect.table (#18256)
Co-authored-by: DonIsaac <22823424+DonIsaac@users.noreply.github.com>
2025-03-18 18:56:56 -07:00
Meghan Denny
a3f48c1d47 ci: include version diffs in dependency update pr descriptions (#18283) 2025-03-18 17:03:48 -07:00
Ben Grant
de048cb474 [publish images] 2025-03-18 11:52:58 -07:00
190n
0c5ee31707 Correctly handle unknown type in FileSystem.DirEntry.addEntry (#18172)
Co-authored-by: 190n <7763597+190n@users.noreply.github.com>
2025-03-18 11:50:15 -07:00
Dylan Conway
c820b0c5e1 node:crypto: implement generatePrime(Sync) and checkPrime(Sync) (#18268) 2025-03-18 11:48:24 -07:00
Don Isaac
d09e381cbc fix(css): :global in css modules (#18257) 2025-03-17 17:15:54 -07:00
190n
53d631f1bd chore: address review feedback from #17820 (#18261) 2025-03-17 16:38:34 -07:00
pfg
74768449bc disallow test() within test() (#18203) 2025-03-15 21:34:35 -07:00
github-actions[bot]
294adc2269 deps: update lolhtml to v2.2.0 (#18222)
Co-authored-by: Jarred-Sumner <Jarred-Sumner@users.noreply.github.com>
2025-03-15 21:33:41 -07:00
Ciro Spaciari
ff97424667 fix(SQL) implement unix socket support (#18196)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2025-03-15 11:51:20 -07:00
Meghan Denny
cbf8d7cad6 empty commit to verify CI post- image publish 2025-03-15 01:16:29 -07:00
Meghan Denny
0c41951b58 [publish images] 2025-03-14 23:55:35 -07:00
Meghan Denny
50b36696f8 ci: upgrade to alpine 3.21 (#18054) 2025-03-14 23:52:39 -07:00
190n
de4182f305 chore: upgrade zig to 0.14.0 (#17820)
Co-authored-by: 190n <7763597+190n@users.noreply.github.com>
Co-authored-by: Zack Radisic <56137411+zackradisic@users.noreply.github.com>
Co-authored-by: pfg <pfg@pfg.pw>
Co-authored-by: pfgithub <6010774+pfgithub@users.noreply.github.com>
Co-authored-by: Dylan Conway <dylan.conway567@gmail.com>
2025-03-14 22:13:31 -07:00
Dylan Conway
4214cc0aaa followup #18044 and #17850 (#18205) 2025-03-14 21:26:12 -07:00
chloe caruso
d1c77f5061 fix dev server regressions from 1.2.5's hmr rewrite (#18109)
Co-authored-by: Zack Radisic <zack@theradisic.com>
Co-authored-by: zackradisic <56137411+zackradisic@users.noreply.github.com>
2025-03-14 21:24:14 -07:00
190n
45e01cdaf2 Mark other PGlite test as TODO on Linux x64 (#18201) 2025-03-14 19:02:45 -07:00
pfg
2fc19daeec Update spawn docs to add timeout and resourceUsage (#18204) 2025-03-14 19:02:22 -07:00
chloe caruso
60c0b9ab96 fix debug windows build (#18178) 2025-03-14 15:25:35 -07:00
Ciro Spaciari
7f948f9c3e fix(sql) fix query parameters options (#18194) 2025-03-14 13:39:55 -07:00
Meghan Denny
66fb9f1097 test: install detect-libc (#18185) 2025-03-14 09:49:19 -07:00
Don Isaac
062a5b9bf8 fix(shell): remove unecessary allocations when printing errors (#17898) 2025-03-14 08:45:34 -07:00
Ciro Spaciari
5bedf15462 fix(crypto) Fix ED25519 from private (#18188) 2025-03-13 23:18:48 -07:00
Meghan Denny
d7aee40387 node: fix test-buffer-creation-regression.js (#18184) 2025-03-13 21:44:43 -07:00
Don Isaac
26f08fabd7 fix(ShadowRealm): give global objects a unique execution context id (#18179) 2025-03-13 21:00:35 -07:00
Jarred Sumner
05b48ce57c Implement node:crypto DiffieHellman (in native code) (#17850)
Co-authored-by: Dylan Conway <dylan.conway567@gmail.com>
2025-03-13 20:26:25 -07:00
Don Isaac
1ed87f4e83 fix: deadlock in Cow debug checks (#18173)
Co-authored-by: DonIsaac <22823424+DonIsaac@users.noreply.github.com>
2025-03-13 16:42:06 -07:00
Niklas Mollenhauer
b089558674 fix: removal of trailing slash in s3 presign (#18158) 2025-03-13 13:19:04 -07:00
Ciro Spaciari
45df1dbba0 fix(usockets) only add socket and context to the free list after socket on_close callback returns (#18144) 2025-03-13 12:45:53 -07:00
Ciro Spaciari
beb32770f0 fix(tests) move to the right folder (#18130) 2025-03-13 12:40:49 -07:00
Meghan Denny
3eec297282 js: no longer provide our own 'detect-libc' (#18138) 2025-03-13 12:40:37 -07:00
Don Isaac
b0b6c979ee fix(bun-plugin-svelte): handle "svelte" export conditions (#18150) 2025-03-13 12:40:22 -07:00
Indigo
7d69ac03ec sqlite: Enable passing options to Database.deserialize to enable strict mode (#17726)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2025-03-13 12:36:51 -07:00
Ashcon Partovi
d7e08abce8 Fix setTimeout(() => {}) from emitting a warning (#18160) 2025-03-13 11:37:48 -07:00
Nicholas Lister
d907942966 docs: fix typos in plugins.md (#18163) 2025-03-13 11:05:21 -07:00
aab
fe0e737f7b types: fix error for Uint8Array.fromBase64 (#18153) 2025-03-13 09:03:45 -07:00
Alistair Smith
8da959df85 fix: Move ShellError inside bun module decl (#18149) 2025-03-12 21:57:04 -07:00
pfg
d7a047a533 Fix #18131 (global catch-all route does not work with callback handler) (#18148) 2025-03-12 21:39:31 -07:00
Meghan Denny
c260223127 node: fix test-tls-translate-peer-certificate.js (#18136) 2025-03-12 21:00:22 -07:00
Meghan Denny
e834a80b7b node: fix test-tls-timeout-server-2.js (#18143) 2025-03-12 19:31:22 -07:00
pfg
7011dd6524 Update webkit build instructions (#18142) 2025-03-12 18:15:35 -07:00
190n
cde668b54c Better edge case handling in napi_value<->String conversion (#18107) 2025-03-12 18:15:00 -07:00
Zack Radisic
01db86e915 Fix #18064 (#18134) 2025-03-12 16:08:16 -07:00
chloe caruso
85376147a4 node:module compatibility pt 1 (#18106) 2025-03-12 15:47:41 -07:00
Meghan Denny
d2ecce272c node: fix test-net-server-close-before-calling-lookup-callback.js (#18103) 2025-03-12 14:21:24 -07:00
Meghan Denny
7ee0b428d6 node: fix test-tls-connect-simple.js (#18094) 2025-03-12 14:20:39 -07:00
Meghan Denny
9482e4c86a node: fix test-tls-close-event-after-write.js (#18098) 2025-03-12 14:20:14 -07:00
Meghan Denny
42276a9500 node: fix test-tls-connect-hwm-option.js (#18096) 2025-03-12 14:20:02 -07:00
Kai Tamkun
ae8f78c84d UDP: reset cached address and remoteAddress properties (#18043) 2025-03-12 14:19:44 -07:00
Meghan Denny
9636852224 node: fix test-tls-client-abort2.js (#18099) 2025-03-12 14:19:22 -07:00
Meghan Denny
5f72715a42 node: fix test-tls-invoke-queued.js (#18091) 2025-03-12 14:19:08 -07:00
Ciro Spaciari
c60b5dd4d6 compat(http) more compat in http (#18074) 2025-03-12 14:18:51 -07:00
Meghan Denny
42c474a21f node: fix test-net-socket-end-callback.js (#18102) 2025-03-12 14:17:29 -07:00
Meghan Denny
04078fbf61 node: fix test-tls-0-dns-altname.js (#18100) 2025-03-12 14:17:18 -07:00
Zack Radisic
28ebbb3f20 Fix node:vm test (#18081) 2025-03-12 14:16:03 -07:00
ippsav
96fa32bcc1 Fix transpiler encoding issue (#18057) 2025-03-12 13:58:53 -07:00
Pham Minh Triet
b3246b6971 fix(docs): remove extra character (#18123) 2025-03-12 13:26:27 -07:00
Meghan Denny
0345414ded node: fix test-net-reuseport.js (#18104) 2025-03-12 12:25:39 -07:00
Alistair Smith
01d214b276 Fix some higher priority @types/bun issues (devserver, serve) (#18121) 2025-03-12 18:38:31 +00:00
pfg
fdd181d68d Even more child process tests passing (#18052) 2025-03-11 22:52:12 -07:00
pfg
5c7df736bf Bring back btjs (#18108) 2025-03-11 22:51:05 -07:00
Meghan Denny
29870cb572 node: fix test-tls-interleave.js (#18092) 2025-03-11 20:33:42 -07:00
Meghan Denny
32223e90e3 node: fix test-tls-transport-destroy-after-own-gc.js (#18087) 2025-03-11 20:33:25 -07:00
Meghan Denny
31198cdbd9 node: fix test-tls-connect-pipe.js (#18095) 2025-03-11 20:33:13 -07:00
Meghan Denny
971f2b1ed7 node: fix test-tls-destroy-whilst-write.js (#18093) 2025-03-11 20:32:52 -07:00
chloe caruso
832cf91e88 remove a memory leak in bun.String.concat/createFromConcat (#18084) 2025-03-11 20:30:51 -07:00
Kai Tamkun
2e010073aa Fix express responses dying early (#18080) 2025-03-11 19:53:50 -07:00
Ciro Spaciari
4c93b72906 compat(http2) more http2 compatibility improvements (#18060)
Co-authored-by: cirospaciari <6379399+cirospaciari@users.noreply.github.com>
2025-03-11 19:46:05 -07:00
Meghan Denny
7091fd5791 node: fix test-tls-write-error.js (#18082) 2025-03-11 18:46:15 -07:00
Meghan Denny
e5edd388a0 node: fix test-tls-use-after-free-regression.js (#18085) 2025-03-11 18:45:12 -07:00
Meghan Denny
b887270e25 node: fix test-tls-no-rsa-key.js (#18090) 2025-03-11 18:40:30 -07:00
Meghan Denny
fc0d0ad8d3 node: fix test-tls-set-encoding.js (#18088) 2025-03-11 18:39:15 -07:00
Dylan Conway
ddfc8555f7 crypto: fix test-crypto-random.js (#18044)
Co-authored-by: Meghan Denny <meghan@bun.sh>
2025-03-11 18:21:20 -07:00
Meghan Denny
6d0739f7d9 js: de-class-ify node:tls.TLSSocket (#18058) 2025-03-11 16:37:50 -07:00
Don Isaac
fdd750e4b5 docs(bun-plugin-svelte): add example (#18076) 2025-03-11 14:39:10 -07:00
Don Isaac
9a5afe371a fix(bun-plugin-svelte): fix svelte module imports (#18042) 2025-03-11 12:01:15 -07:00
Dylan Conway
5123561889 fix assertion in JSBuffer.cpp (#18048) 2025-03-11 10:20:15 -07:00
Meghan Denny
ba7f59355f js: de-class-ify node:net.Socket (#17997) 2025-03-10 23:37:11 -07:00
Michael H
a79f92df9e CI: fix canary uploading for x64 macos (#18053) 2025-03-10 21:59:13 -07:00
Meghan Denny
8bc88763ec Bump 2025-03-10 21:06:52 -07:00
Kai Tamkun
4a0e982bb2 node:http improvements (#17093)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
Co-authored-by: Pham Minh Triet <92496972+Nanome203@users.noreply.github.com>
Co-authored-by: snwy <snwy@snwy.me>
Co-authored-by: Ciro Spaciari <ciro.spaciari@gmail.com>
Co-authored-by: cirospaciari <cirospaciari@users.noreply.github.com>
Co-authored-by: Ben Grant <ben@bun.sh>
2025-03-10 20:19:29 -07:00
Ciro Spaciari
013fdddc6e feat(CSRF) implement Bun.CSRF (#18045)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2025-03-10 17:51:57 -07:00
190n
a9ca465ad0 Bump WebKit (#18039) 2025-03-10 12:39:20 -07:00
Alistair Smith
cd4d75ee7b Fix type error for base64 operations (#18034)
Co-authored-by: azom <dev@azom.ca>
2025-03-10 09:40:29 -07:00
pfg
aa2e109f5f Add launch configuration for rr (#17963) 2025-03-09 00:19:20 -08:00
Dylan Conway
45e3c9da70 Add destroy and destructors to Hmac, Verify, Sign, and Hash (#17996) 2025-03-07 22:55:39 -08:00
Jarred Sumner
cee026b87e Micro optimize latin1IdentifierContinueLength (#17972) 2025-03-07 21:46:14 -08:00
Dylan Conway
1a68ce05dc Add a few passing tests for node:crypto (#17987) 2025-03-07 20:53:06 -08:00
Don Isaac
bf0253df1d fix(cli): ignore --loader flag when running as node (#17992) 2025-03-07 20:32:07 -08:00
Jarred Sumner
2e3e6a15e0 Make TimeoutObject 8 bytes smaller (#17976)
Co-authored-by: Ben Grant <ben@bun.sh>
2025-03-07 20:07:31 -08:00
chloe caruso
589fa6274d dev server: forgotten changes (#17985)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2025-03-07 17:53:07 -08:00
Pham Minh Triet
4cf0d39e58 fix(docs): typo in css.md (#17973) 2025-03-07 17:28:45 -08:00
Kilian Brachtendorf
a1952c71f7 docs: add note about bun publish respecting NPM_CONFIG_TOKEN (#17975) 2025-03-07 17:28:16 -08:00
Dylan Conway
48df26462d fix test-crypto-randomuuid.js (#17955) 2025-03-07 17:05:17 -08:00
chloe caruso
66cf62c3c4 dev server: rewrite HMRModule, support sync esm + hot.accept (#17954)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2025-03-07 15:12:16 -08:00
Meghan Denny
9d6729fef3 docs: simplify bundler headings 2025-03-07 01:34:01 -08:00
Meghan Denny
20144ced54 docs: bundler/css.md: remove redundant heading 2025-03-07 01:22:18 -08:00
Meghan Denny
2e6cbd9a4d node: update test/common (#17786) 2025-03-07 00:32:23 -08:00
Meghan Denny
85f49a7a1a node: fix test-net-server-listen-options-signal.js (#17782)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2025-03-07 00:32:05 -08:00
Meghan Denny
6ba858dfbb node: fix test-net-connect-reset.js (#17823) 2025-03-07 00:31:41 -08:00
Meghan Denny
7b423d5ff8 node: fix test-warn-stream-wrap.js (#17937) 2025-03-07 00:30:56 -08:00
1558 changed files with 76705 additions and 25393 deletions


@@ -1,12 +1,12 @@
ARG LLVM_VERSION="18"
ARG REPORTED_LLVM_VERSION="18.1.8"
ARG LLVM_VERSION="19"
ARG REPORTED_LLVM_VERSION="19.1.7"
ARG OLD_BUN_VERSION="1.1.38"
ARG DEFAULT_CFLAGS="-mno-omit-leaf-frame-pointer -fno-omit-frame-pointer -ffunction-sections -fdata-sections -faddrsig -fno-unwind-tables -fno-asynchronous-unwind-tables"
ARG DEFAULT_CXXFLAGS="-flto=full -fwhole-program-vtables -fforce-emit-vtables"
ARG BUILDKITE_AGENT_TAGS="queue=linux,os=linux,arch=${TARGETARCH}"
FROM --platform=$BUILDPLATFORM ubuntu:20.04 as base-arm64
-FROM --platform=$BUILDPLATFORM ubuntu:18.04 as base-amd64
+FROM --platform=$BUILDPLATFORM ubuntu:20.04 as base-amd64
FROM base-$TARGETARCH as base
ARG LLVM_VERSION


@@ -107,9 +107,9 @@ const buildPlatforms = [
{ os: "linux", arch: "aarch64", distro: "amazonlinux", release: "2023", features: ["docker"] },
{ os: "linux", arch: "x64", distro: "amazonlinux", release: "2023", features: ["docker"] },
{ os: "linux", arch: "x64", baseline: true, distro: "amazonlinux", release: "2023", features: ["docker"] },
{ os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.20" },
{ os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.20" },
{ os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.20" },
{ os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.21" },
{ os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.21" },
{ os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.21" },
{ os: "windows", arch: "x64", release: "2019" },
{ os: "windows", arch: "x64", baseline: true, release: "2019" },
];
@@ -134,9 +134,9 @@ const testPlatforms = [
{ os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "24.04", tier: "latest" },
{ os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "22.04", tier: "previous" },
{ os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "20.04", tier: "oldest" },
{ os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.20", tier: "latest" },
{ os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.20", tier: "latest" },
{ os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.20", tier: "latest" },
{ os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.21", tier: "latest" },
{ os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.21", tier: "latest" },
{ os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.21", tier: "latest" },
{ os: "windows", arch: "x64", release: "2019", tier: "oldest" },
{ os: "windows", arch: "x64", release: "2019", baseline: true, tier: "oldest" },
];


@@ -201,6 +201,8 @@ function create_release() {
local artifacts=(
bun-darwin-aarch64.zip
bun-darwin-aarch64-profile.zip
bun-darwin-x64.zip
bun-darwin-x64-profile.zip
bun-linux-aarch64.zip
bun-linux-aarch64-profile.zip
bun-linux-x64.zip


@@ -11,8 +11,8 @@ on:
env:
BUN_VERSION: "1.2.0"
LLVM_VERSION: "18.1.8"
LLVM_VERSION_MAJOR: "18"
LLVM_VERSION: "19.1.7"
LLVM_VERSION_MAJOR: "19"
jobs:
clang-format:


@@ -11,8 +11,8 @@ on:
env:
BUN_VERSION: "1.2.0"
LLVM_VERSION: "18.1.8"
LLVM_VERSION_MAJOR: "18"
LLVM_VERSION: "19.1.7"
LLVM_VERSION_MAJOR: "19"
jobs:
clang-tidy:


@@ -3,7 +3,6 @@ name: Lint
permissions:
contents: read
env:
-LLVM_VERSION: 16
BUN_VERSION: "1.2.0"
on:


@@ -89,4 +89,6 @@ jobs:
Updates c-ares to version ${{ steps.check-version.outputs.tag }}
Compare: https://github.com/c-ares/c-ares/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-cares.yml)


@@ -89,4 +89,6 @@ jobs:
Updates libarchive to version ${{ steps.check-version.outputs.tag }}
Compare: https://github.com/libarchive/libarchive/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-libarchive.yml)


@@ -89,4 +89,6 @@ jobs:
Updates libdeflate to version ${{ steps.check-version.outputs.tag }}
Compare: https://github.com/ebiggers/libdeflate/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-libdeflate.yml)


@@ -89,4 +89,6 @@ jobs:
Updates lolhtml to version ${{ steps.check-version.outputs.tag }}
Compare: https://github.com/cloudflare/lol-html/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-lolhtml.yml)


@@ -89,4 +89,6 @@ jobs:
Updates lshpack to version ${{ steps.check-version.outputs.tag }}
Compare: https://github.com/litespeedtech/ls-hpack/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-lshpack.yml)


@@ -106,4 +106,6 @@ jobs:
Updates SQLite to version ${{ steps.check-version.outputs.latest }}
Compare: https://sqlite.org/src/vdiff?from=${{ steps.check-version.outputs.current }}&to=${{ steps.check-version.outputs.latest }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-sqlite3.yml)


@@ -1,4 +1,16 @@
# command script import vendor/zig/tools/lldb_pretty_printers.py
command script import vendor/WebKit/Tools/lldb/lldb_webkit.py
# Tell LLDB what to do when the debugged process receives SIGPWR: pass it through to the process
# (-p), but do not stop the process (-s) or notify the user (-n).
#
# JSC's garbage collector sends this signal (as configured by Bun WebKit in
# Thread::initializePlatformThreading() in ThreadingPOSIX.cpp) to the JS thread to suspend or resume
# it. So stopping the process would just create noise when debugging any long-running script.
process handle -p true -s false -n false SIGPWR
# type summary add --summary-string "${var} | inner=${var[0-30]}, source=${var[33-64]}, tag=${var[31-32]}" "unsigned long"
command script import misctools/lldb/lldb_pretty_printers.py
type category enable zig.lang
type category enable zig.std
command script import misctools/lldb/lldb_webkit.py
command script delete btjs
command alias btjs p {printf("gathering btjs trace...\n");printf("%s\n", (char*)dumpBtjsTrace())}

.vscode/launch.json

@@ -22,7 +22,6 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -38,7 +37,6 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -60,7 +58,6 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -76,7 +73,6 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -92,7 +88,6 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -108,7 +103,6 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -125,7 +119,6 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -147,7 +140,6 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -169,7 +161,6 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -188,7 +179,6 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -203,7 +193,6 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -221,7 +210,6 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -236,7 +224,6 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -253,7 +240,6 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -275,7 +261,6 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -297,7 +282,6 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -313,7 +297,6 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -329,7 +312,6 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -345,7 +327,6 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -361,7 +342,6 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -378,7 +358,6 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -400,7 +379,6 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -421,7 +399,6 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
// bun test [*]
{
@@ -437,7 +414,6 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -452,7 +428,6 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -468,7 +443,6 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -488,7 +462,6 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -503,7 +476,6 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
// Windows: bun test [file]
{
@@ -1129,7 +1101,24 @@
],
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "bun",
"name": "[JS] bun test [file]",
"runtime": "${workspaceFolder}/build/debug/bun-debug",
"runtimeArgs": ["test", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
},
{
"type": "midas-rr",
"request": "attach",
"name": "rr",
"trace": "Off",
"setupCommands": ["handle SIGPWR nostop noprint pass"],
},
],
"inputs": [


@@ -80,7 +80,7 @@ $ sudo zypper install clang18 lld18 llvm18
{% /codetabs %}
-If none of the above solutions apply, you will have to install it [manually](https://github.com/llvm/llvm-project/releases/tag/llvmorg-18.1.8).
+If none of the above solutions apply, you will have to install it [manually](https://github.com/llvm/llvm-project/releases/tag/llvmorg-19.1.7).
Make sure Clang/LLVM 18 is in your path:
@@ -205,18 +205,30 @@ WebKit is not cloned by default (to save time and disk space). To clone and buil
# Clone WebKit into ./vendor/WebKit
$ git clone https://github.com/oven-sh/WebKit vendor/WebKit
# Check out the commit hash specified in `set(WEBKIT_VERSION <commit_hash>)` in cmake/tools/SetupWebKit.cmake
$ git -C vendor/WebKit checkout <commit_hash>
# Make a debug build of JSC. This will output build artifacts in ./vendor/WebKit/WebKitBuild/Debug
# Optionally, you can use `make jsc` for a release build
-$ make jsc-debug
+$ make jsc-debug && rm vendor/WebKit/WebKitBuild/Debug/JavaScriptCore/DerivedSources/inspector/InspectorProtocolObjects.h
# Build bun with the local JSC build
$ bun run build:local
```
Using `bun run build:local` will build Bun in the `./build/debug-local` directory (instead of `./build/debug`); you'll have to change a couple of places to use this new directory:
- The first line in [`src/js/builtins.d.ts`](/src/js/builtins.d.ts)
- The `CompilationDatabase` line in [`.clangd` config](/.clangd) should be `CompilationDatabase: build/debug-local`
- In [`build.zig`](/build.zig), the `codegen_path` option should be `build/debug-local/codegen` (instead of `build/debug/codegen`)
- In [`.vscode/launch.json`](/.vscode/launch.json), many configurations use `./build/debug/`, change them as you see fit
Note that the WebKit folder, including build artifacts, is 8GB+ in size.
If you are using a JSC debug build and using VS Code, make sure to run the `C/C++: Select a Configuration` command to configure IntelliSense to find the debug headers.
Note that if you make changes to our [WebKit fork](https://github.com/oven-sh/WebKit), you will also have to change [`SetupWebKit.cmake`](/cmake/tools/SetupWebKit.cmake) to point to the new commit hash.
## Troubleshooting
### 'span' file not found on Ubuntu

LATEST

@@ -1 +1 @@
-1.2.4
+1.2.5


@@ -91,9 +91,9 @@ ZIG ?= $(shell which zig 2>/dev/null || echo -e "error: Missing zig. Please make
# This is easier to happen than you'd expect.
# Using realpath here causes issues because clang uses clang++ as a symlink
# so if that's resolved, it won't build for C++
-REAL_CC = $(shell which clang-18 2>/dev/null || which clang 2>/dev/null)
-REAL_CXX = $(shell which clang++-18 2>/dev/null || which clang++ 2>/dev/null)
-CLANG_FORMAT = $(shell which clang-format-18 2>/dev/null || which clang-format 2>/dev/null)
+REAL_CC = $(shell which clang-19 2>/dev/null || which clang 2>/dev/null)
+REAL_CXX = $(shell which clang++-19 2>/dev/null || which clang++ 2>/dev/null)
+CLANG_FORMAT = $(shell which clang-format-19 2>/dev/null || which clang-format 2>/dev/null)
CC = $(REAL_CC)
CXX = $(REAL_CXX)
@@ -117,14 +117,14 @@ CC_WITH_CCACHE = $(CCACHE_PATH) $(CC)
ifeq ($(OS_NAME),darwin)
# Find LLVM
ifeq ($(wildcard $(LLVM_PREFIX)),)
-LLVM_PREFIX = $(shell brew --prefix llvm@18)
+LLVM_PREFIX = $(shell brew --prefix llvm@19)
endif
ifeq ($(wildcard $(LLVM_PREFIX)),)
LLVM_PREFIX = $(shell brew --prefix llvm)
endif
ifeq ($(wildcard $(LLVM_PREFIX)),)
# This is kinda ugly, but I can't find a better way to error :(
LLVM_PREFIX = $(shell echo -e "error: Unable to find llvm. Please run 'brew install llvm@18' or set LLVM_PREFIX=/path/to/llvm")
LLVM_PREFIX = $(shell echo -e "error: Unable to find llvm. Please run 'brew install llvm@19' or set LLVM_PREFIX=/path/to/llvm")
endif
LDFLAGS += -L$(LLVM_PREFIX)/lib
@@ -164,7 +164,7 @@ CMAKE_FLAGS_WITHOUT_RELEASE = -DCMAKE_C_COMPILER=$(CC) \
-DCMAKE_OSX_DEPLOYMENT_TARGET=$(MIN_MACOS_VERSION) \
$(CMAKE_CXX_COMPILER_LAUNCHER_FLAG) \
-DCMAKE_AR=$(AR) \
--DCMAKE_RANLIB=$(which llvm-18-ranlib 2>/dev/null || which llvm-ranlib 2>/dev/null) \
+-DCMAKE_RANLIB=$(which llvm-19-ranlib 2>/dev/null || which llvm-ranlib 2>/dev/null) \
-DCMAKE_CXX_STANDARD=20 \
-DCMAKE_C_STANDARD=17 \
-DCMAKE_CXX_STANDARD_REQUIRED=ON \
@@ -191,7 +191,7 @@ endif
ifeq ($(OS_NAME),linux)
LIBICONV_PATH =
-AR = $(shell which llvm-ar-18 2>/dev/null || which llvm-ar 2>/dev/null || which ar 2>/dev/null)
+AR = $(shell which llvm-ar-19 2>/dev/null || which llvm-ar 2>/dev/null || which ar 2>/dev/null)
endif
OPTIMIZATION_LEVEL=-O3 $(MARCH_NATIVE)
@@ -255,7 +255,7 @@ DEFAULT_LINKER_FLAGS= -pthread -ldl
endif
ifeq ($(OS_NAME),darwin)
_MIMALLOC_OBJECT_FILE = 0
-JSC_BUILD_STEPS += jsc-build-mac jsc-copy-headers
+JSC_BUILD_STEPS += jsc-build-mac
JSC_BUILD_STEPS_DEBUG += jsc-build-mac-debug
_MIMALLOC_FILE = libmimalloc.a
_MIMALLOC_INPUT_PATH = libmimalloc.a
@@ -286,7 +286,7 @@ STRIP=/usr/bin/strip
endif
ifeq ($(OS_NAME),linux)
-STRIP=$(shell which llvm-strip 2>/dev/null || which llvm-strip-18 2>/dev/null || which strip 2>/dev/null || echo "Missing strip")
+STRIP=$(shell which llvm-strip 2>/dev/null || which llvm-strip-19 2>/dev/null || which strip 2>/dev/null || echo "Missing strip")
endif
@@ -674,7 +674,7 @@ endif
.PHONY: assert-deps
assert-deps:
@echo "Checking if the required utilities are available..."
@if [ $(CLANG_VERSION) -lt "18" ]; then echo -e "ERROR: clang version >=18 required, found: $(CLANG_VERSION). Install with:\n\n $(POSIX_PKG_MANAGER) install llvm@18"; exit 1; fi
@if [ $(CLANG_VERSION) -lt "19" ]; then echo -e "ERROR: clang version >=19 required, found: $(CLANG_VERSION). Install with:\n\n $(POSIX_PKG_MANAGER) install llvm@19"; exit 1; fi
@cmake --version >/dev/null 2>&1 || (echo -e "ERROR: cmake is required."; exit 1)
@$(PYTHON) --version >/dev/null 2>&1 || (echo -e "ERROR: python is required."; exit 1)
@$(ESBUILD) --version >/dev/null 2>&1 || (echo -e "ERROR: esbuild is required."; exit 1)
@@ -924,7 +924,7 @@ bun-codesign-release-local-debug:
.PHONY: jsc
-jsc: jsc-build jsc-copy-headers jsc-bindings
+jsc: jsc-build
.PHONY: jsc-debug
jsc-debug: jsc-build-debug
.PHONY: jsc-build


@@ -0,0 +1,53 @@
import crypto from "node:crypto";
import { bench, run } from "../runner.mjs";
// Pre-generate DH params to avoid including setup in benchmarks
const dhSize = 1024; // Reduced from 2048 for faster testing
const dh = crypto.createDiffieHellman(dhSize);
const dhPrime = dh.getPrime();
const dhGenerator = dh.getGenerator();
// Classical Diffie-Hellman
bench("DH - generateKeys", () => {
const alice = crypto.createDiffieHellman(dhPrime, dhGenerator);
return alice.generateKeys();
});
bench("DH - computeSecret", () => {
// Setup
const alice = crypto.createDiffieHellman(dhPrime, dhGenerator);
const aliceKey = alice.generateKeys();
const bob = crypto.createDiffieHellman(dhPrime, dhGenerator);
const bobKey = bob.generateKeys();
// Benchmark just the secret computation
return alice.computeSecret(bobKey);
});
// ECDH with prime256v1 (P-256)
bench("ECDH-P256 - generateKeys", () => {
const ecdh = crypto.createECDH("prime256v1");
return ecdh.generateKeys();
});
bench("ECDH-P256 - computeSecret", () => {
// Setup
const alice = crypto.createECDH("prime256v1");
const aliceKey = alice.generateKeys();
const bob = crypto.createECDH("prime256v1");
const bobKey = bob.generateKeys();
// Benchmark just the secret computation
return alice.computeSecret(bobKey);
});
// ECDH with secp384r1 (P-384)
bench("ECDH-P384 - computeSecret", () => {
const alice = crypto.createECDH("secp384r1");
const aliceKey = alice.generateKeys();
const bob = crypto.createECDH("secp384r1");
const bobKey = bob.generateKeys();
return alice.computeSecret(bobKey);
});
await run();


@@ -0,0 +1,44 @@
import crypto from "node:crypto";
import { bench, run } from "../runner.mjs";
function generateTestKeyPairs() {
const curves = crypto.getCurves();
const keys = {};
for (const curve of curves) {
const ecdh = crypto.createECDH(curve);
ecdh.generateKeys();
keys[curve] = {
compressed: ecdh.getPublicKey("hex", "compressed"),
uncompressed: ecdh.getPublicKey("hex", "uncompressed"),
instance: ecdh,
};
}
return keys;
}
const testKeys = generateTestKeyPairs();
bench("ECDH key format - P256 compressed to uncompressed", () => {
const publicKey = testKeys["prime256v1"].compressed;
return crypto.ECDH.convertKey(publicKey, "prime256v1", "hex", "hex", "uncompressed");
});
bench("ECDH key format - P256 uncompressed to compressed", () => {
const publicKey = testKeys["prime256v1"].uncompressed;
return crypto.ECDH.convertKey(publicKey, "prime256v1", "hex", "hex", "compressed");
});
bench("ECDH key format - P384 compressed to uncompressed", () => {
const publicKey = testKeys["secp384r1"].compressed;
return crypto.ECDH.convertKey(publicKey, "secp384r1", "hex", "hex", "uncompressed");
});
bench("ECDH key format - P384 uncompressed to compressed", () => {
const publicKey = testKeys["secp384r1"].uncompressed;
return crypto.ECDH.convertKey(publicKey, "secp384r1", "hex", "hex", "compressed");
});
await run();

bench/crypto/hkdf.mjs

@@ -0,0 +1,50 @@
import crypto from "node:crypto";
import { bench, run } from "../runner.mjs";
// Sample keys with different lengths
const keys = {
short: "secret",
long: "this-is-a-much-longer-secret-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
};
// Test parameters
const salts = ["", "salt"];
const infos = ["", "info"];
const hashes = ["sha256", "sha512"];
const sizes = [10, 1024];
// Benchmark sync HKDF
for (const hash of hashes) {
for (const keyName of Object.keys(keys)) {
const key = keys[keyName];
for (const size of sizes) {
bench(`hkdfSync ${hash} ${keyName}-key ${size} bytes`, () => {
return crypto.hkdfSync(hash, key, "salt", "info", size);
});
}
}
}
// Benchmark different combinations of salt and info
for (const salt of salts) {
for (const info of infos) {
bench(`hkdfSync sha256 with ${salt ? "salt" : "no-salt"} and ${info ? "info" : "no-info"}`, () => {
return crypto.hkdfSync("sha256", "secret", salt, info, 64);
});
}
}
// Benchmark async HKDF (using promises for cleaner benchmark)
// Note: async benchmarks in Mitata require returning a Promise
for (const hash of hashes) {
bench(`hkdf ${hash} async`, async () => {
return new Promise((resolve, reject) => {
crypto.hkdf(hash, "secret", "salt", "info", 64, (err, derivedKey) => {
if (err) reject(err);
else resolve(derivedKey);
});
});
});
}
await run();

bench/crypto/primes.mjs

@@ -0,0 +1,43 @@
import { checkPrime, checkPrimeSync, generatePrime, generatePrimeSync } from "node:crypto";
import { bench, run } from "../runner.mjs";
const prime512 = generatePrimeSync(512);
const prime2048 = generatePrimeSync(2048);
bench("checkPrimeSync 512", () => {
return checkPrimeSync(prime512);
});
bench("checkPrimeSync 2048", () => {
return checkPrimeSync(prime2048);
});
bench("checkPrime 512", async () => {
const promises = Array.from({ length: 10 }, () => new Promise(resolve => checkPrime(prime512, resolve)));
await Promise.all(promises);
});
bench("checkPrime 2048", async () => {
const promises = Array.from({ length: 10 }, () => new Promise(resolve => checkPrime(prime2048, resolve)));
await Promise.all(promises);
});
bench("generatePrimeSync 512", () => {
return generatePrimeSync(512);
});
bench("generatePrimeSync 2048", () => {
return generatePrimeSync(2048);
});
bench("generatePrime 512", async () => {
const promises = Array.from({ length: 10 }, () => new Promise(resolve => generatePrime(512, resolve)));
await Promise.all(promises);
});
bench("generatePrime 2048", async () => {
const promises = Array.from({ length: 10 }, () => new Promise(resolve => generatePrime(2048, resolve)));
await Promise.all(promises);
});
await run();

bench/crypto/random.mjs

@@ -0,0 +1,50 @@
import crypto from "crypto";
import { bench, run } from "../runner.mjs";
bench("randomInt - sync", () => {
crypto.randomInt(1000);
});
bench("randomInt - async", async () => {
const { promise, resolve } = Promise.withResolvers();
crypto.randomInt(1000, () => {
resolve();
});
await promise;
});
bench("randonBytes - 32", () => {
crypto.randomBytes(32);
});
bench("randomBytes - 256", () => {
crypto.randomBytes(256);
});
const buf = Buffer.alloc(256);
bench("randomFill - 32", async () => {
const { promise, resolve } = Promise.withResolvers();
crypto.randomFill(buf, 0, 32, () => {
resolve();
});
await promise;
});
bench("randomFill - 256", async () => {
const { promise, resolve } = Promise.withResolvers();
crypto.randomFill(buf, 0, 256, () => {
resolve();
});
await promise;
});
bench("randomFillSync - 32", () => {
crypto.randomFillSync(buf, 0, 32);
});
bench("randomFillSync - 256", () => {
crypto.randomFillSync(buf, 0, 256);
});
await run();


@@ -12,6 +12,7 @@
"eventemitter3": "^5.0.0",
"execa": "^8.0.1",
"fast-glob": "3.3.1",
"fastify": "^5.0.0",
"fdir": "^6.1.0",
"mitata": "^1.0.25",
"react": "^18.3.1",


@@ -0,0 +1,13 @@
import express from "express";
const app = express();
const port = 3000;
var i = 0;
app.get("/", (req, res) => {
res.send("Hello World!" + i++);
});
app.listen(port, () => {
console.log(`Express app listening at http://localhost:${port}`);
});


@@ -0,0 +1,20 @@
import Fastify from "fastify";
const fastify = Fastify({
logger: false,
});
fastify.get("/", async (request, reply) => {
return { hello: "world" };
});
const start = async () => {
try {
await fastify.listen({ port: 3000 });
} catch (err) {
fastify.log.error(err);
process.exit(1);
}
};
start();


@@ -19,17 +19,17 @@ const OperatingSystem = @import("src/env.zig").OperatingSystem;
const pathRel = fs.path.relative;
/// Do not rename this constant. It is scanned by some scripts to determine which zig version to install.
const recommended_zig_version = "0.14.0-dev.2987+183bb8b08";
const recommended_zig_version = "0.14.0";
comptime {
if (!std.mem.eql(u8, builtin.zig_version_string, recommended_zig_version)) {
@compileError(
"" ++
"Bun requires Zig version " ++ recommended_zig_version ++ " (found " ++
builtin.zig_version_string ++ "). This is " ++
"automatically configured via Bun's CMake setup. You likely meant to run " ++
"`bun setup`. If you are trying to upgrade the Zig compiler, " ++
"run `./scripts/download-zig.sh master` or comment this message out.",
"Bun requires Zig version " ++ recommended_zig_version ++ ", but you have " ++
builtin.zig_version_string ++ ". This is automatically configured via Bun's " ++
"CMake setup. You likely meant to run `bun run build`. If you are trying to " ++
"upgrade the Zig compiler, edit ZIG_COMMIT in cmake/tools/SetupZig.cmake or " ++
"comment this error out.",
);
}
}
@@ -319,7 +319,21 @@ pub fn build(b: *Build) !void {
.{ .os = .linux, .arch = .aarch64 },
.{ .os = .linux, .arch = .x86_64, .musl = true },
.{ .os = .linux, .arch = .aarch64, .musl = true },
-});
+}, &.{ .Debug, .ReleaseFast });
}
// zig build check-all-debug
{
const step = b.step("check-all-debug", "Check for semantic analysis errors on all supported platforms in debug mode");
addMultiCheck(b, step, build_options, &.{
.{ .os = .windows, .arch = .x86_64 },
.{ .os = .mac, .arch = .x86_64 },
.{ .os = .mac, .arch = .aarch64 },
.{ .os = .linux, .arch = .x86_64 },
.{ .os = .linux, .arch = .aarch64 },
.{ .os = .linux, .arch = .x86_64, .musl = true },
.{ .os = .linux, .arch = .aarch64, .musl = true },
}, &.{.Debug});
}
// zig build check-windows
@@ -327,21 +341,21 @@ pub fn build(b: *Build) !void {
const step = b.step("check-windows", "Check for semantic analysis errors on Windows");
addMultiCheck(b, step, build_options, &.{
.{ .os = .windows, .arch = .x86_64 },
-});
+}, &.{ .Debug, .ReleaseFast });
}
{
const step = b.step("check-macos", "Check for semantic analysis errors on Windows");
addMultiCheck(b, step, build_options, &.{
.{ .os = .mac, .arch = .x86_64 },
.{ .os = .mac, .arch = .aarch64 },
-});
+}, &.{ .Debug, .ReleaseFast });
}
{
const step = b.step("check-linux", "Check for semantic analysis errors on Windows");
addMultiCheck(b, step, build_options, &.{
.{ .os = .linux, .arch = .x86_64 },
.{ .os = .linux, .arch = .aarch64 },
-});
+}, &.{ .Debug, .ReleaseFast });
}
// zig build translate-c-headers
@@ -369,9 +383,10 @@ pub fn addMultiCheck(
parent_step: *Step,
root_build_options: BunBuildOptions,
to_check: []const struct { os: OperatingSystem, arch: Arch, musl: bool = false },
optimize: []const std.builtin.OptimizeMode,
) void {
for (to_check) |check| {
-for ([_]std.builtin.Mode{ .Debug, .ReleaseFast }) |mode| {
+for (optimize) |mode| {
const check_target = b.resolveTargetQuery(.{
.os_tag = OperatingSystem.stdOSTag(check.os),
.cpu_arch = check.arch,


@@ -2,3 +2,7 @@
# https://github.com/oven-sh/bun/issues/16289
[test]
preload = ["./test/js/node/harness.ts", "./test/preload.ts"]
[install]
# Node.js never auto-installs modules.
auto = "disable"


@@ -419,7 +419,15 @@ function(register_command)
list(APPEND CMD_EFFECTIVE_OUTPUTS ${artifact})
if(BUILDKITE)
file(RELATIVE_PATH filename ${BUILD_PATH} ${artifact})
-list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} buildkite-agent artifact upload ${filename})
+if(filename STREQUAL "libbun-profile.a")
+# libbun-profile.a is now over 5gb in size, compress it first
+list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} rm -r ${BUILD_PATH}/codegen)
+list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} rm -r ${CACHE_PATH})
+list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} gzip -6 libbun-profile.a)
+list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} buildkite-agent artifact upload libbun-profile.a.gz)
+else()
+list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} buildkite-agent artifact upload ${filename})
+endif()
endif()
endforeach()


@@ -738,7 +738,7 @@ endif()
# --- C/C++ Properties ---
set_target_properties(${bun} PROPERTIES
-CXX_STANDARD 20
+CXX_STANDARD 23
CXX_STANDARD_REQUIRED YES
CXX_EXTENSIONS YES
CXX_VISIBILITY_PRESET hidden
@@ -747,6 +747,18 @@ set_target_properties(${bun} PROPERTIES
VISIBILITY_INLINES_HIDDEN YES
)
if (NOT WIN32)
# Enable precompiled headers
# Only enable in these scenarios:
# 1. NOT in CI, OR
# 2. In CI AND BUN_CPP_ONLY is enabled
if(NOT CI OR (CI AND BUN_CPP_ONLY))
target_precompile_headers(${bun} PRIVATE
"$<$<COMPILE_LANGUAGE:CXX>:${CWD}/src/bun.js/bindings/root.h>"
)
endif()
endif()
# --- C/C++ Includes ---
if(WIN32)
@@ -901,6 +913,10 @@ if(NOT WIN32)
-Werror
)
endif()
else()
target_compile_options(${bun} PUBLIC
-Wno-nullability-completeness
)
endif()
# --- Linker options ---
@@ -943,28 +959,17 @@ endif()
if(LINUX)
if(NOT ABI STREQUAL "musl")
-# on arm64
-if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm|ARM|arm64|ARM64|aarch64|AARCH64")
-target_link_options(${bun} PUBLIC
--Wl,--wrap=exp
--Wl,--wrap=expf
--Wl,--wrap=fcntl64
--Wl,--wrap=log
--Wl,--wrap=log2
--Wl,--wrap=log2f
--Wl,--wrap=logf
--Wl,--wrap=pow
--Wl,--wrap=powf
-)
-else()
-target_link_options(${bun} PUBLIC
--Wl,--wrap=exp
--Wl,--wrap=expf
--Wl,--wrap=log2f
--Wl,--wrap=logf
--Wl,--wrap=powf
-)
-endif()
+target_link_options(${bun} PUBLIC
+-Wl,--wrap=exp
+-Wl,--wrap=expf
+-Wl,--wrap=fcntl64
+-Wl,--wrap=log
+-Wl,--wrap=log2
+-Wl,--wrap=log2f
+-Wl,--wrap=logf
+-Wl,--wrap=pow
+-Wl,--wrap=powf
+)
endif()
if(NOT ABI STREQUAL "musl")


@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
cloudflare/lol-html
COMMIT
-4f8becea13a0021c8b71abd2dcc5899384973b66
+67f1d4ffd6b74db7e053fb129dcce620193c180d
)
set(LOLHTML_CWD ${VENDOR_PATH}/lolhtml/c-api)


@@ -120,6 +120,9 @@ foreach(i RANGE ${BUILDKITE_JOBS_MAX_INDEX})
endif()
if(BUILDKITE)
if(BUILDKITE_ARTIFACT_PATH STREQUAL "libbun-profile.a")
set(BUILDKITE_ARTIFACT_PATH libbun-profile.a.gz)
endif()
set(BUILDKITE_DOWNLOAD_COMMAND buildkite-agent artifact download ${BUILDKITE_ARTIFACT_PATH} . --build ${BUILDKITE_BUILD_UUID} --step ${BUILDKITE_JOB_ID})
else()
set(BUILDKITE_DOWNLOAD_COMMAND curl -L -o ${BUILDKITE_ARTIFACT_PATH} ${BUILDKITE_ARTIFACTS_URL}/${BUILDKITE_ARTIFACT_ID})
@@ -135,6 +138,20 @@ foreach(i RANGE ${BUILDKITE_JOBS_MAX_INDEX})
OUTPUT
${BUILD_PATH}/${BUILDKITE_ARTIFACT_PATH}
)
if(BUILDKITE_ARTIFACT_PATH STREQUAL "libbun-profile.a.gz")
add_custom_command(
COMMENT
"Unpacking libbun-profile.a.gz"
VERBATIM COMMAND
gunzip libbun-profile.a.gz
WORKING_DIRECTORY
${BUILD_PATH}
OUTPUT
${BUILD_PATH}/libbun-profile.a
DEPENDS
${BUILD_PATH}/libbun-profile.a.gz
)
endif()
endforeach()
list(APPEND BUILDKITE_JOBS_MATCH ${BUILDKITE_JOB_NAME})


@@ -36,7 +36,8 @@ endif()
string(REPLACE "\n" ";" GIT_CHANGED_SOURCES "${GIT_DIFF}")
if(CI)
-setx(GIT_CHANGED_SOURCES ${GIT_CHANGED_SOURCES})
+set(GIT_CHANGED_SOURCES "${GIT_CHANGED_SOURCES}")
+message(STATUS "Set GIT_CHANGED_SOURCES: ${GIT_CHANGED_SOURCES}")
endif()
list(TRANSFORM GIT_CHANGED_SOURCES PREPEND ${CWD}/)


@@ -12,7 +12,7 @@ if(NOT ENABLE_LLVM)
return()
endif()
set(DEFAULT_LLVM_VERSION "18.1.8")
set(DEFAULT_LLVM_VERSION "19.1.7")
optionx(LLVM_VERSION STRING "The version of LLVM to use" DEFAULT ${DEFAULT_LLVM_VERSION})


@@ -2,7 +2,7 @@ option(WEBKIT_VERSION "The version of WebKit to use")
option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading")
if(NOT WEBKIT_VERSION)
-set(WEBKIT_VERSION 2be7f2f098210ce560f72ef95d93b008bc8eaaa1)
+set(WEBKIT_VERSION 91bf2baced1b1309c7e05f19177c97fefec20976)
endif()
string(SUBSTRING ${WEBKIT_VERSION} 0 16 WEBKIT_VERSION_PREFIX)


@@ -20,7 +20,7 @@ else()
unsupported(CMAKE_SYSTEM_NAME)
endif()
set(ZIG_COMMIT "bb9d6ab2c0bbbf20cc24dad03e88f3b3ffdb7de7")
set(ZIG_COMMIT "cd1995944508e4c946deb75bd70947d302e0db37")
optionx(ZIG_TARGET STRING "The zig target to use" DEFAULT ${DEFAULT_ZIG_TARGET})
if(CMAKE_BUILD_TYPE STREQUAL "Release")


@@ -12,5 +12,3 @@ Alternatively, use `process.dlopen`:
let mod = { exports: {} };
process.dlopen(mod, "./my-node-module.node");
```
-Bun polyfills the [`detect-libc`](https://npmjs.com/package/detect-libc) package, which is used by many Node-API modules to detect which `.node` binding to `require`.


@@ -715,7 +715,7 @@ await S3Client.delete("my-file.txt", credentials);
await S3Client.unlink("my-file.txt", credentials);
```
-## s3:// protocol
+## `s3://` protocol
To make it easier to use the same code for local files and S3 files, the `s3://` protocol is supported in `fetch` and `Bun.file()`.
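As a sketch of that symmetry (the bucket and key names are made up for illustration, and credentials are assumed to come from the environment):

```ts
// Hypothetical bucket/key; both APIs accept s3:// URLs.
const viaFile = await Bun.file("s3://my-bucket/my-file.txt").text();
const viaFetch = await (await fetch("s3://my-bucket/my-file.txt")).text();
// The same object is read either way.
console.log(viaFile === viaFetch);
```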


@@ -77,6 +77,16 @@ console.log(text); // "const input = "hello world".repeat(400); ..."
---
- `ReadableStream`
- Use a readable stream as input.
---
- `Blob`
- Use a blob as input.
---
- `number`
- Read from the file with a given file descriptor.
@@ -129,13 +139,13 @@ Configure the output stream by passing one of the following values to `stdout/st
---
- `Bun.file()`
- Write to the specified file.
- `"ignore"`
- Discard the output.
---
- `null`
- Write to `/dev/null`.
- `Bun.file()`
- Write to the specified file.
---
@@ -174,7 +184,8 @@ const proc = Bun.spawn(["bun", "--version"]);
proc.kill();
proc.killed; // true
-proc.kill(); // specify an exit code
+proc.kill(15); // specify a signal code
+proc.kill("SIGTERM"); // specify a signal name
```
The parent `bun` process will not terminate until all child processes have exited. Use `proc.unref()` to detach the child process from the parent.
@@ -184,6 +195,64 @@ const proc = Bun.spawn(["bun", "--version"]);
proc.unref();
```
## Resource usage
You can get information about the process's resource usage after it has exited:
```ts
const proc = Bun.spawn(["bun", "--version"]);
await proc.exited;
const usage = proc.resourceUsage();
console.log(`Max memory used: ${usage.maxRSS} bytes`);
console.log(`CPU time (user): ${usage.cpuTime.user} µs`);
console.log(`CPU time (system): ${usage.cpuTime.system} µs`);
```
## Using AbortSignal
You can abort a subprocess using an `AbortSignal`:
```ts
const controller = new AbortController();
const { signal } = controller;
const proc = Bun.spawn({
cmd: ["sleep", "100"],
signal,
});
// Later, to abort the process:
controller.abort();
```
## Using timeout and killSignal
You can set a timeout for a subprocess to automatically terminate after a specific duration:
```ts
// Kill the process after 5 seconds
const proc = Bun.spawn({
cmd: ["sleep", "10"],
timeout: 5000, // 5 seconds in milliseconds
});
await proc.exited; // Will resolve after 5 seconds
```
By default, timed-out processes are killed with the `SIGTERM` signal. You can specify a different signal with the `killSignal` option:
```ts
// Kill the process with SIGKILL after 5 seconds
const proc = Bun.spawn({
cmd: ["sleep", "10"],
timeout: 5000,
killSignal: "SIGKILL", // Can be string name or signal number
});
```
The `killSignal` option also controls which signal is sent when an AbortSignal is aborted.
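A minimal sketch combining the two options described above; aborting the controller delivers the configured `killSignal` rather than the default `SIGTERM`:

```ts
const controller = new AbortController();

const proc = Bun.spawn({
  cmd: ["sleep", "10"],
  signal: controller.signal,
  killSignal: "SIGKILL", // sent on abort (and on timeout) instead of SIGTERM
});

controller.abort(); // the child receives SIGKILL
await proc.exited;
```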
## Inter-process communication (IPC)
Bun supports a direct inter-process communication channel between two `bun` processes. To receive messages from a spawned Bun subprocess, specify an `ipc` handler.
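The parent side presumably looks something like this (`child.ts` is an illustrative file name):

```ts
// Messages the child sends via process.send() arrive in the ipc callback.
const child = Bun.spawn(["bun", "child.ts"], {
  ipc(message, subprocess) {
    console.log("child says:", message);
    subprocess.send("ack"); // reply over the same channel
  },
});
```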
@@ -233,11 +302,17 @@ process.send("Hello from child as string");
process.send({ message: "Hello from child as object" });
```
-The `ipcMode` option controls the underlying communication format between the two processes:
+The `serialization` option controls the underlying communication format between the two processes:
- `advanced`: (default) Messages are serialized using the JSC `serialize` API, which supports cloning [everything `structuredClone` supports](https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Structured_clone_algorithm). This does not support transferring ownership of objects.
- `json`: Messages are serialized using `JSON.stringify` and `JSON.parse`, which does not support as many object types as `advanced` does.
To disconnect the IPC channel from the parent process, call:
```ts
childProc.disconnect();
```
### IPC between Bun & Node.js
To use IPC between a `bun` process and a Node.js process, set `serialization: "json"` in `Bun.spawn`. This is because Node.js and Bun use different JavaScript engines with different object serialization formats.
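A hedged sketch of the Bun side of such a pairing (`node-child.js` is a made-up file name):

```ts
const child = Bun.spawn(["node", "node-child.js"], {
  serialization: "json", // JSON framing, since Node.js cannot read JSC's "advanced" format
  ipc(message) {
    console.log("from node:", message);
  },
});
child.send({ hello: "node" }); // received by the child's process.on("message", ...)
```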
@@ -310,7 +385,7 @@ spawnSync echo hi 1.47 ms/iter (1.14 ms … 2.64 ms) 1.57 ms 2.37 ms
## Reference
-A simple reference of the Spawn API and types are shown below. The real types have complex generics to strongly type the `Subprocess` streams with the options passed to `Bun.spawn` and `Bun.spawnSync`. For full details, find these types as defined [bun.d.ts](https://github.com/oven-sh/bun/blob/main/packages/bun-types/bun.d.ts).
+A reference of the Spawn API and types are shown below. The real types have complex generics to strongly type the `Subprocess` streams with the options passed to `Bun.spawn` and `Bun.spawnSync`. For full details, find these types as defined [bun.d.ts](https://github.com/oven-sh/bun/blob/main/packages/bun-types/bun.d.ts).
```ts
interface Bun {
@@ -329,16 +404,25 @@ interface Bun {
namespace SpawnOptions {
interface OptionsObject {
cwd?: string;
env?: Record<string, string | undefined>;
stdio?: [Writable, Readable, Readable];
stdin?: Writable;
stdout?: Readable;
stderr?: Readable;
onExit?(
subprocess: Subprocess,
exitCode: number | null,
signalCode: number | null,
error?: ErrorLike,
): void | Promise<void>;
ipc?(message: any, subprocess: Subprocess): void;
serialization?: "json" | "advanced";
windowsHide?: boolean;
windowsVerbatimArguments?: boolean;
argv0?: string;
signal?: AbortSignal;
timeout?: number;
killSignal?: string | number;
}
type Readable =
@@ -366,39 +450,62 @@ namespace SpawnOptions {
| Request;
}
interface Subprocess extends AsyncDisposable {
readonly pid: number;
// the exact stream types here are derived from the generic parameters
readonly stdin: number | ReadableStream | FileSink | undefined;
readonly stdout: number | ReadableStream | undefined;
readonly stderr: number | ReadableStream | undefined;
readonly exited: Promise<number>;
readonly exitCode: number | null;
readonly signalCode: NodeJS.Signals | null;
readonly killed: boolean;
kill(exitCode?: number | NodeJS.Signals): void;
ref(): void;
unref(): void;
send(message: any): void;
disconnect(): void;
resourceUsage(): ResourceUsage | undefined;
}
interface SyncSubprocess {
stdout: Buffer | undefined;
stderr: Buffer | undefined;
exitCode: number;
success: boolean;
resourceUsage: ResourceUsage;
signalCode?: string;
exitedDueToTimeout?: true;
pid: number;
}
interface ResourceUsage {
contextSwitches: {
voluntary: number;
involuntary: number;
};
cpuTime: {
user: number;
system: number;
total: number;
};
maxRSS: number;
messages: {
sent: number;
received: number;
};
ops: {
in: number;
out: number;
};
shmSize: number;
signalCount: number;
swapCount: number;
}
type Signal =
| "SIGABRT"


@@ -11,7 +11,7 @@ Bun.listen({
socket: {
data(socket, data) {}, // message received from client
open(socket) {}, // socket opened
close(socket, error) {}, // socket closed
drain(socket) {}, // socket ready for more data
error(socket, error) {}, // error handler
},
@@ -30,7 +30,7 @@ Bun.listen({
open(socket) {},
data(socket, data) {},
drain(socket) {},
close(socket, error) {},
error(socket, error) {},
},
});
@@ -122,7 +122,7 @@ const socket = await Bun.connect({
socket: {
data(socket, data) {},
open(socket) {},
close(socket, error) {},
drain(socket) {},
error(socket, error) {},


@@ -1,15 +1,13 @@
# CSS
Bun's bundler has built-in support for CSS with the following features:
- Transpiling modern/future features to work on all browsers (including vendor prefixing)
- Minification
- CSS Modules
- Tailwind (via a native bundler plugin)
## Transpiling
Bun's CSS bundler lets you use modern/future CSS features without having to worry about browser compatibility — all thanks to its transpiling and vendor prefixing features which are enabled by default.
Bun's CSS parser and bundler are a direct Rust → Zig port of [LightningCSS](https://lightningcss.dev/), with a bundling approach inspired by esbuild. The transpiler converts modern CSS syntax into backwards-compatible equivalents that work across browsers.

docs/bundler/hmr.md (new file, 234 lines)

@@ -0,0 +1,234 @@
Hot Module Replacement (HMR) allows you to update modules in a running
application without needing a full page reload. This preserves the application
state and improves the development experience.
HMR is enabled by default when using Bun's full-stack development server.
## `import.meta.hot` API Reference
Bun implements a client-side HMR API modeled after [Vite's `import.meta.hot` API](https://vitejs.dev/guide/api-hmr.html). Its presence can be checked with `if (import.meta.hot)`, which is tree-shaken away in production:
```ts
if (import.meta.hot) {
// HMR APIs are available.
}
```
However, **this check is often not needed** as Bun will dead-code-eliminate
calls to all of the HMR APIs in production builds.
```ts
// This entire function call will be removed in production!
import.meta.hot.dispose(() => {
console.log("dispose");
});
```
For this to work, Bun forces these APIs to be called without indirection. That means the following do not work:
```ts#invalid-hmr-usage.ts
// INVALID: Assigning `hot` to a variable
const hot = import.meta.hot;
hot.accept();
// INVALID: Assigning `import.meta` to a variable
const meta = import.meta;
meta.hot.accept();
console.log(meta.hot.data);
// INVALID: Passing to a function
doSomething(import.meta.hot.dispose);
// OK: The full phrase "import.meta.hot.<API>" must be called directly:
import.meta.hot.accept();
// OK: `data` can be passed to functions:
doSomething(import.meta.hot.data);
```
{% callout %}
**Note** — The HMR API is still a work in progress. Some features are missing. HMR can be disabled in `Bun.serve` by setting the `development` option to `{ hmr: false }`.
{% endcallout %}
| | Method | Notes |
| --- | ------------------ | --------------------------------------------------------------------- |
| ✅ | `hot.accept()` | Indicate that a hot update can be replaced gracefully. |
| ✅ | `hot.data` | Persist data between module evaluations. |
| ✅ | `hot.dispose()` | Add a callback function to run when a module is about to be replaced. |
| ❌ | `hot.invalidate()` | |
| ✅ | `hot.on()` | Attach an event listener |
| ✅ | `hot.off()` | Remove an event listener from `on`. |
| ❌ | `hot.send()` | |
| 🚧 | `hot.prune()` | **NOTE**: Callback is currently never called. |
| ✅ | `hot.decline()` | No-op to match Vite's `import.meta.hot` |
### `import.meta.hot.accept()`
The `accept()` method indicates that a module can be hot-replaced. When called
without arguments, it indicates that this module can be replaced simply by
re-evaluating the file. After a hot update, importers of this module will be
automatically patched.
```ts#index.ts
import { getCount } from "./foo.ts";
console.log("count is ", getCount());
import.meta.hot.accept();
export function getNegativeCount() {
return -getCount();
}
```
This creates a hot-reloading boundary for all of the files that `index.ts`
imports. That means whenever `foo.ts` or any of its dependencies are saved, the
update will bubble up to `index.ts`, which will re-evaluate. Files that import
`index.ts` will then be patched to import the new version of
`getNegativeCount()`. If only `index.ts` is updated, only the one file will be
re-evaluated, and the counter in `foo.ts` is reused.
This may be used in combination with `import.meta.hot.data` to transfer state
from the previous module to the new one.
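For example, a sketch of a counter module that carries its value across hot updates (the `count` state is illustrative):
```ts
// Seed the new module instance with the previous instance's value, if any.
let count: number = import.meta.hot.data.count ?? 0;

export function getCount() {
  return count;
}

import.meta.hot.accept();
import.meta.hot.dispose(() => {
  // Hand the current value to the next version of this module.
  import.meta.hot.data.count = count;
});
```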
When no modules call `import.meta.hot.accept()` (and there isn't React Fast
Refresh or a plugin calling it for you), the page will reload when the file
updates, and a console warning shows which files were invalidated. This warning
is safe to ignore if it makes more sense to rely on full page reloads.
#### With callback
When provided one callback, `import.meta.hot.accept` will function as it does
in Vite. Instead of patching the importers of this module, it will call the
callback with the new module.
```ts
export const count = 0;
import.meta.hot.accept(newModule => {
if (newModule) {
// newModule is undefined when SyntaxError happened
console.log("updated: count is now ", newModule.count);
}
});
```
Prefer using `import.meta.hot.accept()` without an argument as it usually makes your code easier to understand.
#### Accepting other modules
```ts
import { count } from "./foo";
import.meta.hot.accept("./foo", () => {
if (!newModule) return;
console.log("updated: count is now ", count);
});
```
Indicates that a dependency's module can be accepted. When the dependency is updated, the callback will be called with the new module.
#### With multiple dependencies
```ts
import.meta.hot.accept(["./foo", "./bar"], newModules => {
// newModules is an array where each item corresponds to the updated module
// or undefined if that module had a syntax error
});
```
Indicates that multiple dependencies' modules can be accepted. This variant accepts an array of dependencies, where the callback will receive the updated modules, and `undefined` for any that had errors.
### `import.meta.hot.data`
`import.meta.hot.data` maintains state between module instances during hot
replacement, enabling data transfer from previous to new versions. When
`import.meta.hot.data` is written into, Bun will also mark this module as
capable of self-accepting (equivalent of calling `import.meta.hot.accept()`).
```ts
import { createRoot } from "react-dom/client";
import { App } from "./app";
const elem = document.getElementById("root")!;
const root = (import.meta.hot.data.root ??= createRoot(elem));
root.render(<App />); // re-use an existing root
```
In production, `data` is inlined to be `{}`, meaning it cannot be used as a state holder.
The above pattern is recommended for stateful modules because Bun knows it can minify `{}.prop ??= value` into `value` in production.
### `import.meta.hot.dispose()`
Attaches an on-dispose callback. This is called:
- Just before the module is replaced with another copy (before the next is loaded)
- After the module is detached (removing all imports to this module, see `import.meta.hot.prune()`)
```ts
const sideEffect = setupSideEffect();
import.meta.hot.dispose(() => {
sideEffect.cleanup();
});
```
This callback is not called on route navigation or when the browser tab closes.
Returning a promise will delay module replacement until the module is disposed.
All dispose callbacks are called in parallel.
### `import.meta.hot.prune()`
Attaches an on-prune callback. This is called when all imports to this module
are removed, but the module was previously loaded.
This can be used to clean up resources that were created when the module was
loaded. Unlike `import.meta.hot.dispose()`, this pairs much better with `accept`
and `data` to manage stateful resources. A full example managing a `WebSocket`:
```ts
import { something } from "./something";
// Initialize or re-use a WebSocket connection
export const ws = (import.meta.hot.data.ws ??= new WebSocket(location.origin));
// If the module's import is removed, clean up the WebSocket connection.
import.meta.hot.prune(() => {
ws.close();
});
```
If `dispose` was used instead, the WebSocket would close and re-open on every
hot update. Both versions of the code will prevent page reloads when imported
files are updated.
### `import.meta.hot.on()` and `off()`
`on()` and `off()` are used to listen for events from the HMR runtime. Event names carry a namespace prefix (such as `bun:`) so that plugins do not conflict with each other.
```ts
import.meta.hot.on("bun:beforeUpdate", () => {
console.log("before a hot update");
});
```
When a file is replaced, all of its event listeners are automatically removed.
A list of all built-in events:
| Event | Emitted when |
| ---------------------- | ----------------------------------------------------------------------------------------------- |
| `bun:beforeUpdate` | before a hot update is applied. |
| `bun:afterUpdate` | after a hot update is applied. |
| `bun:beforeFullReload` | before a full page reload happens. |
| `bun:beforePrune` | before prune callbacks are called. |
| `bun:invalidate` | when a module is invalidated with `import.meta.hot.invalidate()` |
| `bun:error` | when a build or runtime error occurs |
| `bun:ws:disconnect` | when the HMR WebSocket connection is lost. This can indicate the development server is offline. |
| `bun:ws:connect` | when the HMR WebSocket connects or re-connects. |
For compatibility with Vite, the above events are also available via `vite:*` prefix instead of `bun:*`.
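For example, these two listeners are equivalent:
```ts
import.meta.hot.on("bun:afterUpdate", () => console.log("updated"));
import.meta.hot.on("vite:afterUpdate", () => console.log("updated"));
```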


@@ -82,6 +82,11 @@ The `--dry-run` flag can be used to simulate the publish process without actuall
$ bun publish --dry-run
```
### `--gzip-level`
Specify the level of gzip compression to use when packing the package. Only applies to `bun publish` without a tarball path argument. Values range from `0` to `9` (default is `9`).
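For example, to trade some compression for faster packing (the level here is illustrative):
```sh
$ bun publish --gzip-level 6
```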
### `--auth-type`
If you have 2FA enabled for your npm account, `bun publish` will prompt you for a one-time password. This can be done through a browser or the CLI. The `--auth-type` flag can be used to tell the npm registry which method you prefer. The possible values are `web` and `legacy`, with `web` being the default.
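For example, to answer the one-time password prompt in the terminal rather than the browser:
```sh
$ bun publish --auth-type legacy
```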
@@ -102,7 +107,6 @@ Provide a one-time password directly to the CLI. If the password is valid, this
$ bun publish --otp 123456
```
{% bunCLIUsage command="publish" /%}
{% callout %}
**Note** - `bun publish` respects the `NPM_CONFIG_TOKEN` environment variable which can be used when publishing in github actions or automated workflows.
{% /callout %}


@@ -215,15 +215,19 @@ export default {
page("bundler", "`Bun.build`", {
description: "Bundle code for consumption in the browser with Bun's native bundler.",
}),
page("bundler/html", "Bundle frontend & static sites", {
page("bundler/html", "HTML & static sites", {
description: `Zero-config HTML bundler for single-page apps and multi-page apps. Automatic bundling, TailwindCSS plugins, TypeScript, JSX, React support, and incredibly fast builds`,
}),
page("bundler/css", "Bundle, transpile, and minify CSS", {
page("bundler/css", "CSS", {
description: `Production ready CSS bundler with support for modern CSS features, CSS modules, and more.`,
}),
page("bundler/fullstack", "Fullstack Dev Server", {
description: "Serve your frontend and backend from the same app with Bun's dev server.",
}),
page("bundler/hmr", "Hot reloading", {
description: `Update modules in a running application without reloading the page using import.meta.hot`,
}),
page("bundler/loaders", "Loaders", {
description: "Bun's built-in loaders for the bundler and runtime",
}),


@@ -60,7 +60,7 @@ Visual Studio can be installed graphically using the wizard or through WinGet:
After Visual Studio, you need the following:
- LLVM 19.1.7
- Go
- Rust
- NASM
@@ -81,7 +81,7 @@ After Visual Studio, you need the following:
> irm https://get.scoop.sh | iex
> scoop install nodejs-lts go rust nasm ruby perl ccache
# scoop seems to be buggy if you install llvm and the rest at the same time
> scoop install llvm@19.1.7
```
{% /codetabs %}


@@ -174,7 +174,7 @@ Some methods are not optimized yet.
### [`node:test`](https://nodejs.org/api/test.html)
🟡 Partly implemented. Missing mocks, snapshots, timers. Use [`bun:test`](https://bun.sh/docs/cli/test) instead.
### [`node:trace_events`](https://nodejs.org/api/tracing.html)
@@ -346,7 +346,7 @@ The table below lists all globals implemented by Node.js and Bun's current compa
### [`process`](https://nodejs.org/api/process.html)
🟡 Mostly implemented. `process.binding` (internal Node.js bindings some packages rely on) is partially implemented. `process.title` is currently a no-op on macOS & Linux. `getActiveResourcesInfo`, `setActiveResourcesInfo`, `getActiveResources` and `setSourceMapsEnabled` are stubs. Newer APIs like `process.loadEnvFile` and `process.getBuiltinModule` are not implemented yet.
### [`queueMicrotask()`](https://developer.mozilla.org/en-US/docs/Web/API/queueMicrotask)


@@ -329,7 +329,7 @@ await Bun.build({
{% callout %}
**NOTE**: Plugin lifecycle callbacks (`onStart()`, `onResolve()`, etc.) do not have the ability to modify the `build.config` object in the `setup()` function. If you want to mutate `build.config`, you must do so directly in the `setup()` function:
```ts
await Bun.build({
@@ -400,7 +400,7 @@ type Loader = "js" | "jsx" | "ts" | "tsx" | "css" | "json" | "toml" | "object";
### Namespaces
`onLoad` and `onResolve` accept an optional `namespace` string. What is a namespace?
Every module has a namespace. Namespaces are used to prefix the import in transpiled code; for instance, a loader with a `filter: /\.yaml$/` and `namespace: "yaml:"` will transform an import from `./myfile.yaml` into `yaml:./myfile.yaml`.
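A sketch of how the two could fit together (the plugin name and inline JSON-string loading are illustrative; a real plugin would parse the YAML):
```ts
import type { BunPlugin } from "bun";

const yamlPlugin: BunPlugin = {
  name: "yaml-loader",
  setup(build) {
    // Tag matching imports with the "yaml" namespace...
    build.onResolve({ filter: /\.yaml$/ }, args => ({
      path: args.path,
      namespace: "yaml",
    }));

    // ...and only load modules that carry that namespace.
    build.onLoad({ filter: /.*/, namespace: "yaml" }, async args => {
      const text = await Bun.file(args.path).text();
      return {
        contents: `export default ${JSON.stringify(text)};`,
        loader: "js",
      };
    });
  },
};
```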


@@ -1,13 +0,0 @@
# Tell LLDB what to do when the debugged process receives SIGPWR: pass it through to the process
# (-p), but do not stop the process (-s) or notify the user (-n).
#
# JSC's garbage collector sends this signal (as configured by Bun WebKit in
# Thread::initializePlatformThreading() in ThreadingPOSIX.cpp) to the JS thread to suspend or resume
# it. So stopping the process would just create noise when debugging any long-running script.
process handle -p true -s false -n false SIGPWR
command script import misctools/lldb/lldb_pretty_printers.py
type category enable zig.lang
type category enable zig.std
command script import misctools/lldb/lldb_webkit.py


@@ -329,7 +329,7 @@ def btjs(debugger, command, result, internal_dict):
addressFormat = '#0{width}x'.format(width=target.GetAddressByteSize() * 2 + 2)
process = target.GetProcess()
thread = process.GetSelectedThread()
jscModule = target.module["JavaScriptCore"]
jscModule = target.module["JavaScriptCore"] or target.module["bun"] or target.module["bun-debug"]
if jscModule.FindSymbol("JSC::CallFrame::describeFrame").GetSize() or jscModule.FindSymbol("_ZN3JSC9CallFrame13describeFrameEv").GetSize():
annotateJSFrames = True


@@ -1,7 +1,7 @@
{
"private": true,
"name": "bun",
"version": "1.2.5",
"version": "1.2.6",
"workspaces": [
"./packages/bun-types"
],


@@ -8,7 +8,7 @@ The official [Svelte](https://svelte.dev/) plugin for [Bun](https://bun.sh/).
## Installation
```sh
$ bun add -D bun-plugin-svelte
```
## Dev Server Usage
@@ -16,52 +16,25 @@ bun add -D bun-plugin-svelte
`bun-plugin-svelte` integrates with Bun's [Fullstack Dev Server](https://bun.sh/docs/bundler/fullstack), giving you
HMR when developing your Svelte app.
Start by registering it in your [bunfig.toml](https://bun.sh/docs/runtime/bunfig):
```toml
[serve.static]
plugins = ["bun-plugin-svelte"]
```
Then start your dev server:
```sh
$ bun index.html
```
See the [example](https://github.com/oven-sh/bun/tree/main/packages/bun-plugin-svelte/example) for a complete example.
## Bundler Usage
`bun-plugin-svelte` lets you bundle Svelte components with [`Bun.build`](https://bun.sh/docs/bundler).
```ts
// build.ts
// to use: bun run build.ts
@@ -70,7 +43,7 @@ import { SveltePlugin } from "bun-plugin-svelte"; // NOTE: not published to npm
Bun.build({
entrypoints: ["src/index.ts"],
outdir: "dist",
target: "browser", // use "bun" or "node" to use Svelte components server-side
target: "browser",
sourcemap: true, // sourcemaps not yet supported
plugins: [
SveltePlugin({
@@ -84,3 +57,13 @@ Bun.build({
`bun-plugin-svelte` does not yet support server-side imports (e.g. for SSR).
This will be added in the near future.
## Not Yet Supported
Support for these features will be added in the near future:
- Server-side imports/rendering
- Source maps
- CSS extensions (e.g. tailwind) in `<style>` blocks
- TypeScript-specific features (e.g. enums and namespaces). If you're using
TypeScript 5.8, consider enabling [`--erasableSyntaxOnly`](https://devblogs.microsoft.com/typescript/announcing-typescript-5-8-beta/#the---erasablesyntaxonly-option)


@@ -0,0 +1,311 @@
<script lang="ts">
import FeatureCard from "./FeatureCard.svelte";
const links = [
{ text: "Bun Documentation", url: "https://bun.sh/docs" },
{ text: "Svelte Documentation", url: "https://svelte.dev/docs" },
{ text: "GitHub", url: "https://github.com/oven-sh/bun/tree/main/packages/bun-plugin-svelte" },
];
</script>
<main>
<div class="hero">
<div class="logo-container">
<a href="https://bun.sh" class="bun-logo">
<img
src="https://github.com/user-attachments/assets/50282090-adfd-4ddb-9e27-c30753c6b161"
alt="Bun Logo"
height="42"
/>
</a>
</div>
<h1><span class="highlight">bun-plugin-svelte</span></h1>
<p class="tagline">The official Svelte plugin for <a href="https://bun.sh" target="_blank">Bun</a></p>
<div class="cta-buttons">
<a href="https://bun.sh/docs/bundler/html" class="button primary">🚀 Get Started</a>
<a href="https://github.com/oven-sh/bun/tree/main/packages/bun-plugin-svelte/example" class="button secondary"
>👀 View Examples</a
>
</div>
</div>
<section class="usage">
<h2>🏃‍➡️ Quick Start</h2>
<div class="flex-grid">
<div>
<h3>1. Install from <a href="https://npmjs.com/package/bun-plugin-svelte" target="_blank">NPM</a></h3>
<pre><code class="language-bash">bun add -D bun-plugin-svelte</code></pre>
</div>
<div>
<h3>2. Add it to your <a href="https://bun.sh/docs/runtime/bunfig" target="_blank">bunfig.toml</a></h3>
<pre><code class="language-toml">
[serve.static]
plugins = ["bun-plugin-svelte"];
</code></pre>
</div>
</div>
</section>
<section class="features">
<h2>✨ Features</h2>
<div class="feature-grid">
<FeatureCard title="🔥 HMR Support" link="https://bun.sh/docs/bundler/html">
Integrates with Bun's Fullstack Dev Server for hot module replacement
</FeatureCard>
<FeatureCard title="📦 Bundling" link="https://bun.sh/docs/bundler">
Bundle Svelte components with <a href="https://bun.sh/docs/bundler">Bun.build</a>
</FeatureCard>
</div>
<section class="resources">
<h2>📖 Resources</h2>
<ul class="resource-links">
{#each links as link}
<li><a href={link.url} target="_blank" rel="noopener noreferrer">{link.text}</a></li>
{/each}
</ul>
</section>
<footer>
<p>Made with ❤️ by the Bun team</p>
</footer>
</section>
</main>
<style>
:global(body) {
margin: 0;
padding: 0;
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Oxygen, Ubuntu, Cantarell, "Open Sans",
"Helvetica Neue", sans-serif;
background-color: #f9f9f9;
color: #333;
}
:global(a) {
color: #ff3e00;
text-decoration: none;
position: relative;
}
:global(a::after) {
content: "";
position: absolute;
width: 100%;
height: 1px;
bottom: 0;
left: 0;
background-color: #ff3e00;
transform: scaleX(0);
transform-origin: bottom right;
transition: transform 0.3s ease-out;
}
:global(a:hover::after) {
transform: scaleX(1);
transform-origin: bottom left;
}
:global(a:visited) {
color: #ff3e00;
}
:global(pre > code.hljs) {
padding: 0;
}
main {
max-width: 1200px;
margin: 0 auto;
padding: 2rem;
}
.hero {
text-align: center;
padding: 3rem 1rem;
margin-bottom: 2rem;
display: flex;
flex-direction: column;
}
.logo-container {
margin-bottom: 1.5rem;
margin: auto 25%;
}
.bun-logo {
display: block;
transition: transform 0.3s ease;
}
.bun-logo:hover {
transform: scale(1.05);
}
.bun-logo img {
max-width: 33vw;
height: auto;
}
h1 {
font-size: 2.5rem;
margin-bottom: 0.5rem;
}
.highlight {
color: #ff3e00;
font-weight: bold;
}
/* Don't apply the underline effect to buttons and resource links */
.button::after,
.resource-links li a::after,
.bun-logo::after {
display: none;
}
.tagline {
font-size: 1.2rem;
color: #666;
margin-bottom: 2rem;
}
.cta-buttons {
display: flex;
justify-content: center;
gap: 1rem;
margin-bottom: 2rem;
}
.button {
display: inline-block;
padding: 0.8rem 1.5rem;
border-radius: 4px;
text-decoration: none;
font-weight: 600;
transition: all 0.2s ease;
}
.button.primary {
background-color: #ff3e00;
color: white;
box-shadow: 0 2px 10px rgba(255, 62, 0, 0.2);
}
.button.primary:hover {
background-color: #e63600;
transform: translateY(-2px);
box-shadow: 0 4px 12px rgba(255, 62, 0, 0.3);
}
.button.secondary {
background-color: #f0f0f0;
color: #333;
box-shadow: 0 2px 10px rgba(0, 0, 0, 0.1);
}
.button.secondary:hover {
background-color: #e6e6e6;
transform: translateY(-2px);
box-shadow: 0 4px 12px rgba(0, 0, 0, 0.15);
}
section {
margin-bottom: 3rem;
padding: 2rem;
background-color: white;
border-radius: 8px;
box-shadow: 0 2px 15px rgba(0, 0, 0, 0.05);
}
h2 {
font-size: 1.8rem;
margin-bottom: 1.5rem;
color: #333;
border-bottom: 2px solid #f0f0f0;
padding-bottom: 0.5rem;
}
.flex-grid {
display: flex;
gap: 1.5rem;
}
.flex-grid > div {
flex: 1;
}
.feature-grid {
display: grid;
grid-template-columns: repeat(auto-fit, minmax(240px, 1fr));
gap: 1.5rem;
margin-bottom: 2rem;
}
pre {
background-color: #f5f5f5;
padding: 1rem;
border-radius: 4px;
overflow-x: auto;
font-family: "SFMono-Regular", Consolas, "Liberation Mono", Menlo, monospace;
font-size: 0.9rem;
line-height: 1.5;
}
code {
color: #333;
}
.resource-links {
list-style: none;
padding: 0;
display: flex;
flex-wrap: wrap;
gap: 1rem;
}
.resource-links li a {
display: inline-block;
padding: 0.5rem 1rem;
background-color: #f5f5f5;
color: #333;
text-decoration: none;
border-radius: 4px;
transition: all 0.2s ease;
}
.resource-links li a:hover {
background-color: #ff3e00;
color: white;
transform: translateY(-2px);
}
footer {
text-align: center;
padding: 2rem 0;
color: #666;
font-size: 0.9rem;
}
@media (max-width: 768px) {
.feature-grid {
grid-template-columns: 1fr;
}
.cta-buttons {
flex-direction: column;
align-items: center;
}
.button {
width: 100%;
max-width: 300px;
margin-bottom: 0.5rem;
text-align: center;
}
.resource-links {
flex-direction: column;
}
}
</style>


@@ -0,0 +1,28 @@
<script lang="ts">
let { title, link, children } = $props();
</script>
<div class="feature-card">
<h3>
<a href={link}>
{title}
</a>
</h3>
<p>
{@render children()}
</p>
</div>
<style>
.feature-card {
padding: 1rem;
border-radius: 0.5rem;
background-color: #fff;
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1);
transition: transform 0.3s ease;
}
.feature-card h3 {
margin-bottom: 0.5rem;
}
</style>


@@ -0,0 +1,2 @@
[serve.static]
plugins = ["bun-plugin-svelte"]


@@ -0,0 +1,25 @@
<html>
<head>
<script type="module" src="./index.ts"></script>
<link rel="prefetch" href="https://bun.sh/docs/bundler/plugins" />
<link rel="preconnect" href="https://bun.sh" />
<link rel="preconnect" href="https://github.com" />
<link
rel="stylesheet"
href="https://cdn.jsdelivr.net/gh/highlightjs/cdn-release@11.9.0/build/styles/default.min.css"
/>
<script src="https://cdn.jsdelivr.net/gh/highlightjs/cdn-release@11.9.0/build/highlight.min.js"></script>
<script src="https://cdn.jsdelivr.net/gh/highlightjs/cdn-release@11.9.0/build/languages/toml.min.js"></script>
<script>
hljs.highlightAll();
</script>
</head>
<body>
<div id="root"></div>
</body>
</html>


@@ -0,0 +1,29 @@
import { mount, unmount } from "svelte";
import App from "./App.svelte";
declare global {
var didMount: boolean | undefined;
var hljs: any;
}
let app: Record<string, any> | undefined;
// mount the application entrypoint to the DOM on first load. On subsequent hot
// updates, the app will be unmounted and re-mounted via the accept handler.
const root = document.getElementById("root")!;
if (!globalThis.didMount) {
app = mount(App, { target: root });
}
globalThis.didMount = true;
if (import.meta.hot) {
import.meta.hot.accept(async () => {
// avoid unmounting twice when another update gets accepted while outros are playing
if (!app) return;
const prevApp = app;
app = undefined;
await unmount(prevApp, { outro: true });
app = mount(App, { target: root });
});
}


@@ -1,6 +1,14 @@
{
"name": "bun-plugin-svelte",
"version": "0.0.1",
"version": "0.0.5",
"description": "Official Svelte plugin for Bun",
"repository": {
"type": "git",
"url": "https://github.com/oven-sh/bun",
"directory": "packages/bun-plugin-svelte"
},
"homepage": "https://bun.sh",
"license": "MIT",
"type": "module",
"module": "src/index.ts",
"index": "src/index.ts",
@@ -8,16 +16,18 @@
".": "./src/index.ts"
},
"scripts": {
"example": "bun --config=./example/bunfig.toml example/index.html",
"lint": "oxlint .",
"fmt": "prettier --write .",
"check:types": "tsc --noEmit",
"build:types": "tsc --emitDeclarationOnly --declaration --declarationDir ./dist"
},
"devDependencies": {
"bun-types": "canary",
"svelte": "^5.20.4"
"svelte": "^5.20.4",
"@threlte/core": "8.0.1"
},
"peerDependencies": {
"typescript": "^5",
"svelte": "^5"
},
"files": [


@@ -1,7 +1,13 @@
import type { BunPlugin, BuildConfig, OnLoadResult } from "bun";
import { basename } from "node:path";
import { compile, compileModule } from "svelte/compiler";
import {
getBaseCompileOptions,
validateOptions,
type SvelteOptions,
hash,
getBaseModuleCompileOptions,
} from "./options";
const kEmptyObject = Object.create(null);
const virtualNamespace = "bun-svelte";
@@ -23,38 +29,52 @@ function SveltePlugin(options: SvelteOptions = kEmptyObject as SvelteOptions): B
return {
name: "bun-plugin-svelte",
setup(builder) {
// resolve "svelte" export conditions
//
// FIXME: the dev server does not currently respect bundler configs; it
// just passes a fake one to plugins and then never uses it. we need to
// update it to ~not~ do this.
if (builder?.config) {
var conditions = builder.config.conditions ?? [];
if (typeof conditions === "string") {
conditions = [conditions];
}
conditions.push("svelte");
builder.config.conditions = conditions;
}
const { config = kEmptyObject as Partial<BuildConfig> } = builder;
const baseCompileOptions = getBaseCompileOptions(options ?? (kEmptyObject as Partial<SvelteOptions>), config);
const baseModuleCompileOptions = getBaseModuleCompileOptions(
options ?? (kEmptyObject as Partial<SvelteOptions>),
config,
);
const ts = new Bun.Transpiler({
loader: "ts",
target: config.target,
});
builder
.onLoad({ filter: /\.svelte$/ }, async function onLoadSvelte(args) {
const { path } = args;
const sourceText = await Bun.file(path).text();
const side =
args && "side" in args // "side" only passed when run from dev server
? (args as { side: "client" | "server" }).side
: "server";
const hmr = Boolean((args as { hmr?: boolean })["hmr"] ?? process.env.NODE_ENV !== "production");
const generate = baseCompileOptions.generate ?? side;
const result = compile(sourceText, {
...baseCompileOptions,
generate,
filename: args.path,
hmr,
});
var { js, css } = result;
if (css?.code && generate != "server") {
const uid = `${basename(path)}-${hash(path)}-style`.replaceAll(`"`, `'`);
@@ -65,11 +85,37 @@ function SveltePlugin(options: SvelteOptions = kEmptyObject as SvelteOptions): B
return {
contents: result.js.code,
loader: "js",
loader: "ts",
} satisfies OnLoadResult;
// TODO: allow plugins to return multiple results.
// TODO: support layered sourcemaps
})
.onLoad({ filter: /\.svelte.[tj]s$/ }, async function onLoadSvelteModule(args) {
const { path } = args;
const side =
args && "side" in args // "side" only passed when run from dev server
? (args as { side: "client" | "server" }).side
: "server";
const generate = baseModuleCompileOptions.generate ?? side;
var sourceText = await Bun.file(path).text();
if (path.endsWith(".ts")) {
sourceText = await ts.transform(sourceText);
}
const result = compileModule(sourceText, {
...baseModuleCompileOptions,
generate,
filename: args.path,
});
// NOTE: we assume js/ts modules won't have CSS blocks in them, so no
// virtual modules get created.
return {
contents: result.js.code,
loader: "js",
};
})
.onResolve({ filter: /^bun-svelte:/ }, args => {
return {
path: args.path,


@@ -1,6 +1,6 @@
import { strict as assert } from "node:assert";
import type { BuildConfig } from "bun";
import type { CompileOptions } from "svelte/compiler";
import { type BuildConfig } from "bun";
import type { CompileOptions, ModuleCompileOptions } from "svelte/compiler";
export interface SvelteOptions {
/**
@@ -44,8 +44,8 @@ export function validateOptions(options: unknown): asserts options is SvelteOpti
* @internal
*/
export function getBaseCompileOptions(pluginOptions: SvelteOptions, config: Partial<BuildConfig>): CompileOptions {
let { development = false } = pluginOptions;
const { minify = false } = config;
const shouldMinify = Boolean(minify);
const {
@@ -60,6 +60,38 @@ export function getBaseCompileOptions(pluginOptions: SvelteOptions, config: Part
identifiers: shouldMinify,
};
const generate = generateSide(pluginOptions, config);
return {
css: "external",
generate,
preserveWhitespace: !minifyWhitespace,
preserveComments: !shouldMinify,
dev: development,
cssHash({ css }) {
// same prime number seed used by svelte/compiler.
// TODO: ensure this provides enough entropy
return `svelte-${hash(css)}`;
},
};
}
export function getBaseModuleCompileOptions(
pluginOptions: SvelteOptions,
config: Partial<BuildConfig>,
): ModuleCompileOptions {
const { development = false } = pluginOptions;
const generate = generateSide(pluginOptions, config);
return {
dev: development,
generate,
};
}
function generateSide(pluginOptions: SvelteOptions, config: Partial<BuildConfig>) {
let { forceSide } = pluginOptions;
const { target } = config;
if (forceSide == null && typeof target === "string") {
switch (target) {
case "browser":
@@ -73,19 +105,7 @@ export function getBaseCompileOptions(pluginOptions: SvelteOptions, config: Part
// warn? throw?
}
}
return forceSide;
}
export const hash = (content: string): string => Bun.hash(content, 5381).toString(36);


@@ -0,0 +1,17 @@
<script>
import { Canvas } from "@threlte/core";
let name = "Bun";
</script>
<main class="app">
<h1>Cookin up apps with {name}</h1>
<Canvas />
</main>
<style>
h1 {
color: #ff3e00;
text-align: center;
font-size: 2em;
}
</style>


@@ -0,0 +1,15 @@
class Todo {
title: string | undefined = $state();
done: boolean = $state(false);
createdAt: Date = $state(new Date());
constructor(title: string) {
this.title = title;
}
public toggle(): void {
this.done = !this.done;
}
}
module.exports = Todo;


@@ -0,0 +1,13 @@
export class Todo {
title: string | undefined = $state();
done: boolean = $state(false);
createdAt: Date = $state(new Date());
constructor(title: string) {
this.title = title;
}
public toggle(): void {
this.done = !this.done;
}
}


@@ -0,0 +1,25 @@
<script lang="ts">
const Todo = require("./todo-cjs.svelte.ts");
let name = "World";
let todo: Todo = $state(new Todo("Hello World!"));
</script>
<main class="app">
<h1>Hello {todo.title}!</h1>
<!-- clicking calls toggle -->
<input type="checkbox" bind:checked={todo.done} />
<button onclick={todo.toggle}>Toggle</button>
</main>
<style>
h1 {
color: #ff3e00;
text-align: center;
font-size: 2em;
font-weight: 100;
}
.app {
box-sizing: border-box;
}
</style>


@@ -0,0 +1,25 @@
<script lang="ts">
import { Todo } from "./todo.svelte";
let name = "World";
let todo: Todo = $state(new Todo("Hello World!"));
</script>
<main class="app">
<h1>Hello {todo.title}!</h1>
<!-- clicking calls toggle -->
<input type="checkbox" bind:checked={todo.done} />
<button onclick={todo.toggle}>Toggle</button>
</main>
<style>
h1 {
color: #ff3e00;
text-align: center;
font-size: 2em;
font-weight: 100;
}
.app {
box-sizing: border-box;
}
</style>


@@ -4,6 +4,7 @@ import fs from "node:fs";
import os from "node:os";
import { render } from "svelte/server";
import { SveltePlugin } from "../src";
import type { BuildOutput } from "bun";
const fixturePath = (...segs: string[]) => path.join(import.meta.dirname, "fixtures", ...segs);
@@ -32,6 +33,55 @@ it("hello world component", async () => {
expect(res.success).toBeTrue();
});
describe("when importing `.svelte.ts` files with ESM", () => {
let res: BuildOutput;
beforeAll(async () => {
res = await Bun.build({
entrypoints: [fixturePath("with-modules.svelte")],
outdir,
plugins: [SveltePlugin()],
});
});
it("builds successfully", () => {
expect(res.success).toBeTrue();
});
it(`handles "svelte" export condition`, async () => {
const res = await Bun.build({
entrypoints: [fixturePath("svelte-export-condition.svelte")],
outdir,
plugins: [SveltePlugin()],
});
expect(res.success).toBeTrue();
});
});
describe("when importing `.svelte.ts` files with CJS", () => {
let res: BuildOutput;
beforeAll(async () => {
res = await Bun.build({
entrypoints: [fixturePath("with-cjs.svelte")],
outdir,
plugins: [SveltePlugin()],
});
});
it("builds successfully", () => {
expect(res.success).toBeTrue();
});
it("does not double-wrap the module with function(module, exports, __filename, __dirname)", async () => {
const ts = res.outputs.find(output => output.loader === "ts");
expect(ts).toBeDefined();
const code = await ts!.text();
expect(code).toContain("require_todo_cjs_svelte");
expect(code).toContain("var require_todo_cjs_svelte = __commonJS((exports, module) => {\n");
});
});
describe("Bun.build", () => {
it.each(["node", "bun"] as const)('Generates server-side code when targeting "node" or "bun"', async target => {
const res = await Bun.build({


@@ -1,74 +1,3 @@
/**
* Bun.js runtime APIs
*
@@ -185,6 +114,77 @@ declare module "bun" {
| SpawnOptions.Writable
| ReadableStream;
class ShellError extends Error implements ShellOutput {
readonly stdout: Buffer;
readonly stderr: Buffer;
readonly exitCode: number;
/**
* Read from stdout as a string
*
* @param encoding - The encoding to use when decoding the output
* @returns Stdout as a string with the given encoding
* @example
*
* ## Read as UTF-8 string
*
* ```ts
* const output = await $`echo hello`;
* console.log(output.text()); // "hello\n"
* ```
*
* ## Read as base64 string
*
* ```ts
* const output = await $`echo ${atob("hello")}`;
* console.log(output.text("base64")); // "hello\n"
* ```
*
*/
text(encoding?: BufferEncoding): string;
/**
* Read from stdout as a JSON object
*
* @returns Stdout as a JSON object
* @example
*
* ```ts
* const output = await $`echo '{"hello": 123}'`;
* console.log(output.json()); // { hello: 123 }
* ```
*
*/
json(): any;
/**
* Read from stdout as an ArrayBuffer
*
* @returns Stdout as an ArrayBuffer
* @example
*
* ```ts
* const output = await $`echo hello`;
* console.log(output.arrayBuffer()); // ArrayBuffer { byteLength: 6 }
* ```
*/
arrayBuffer(): ArrayBuffer;
/**
* Read from stdout as a Blob
*
* @returns Stdout as a blob
* @example
* ```ts
* const output = await $`echo hello`;
* console.log(output.blob()); // Blob { size: 6, type: "" }
* ```
*/
blob(): Blob;
bytes(): Uint8Array;
}
class ShellPromise extends Promise<ShellOutput> {
get stdin(): WritableStream;
/**
@@ -304,12 +304,12 @@ declare module "bun" {
new (): Shell;
}
export interface Shell {
(strings: TemplateStringsArray, ...expressions: ShellExpression[]): ShellPromise;
readonly Shell: ShellConstructor;
readonly ShellError: typeof ShellError;
readonly ShellPromise: typeof ShellPromise;
/**
* Perform bash-like brace expansion on the given pattern.
@@ -362,9 +362,6 @@ declare module "bun" {
* Configure whether or not the shell should throw an exception on non-zero exit codes.
*/
throws(shouldThrow: boolean): this;
}
export interface ShellOutput {
@@ -2309,10 +2306,68 @@ declare module "bun" {
*/
interface SavepointSQL extends SQL {}
type CSRFAlgorithm = "blake2b256" | "blake2b512" | "sha256" | "sha384" | "sha512" | "sha512-256";
interface CSRFGenerateOptions {
/**
* The number of milliseconds until the token expires. 0 means the token never expires.
* @default 24 * 60 * 60 * 1000 (24 hours)
*/
expiresIn?: number;
/**
* The encoding of the token.
* @default "base64url"
*/
encoding?: "base64" | "base64url" | "hex";
/**
* The algorithm to use for the token.
* @default "sha256"
*/
algorithm?: CSRFAlgorithm;
}
interface CSRFVerifyOptions {
/**
* The secret to use for the token. If not provided, a random default secret will be generated in memory and used.
*/
secret?: string;
/**
* The encoding of the token.
* @default "base64url"
*/
encoding?: "base64" | "base64url" | "hex";
/**
* The algorithm to use for the token.
* @default "sha256"
*/
algorithm?: CSRFAlgorithm;
/**
* The number of milliseconds until the token expires. 0 means the token never expires.
* @default 24 * 60 * 60 * 1000 (24 hours)
*/
maxAge?: number;
}
interface CSRF {
/**
* Generate a CSRF token.
* @param secret The secret to use for the token. If not provided, a random default secret will be generated in memory and used.
* @param options The options for the token.
* @returns The generated token.
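* @example
* ```ts
* const token = Bun.CSRF.generate("my-secret");
* ```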
*/
generate(secret?: string, options?: CSRFGenerateOptions): string;
/**
* Verify a CSRF token.
* @param token The token to verify.
* @param options The options for the token.
* @returns True if the token is valid, false otherwise.
*/
verify(token: string, options?: CSRFVerifyOptions): boolean;
}
var sql: SQL;
var postgres: SQL;
var SQL: SQL;
var CSRF: CSRF;
/**
* This lets you use macros as regular imports
* @example
@@ -2657,7 +2712,7 @@ declare module "bun" {
loader?: { [k in string]: Loader };
/**
* Specifies if and how to generate source maps.
*
* - `"none"` - No source maps are generated
* - `"linked"` - A separate `*.ext.map` file is generated alongside each
* `*.ext` file. A `//# sourceMappingURL` comment is added to the output
@@ -2665,11 +2720,11 @@ declare module "bun" {
* - `"inline"` - an inline source map is appended to the output file.
* - `"external"` - Generate a separate source map file for each input file.
* No `//# sourceMappingURL` comment is added to the output file.
*
* `true` and `false` are aliases for `"inline"` and `"none"`, respectively.
*
* @default "none"
*
* @see {@link outdir} required for `"linked"` maps
* @see {@link publicPath} to customize the base url of linked source maps
*/
@@ -2704,10 +2759,10 @@ declare module "bun" {
env?: "inline" | "disable" | `${string}*`;
/**
* Whether to enable minification.
*
* Use `true`/`false` to enable/disable all minification options. Alternatively,
* you can pass an object for granular control over certain minifications.
*
* @default false
*/
minify?:
@@ -3705,6 +3760,7 @@ declare module "bun" {
interface BunRequest<T extends string = string> extends Request {
params: RouterTypes.ExtractRouteParams<T>;
readonly cookies: CookieMap;
}
interface GenericServeOptions {
@@ -4265,17 +4321,7 @@ declare module "bun" {
* Passing other options such as `port` or `hostname` won't do anything.
*/
reload<T, R extends { [K in keyof R]: RouterTypes.RouteValue<K & string> }>(
options: ServeFunctionOptions<T, R> & {
/**
* @deprecated Use `routes` instead in new code. This will continue to work for awhile though.
*/
@@ -4653,17 +4699,7 @@ declare module "bun" {
@param options.routes - Route definitions mapping paths to handlers
*/
function serve<T, R extends { [K in keyof R]: RouterTypes.RouteValue<K & string> }>(
options: ServeFunctionOptions<T, R> & {
/**
* @deprecated Use `routes` instead in new code. This will continue to work for a while though.
*/
@@ -4671,6 +4707,32 @@ declare module "bun" {
},
): Server;
type ServeFunctionOptions<T, R extends { [K in keyof R]: RouterTypes.RouteValue<K & string> }> =
| (DistributedOmit<Exclude<Serve<T>, WebSocketServeOptions<T>>, "fetch"> & {
routes: R;
fetch?: (this: Server, request: Request, server: Server) => Response | Promise<Response>;
})
| (DistributedOmit<Exclude<Serve<T>, WebSocketServeOptions<T>>, "routes"> & {
routes?: never;
fetch: (this: Server, request: Request, server: Server) => Response | Promise<Response>;
})
| (WebSocketServeOptions<T> & {
routes: R;
fetch?: (
this: Server,
request: Request,
server: Server,
) => Response | Promise<Response | void | undefined> | void | undefined;
})
| (WebSocketServeOptions<T> & {
routes?: never;
fetch: (
this: Server,
request: Request,
server: Server,
) => Response | Promise<Response | void | undefined> | void | undefined;
});
/**
* [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob) powered by the fastest system calls available for operating on files.
*
@@ -6239,7 +6301,7 @@ declare module "bun" {
* @param socket
*/
open?(socket: Socket<Data>): void | Promise<void>;
close?(socket: Socket<Data>, error?: Error): void | Promise<void>;
error?(socket: Socket<Data>, error: Error): void | Promise<void>;
data?(socket: Socket<Data>, data: BinaryTypeList[DataBinaryType]): void | Promise<void>;
drain?(socket: Socket<Data>): void | Promise<void>;
@@ -6638,7 +6700,8 @@ declare module "bun" {
* This is useful for aborting a subprocess when some other part of the
* program is aborted, such as a `fetch` response.
*
* If the signal is aborted, the process will be killed with the signal
* specified by `killSignal` (defaults to SIGTERM).
*
* @example
* ```ts
@@ -6657,6 +6720,41 @@ declare module "bun" {
* ```
*/
signal?: AbortSignal;
/**
* The maximum amount of time the process is allowed to run in milliseconds.
*
* If the timeout is reached, the process will be killed with the signal
* specified by `killSignal` (defaults to SIGTERM).
*
* @example
* ```ts
* // Kill the process after 5 seconds
* const subprocess = Bun.spawn({
* cmd: ["sleep", "10"],
* timeout: 5000,
* });
* await subprocess.exited; // Will resolve after 5 seconds
* ```
*/
timeout?: number;
/**
* The signal to use when killing the process after a timeout or when the AbortSignal is aborted.
*
* @default "SIGTERM" (signal 15)
*
* @example
* ```ts
* // Kill the process with SIGKILL after 5 seconds
* const subprocess = Bun.spawn({
* cmd: ["sleep", "10"],
* timeout: 5000,
* killSignal: "SIGKILL",
* });
* ```
*/
killSignal?: string | number;
}
type OptionsToSubprocess<Opts extends OptionsObject> =
@@ -6902,6 +7000,8 @@ declare module "bun" {
resourceUsage: ResourceUsage;
signalCode?: string;
exitedDueToTimeout?: true;
pid: number;
}
/**
@@ -7429,4 +7529,86 @@ declare module "bun" {
| [pkg: string, info: BunLockFilePackageInfo, bunTag: string]
/** root */
| [pkg: string, info: Pick<BunLockFileBasePackageInfo, "bin" | "binDir">];
interface CookieInit {
name?: string;
value?: string;
domain?: string;
path?: string;
expires?: number | Date;
secure?: boolean;
sameSite?: CookieSameSite;
httpOnly?: boolean;
partitioned?: boolean;
maxAge?: number;
}
interface CookieStoreDeleteOptions {
name: string;
domain?: string | null;
path?: string;
}
interface CookieStoreGetOptions {
name?: string;
url?: string;
}
type CookieSameSite = "strict" | "lax" | "none";
class Cookie {
constructor(name: string, value: string, options?: CookieInit);
constructor(cookieString: string);
constructor(cookieObject?: CookieInit);
name: string;
value: string;
domain?: string;
path: string;
expires?: number;
secure: boolean;
sameSite: CookieSameSite;
partitioned: boolean;
maxAge?: number;
httpOnly: boolean;
isExpired(): boolean;
toString(): string;
toJSON(): CookieInit;
static parse(cookieString: string): Cookie;
static from(name: string, value: string, options?: CookieInit): Cookie;
static serialize(...cookies: Cookie[]): string;
}
class CookieMap implements Iterable<[string, Cookie]> {
constructor(init?: string[][] | Record<string, string> | string);
get(name: string): Cookie | null;
get(options?: CookieStoreGetOptions): Cookie | null;
getAll(name: string): Cookie[];
getAll(options?: CookieStoreGetOptions): Cookie[];
has(name: string, value?: string): boolean;
set(name: string, value: string): void;
set(options: CookieInit): void;
delete(name: string): void;
delete(options: CookieStoreDeleteOptions): void;
toString(): string;
toJSON(): Record<string, ReturnType<Cookie["toJSON"]>>;
readonly size: number;
entries(): IterableIterator<[string, Cookie]>;
keys(): IterableIterator<string>;
values(): IterableIterator<Cookie>;
forEach(callback: (value: Cookie, key: string, map: CookieMap) => void, thisArg?: any): void;
[Symbol.iterator](): IterableIterator<[string, Cookie]>;
}
}


@@ -1,24 +1,192 @@
export {};
declare global {
namespace Bun {
type HMREventNames =
| "bun:ready"
| "bun:beforeUpdate"
| "bun:afterUpdate"
| "bun:beforeFullReload"
| "bun:beforePrune"
| "bun:invalidate"
| "bun:error"
| "bun:ws:disconnect"
| "bun:ws:connect";
/**
* The event names for the dev server
*/
type HMREvent = HMREventNames | (string & {});
}
interface ImportMeta {
/**
* Hot module replacement APIs. This value is `undefined` in production and
* can be used in an `if` statement to check if HMR APIs are available
*
* ```ts
* if (import.meta.hot) {
* // HMR APIs are available
* }
* ```
*
* However, this check is usually not needed as Bun will dead-code-eliminate
* calls to all of the HMR APIs in production builds.
*
* https://bun.sh/docs/bundler/hmr
*/
hot: {
/**
* `import.meta.hot.data` maintains state between module instances during
* hot replacement, enabling data transfer from previous to new versions.
* When `import.meta.hot.data` is written to, Bun will mark this module as
* capable of self-accepting (equivalent of calling `accept()`).
*
* @example
* ```ts
* const root = import.meta.hot.data.root ??= createRoot(elem);
* root.render(<App />); // re-use an existing root
* ```
*
* In production, `data` is inlined to be `{}`. This is handy because Bun
* knows it can minify `{}.prop ??= value` into `value` in production.
*/
data: any;
/**
* Indicate that this module can be replaced simply by re-evaluating the
* file. After a hot update, importers of this module will be
* automatically patched.
*
* When `import.meta.hot.accept` is not used, the page will reload when
* the file updates, and a console message shows which files were checked.
*
* @example
* ```ts
* import { getCount } from "./foo";
*
* console.log("count is ", getCount());
*
* import.meta.hot.accept();
* ```
*/
accept(): void;
/**
* Indicate that this module can be replaced by evaluating the new module,
* and then calling the callback with the new module. In this mode, the
* importers do not get patched. This is to match Vite, which is unable
* to patch their import statements. Prefer using `import.meta.hot.accept()`
* without an argument as it usually makes your code easier to understand.
*
* When `import.meta.hot.accept` is not used, the page will reload when
* the file updates, and a console message shows which files were checked.
*
* @example
* ```ts
* export const count = 0;
*
* import.meta.hot.accept((newModule) => {
* if (newModule) {
* // newModule is undefined when SyntaxError happened
* console.log('updated: count is now ', newModule.count)
* }
* });
* ```
*
* In production, calls to this are dead-code-eliminated.
*/
accept(cb: (newModule: any | undefined) => void): void;
/**
* Indicate that a dependency's module can be accepted. When the dependency
* is updated, the callback will be called with the new module.
*
* When `import.meta.hot.accept` is not used, the page will reload when
* the file updates, and a console message shows which files were checked.
*
* @example
* ```ts
* import.meta.hot.accept('./foo', (newModule) => {
* if (newModule) {
* // newModule is undefined when SyntaxError happened
* console.log('updated: count is now ', newModule.count)
* }
* });
* ```
*/
accept(specifier: string, callback: (newModule: any) => void): void;
/**
* Indicate that a dependency's module can be accepted. This variant
* accepts an array of dependencies, where the callback will receive
* the one updated module, and `undefined` for the rest.
*
* When `import.meta.hot.accept` is not used, the page will reload when
* the file updates, and a console message shows which files were checked.
*/
accept(specifiers: string[], callback: (newModules: (any | undefined)[]) => void): void;
/**
* Attach an on-dispose callback. This is called:
* - Just before the module is replaced with another copy (before the next is loaded)
* - After the module is detached (removing all imports to this module)
*
* This callback is not called on route navigation or when the browser tab closes.
*
* Returning a promise will delay module replacement until the module is
* disposed. All dispose callbacks are called in parallel.
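*
* @example
* A minimal sketch, assuming a module-level `counter`; state is handed to
* the next module instance via `data`:
* ```ts
* let counter = import.meta.hot.data.counter ?? 0;
*
* import.meta.hot.dispose((data) => {
*   // Persist state before this copy of the module is replaced.
*   data.counter = counter;
* });
* ```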
*/
dispose(cb: (data: any) => void | Promise<void>): void;
/**
* No-op
* @deprecated
*/
decline(): void;
// NOTE TO CONTRIBUTORS ////////////////////////////////////////
// Callback is currently never called for `.prune()` //
// so the types are commented out until we support it. //
////////////////////////////////////////////////////////////////
// /**
// * Attach a callback that is called when the module is removed from the module graph.
// *
// * This can be used to clean up resources that were created when the module was loaded.
// * Unlike `import.meta.hot.dispose()`, this pairs much better with `accept` and `data` to manage stateful resources.
// *
// * @example
// * ```ts
// * export const ws = (import.meta.hot.data.ws ??= new WebSocket(location.origin));
// *
// * import.meta.hot.prune(() => {
// * ws.close();
// * });
// * ```
// */
// prune(callback: () => void): void;
/**
* Listen for an event from the dev server
*
* For compatibility with Vite, event names are also available via the `vite:*` prefix instead of `bun:*`.
*
* https://bun.sh/docs/bundler/hmr#import-meta-hot-on-and-off
* @param event The event to listen to
* @param callback The callback to call when the event is emitted
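*
* @example
* A minimal sketch; the event name is assumed from the Vite-compatible set:
* ```ts
* import.meta.hot.on("bun:beforeFullReload", () => {
*   console.log("full page reload incoming");
* });
* ```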
*/
on(event: Bun.HMREvent, callback: () => void): void;
/**
* Stop listening for an event from the dev server
*
* For compatibility with Vite, event names are also available via the `vite:*` prefix instead of `bun:*`.
*
* https://bun.sh/docs/bundler/hmr#import-meta-hot-on-and-off
* @param event The event to stop listening to
* @param callback The callback to remove
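*
* @example
* A minimal sketch pairing `on` and `off` with a named handler (event name
* assumed from the Vite-compatible set):
* ```ts
* const handler = () => console.log("before update");
* import.meta.hot.on("bun:beforeUpdate", handler);
* // later, when the listener is no longer needed:
* import.meta.hot.off("bun:beforeUpdate", handler);
* ```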
*/
off(event: Bun.HMREvent, callback: () => void): void;
};
}
}

View File

@@ -76,6 +76,17 @@ declare global {
revision: string;
reallyExit(code?: number): never;
dlopen(module: { exports: any }, filename: string, flags?: number): void;
_exiting: boolean;
noDeprecation: boolean;
binding(m: string): object;
binding(m: "constants"): {
os: typeof import("node:os").constants;
fs: typeof import("node:fs").constants;
crypto: typeof import("node:crypto").constants;
zlib: typeof import("node:zlib").constants;
trace: typeof import("node:trace").constants;
};
}
}
@@ -1765,6 +1776,71 @@ declare global {
*/
bytes(): Promise<Uint8Array>;
}
var Blob: typeof Blob;
interface Uint8Array {
/**
* Convert the Uint8Array to a base64 encoded string
* @returns The base64 encoded string representation of the Uint8Array
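*
* @example
* A quick sketch of the expected output:
* ```ts
* new Uint8Array([1, 2, 3]).toBase64(); // "AQID"
* new Uint8Array([1, 2]).toBase64({ omitPadding: true }); // "AQI"
* ```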
*/
toBase64(options?: { alphabet?: "base64" | "base64url"; omitPadding?: boolean }): string;
/**
* Set the contents of the Uint8Array from a base64 encoded string
* @param base64 The base64 encoded string to decode into the array
* @param offset Optional starting index to begin setting the decoded bytes (default: 0)
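*
* @example
* A minimal sketch; the target array must already be allocated:
* ```ts
* const bytes = new Uint8Array(3);
* const { read, written } = bytes.setFromBase64("AQID");
* // read === 4, written === 3, bytes is now [1, 2, 3]
* ```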
*/
setFromBase64(
base64: string,
offset?: number,
): {
/**
* The number of bytes read from the base64 string
*/
read: number;
/**
* The number of bytes written to the Uint8Array
* Will never be greater than the `.byteLength` of this Uint8Array
*/
written: number;
};
/**
* Convert the Uint8Array to a hex encoded string
* @returns The hex encoded string representation of the Uint8Array
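*
* @example
* A quick sketch of the expected output:
* ```ts
* new Uint8Array([222, 173, 190, 239]).toHex(); // "deadbeef"
* ```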
*/
toHex(): string;
/**
* Set the contents of the Uint8Array from a hex encoded string
* @param hex The hex encoded string to decode into the array. The string must
* have an even length, contain only valid hexadecimal characters, and no whitespace.
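*
* @example
* A minimal sketch; two hex characters decode to one byte:
* ```ts
* const bytes = new Uint8Array(4);
* const { read, written } = bytes.setFromHex("deadbeef");
* // read === 8, written === 4, bytes is now [0xde, 0xad, 0xbe, 0xef]
* ```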
*/
setFromHex(hex: string): {
/**
* The number of bytes read from the hex string
*/
read: number;
/**
* The number of bytes written to the Uint8Array
* Will never be greater than the `.byteLength` of this Uint8Array
*/
written: number;
};
}
interface Uint8ArrayConstructor {
/**
* Create a new Uint8Array from a base64 encoded string
* @param base64 The base64 encoded string to convert to a Uint8Array
* @returns A new Uint8Array containing the decoded data
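*
* @example
* A quick sketch, including the base64url alphabet:
* ```ts
* Uint8Array.fromBase64("AQID"); // Uint8Array [1, 2, 3]
* Uint8Array.fromBase64("-_8", { alphabet: "base64url" }); // Uint8Array [251, 255]
* ```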
*/
fromBase64(
base64: string,
options?: {
alphabet?: "base64" | "base64url";
lastChunkHandling?: "loose" | "strict" | "stop-before-partial";
},
): Uint8Array;
}
}

View File

@@ -477,6 +477,79 @@ declare module "bun:sqlite" {
*/
static deserialize(serialized: NodeJS.TypedArray | ArrayBufferLike, isReadOnly?: boolean): Database;
/**
* Load a serialized SQLite3 database. This version enables you to specify
* additional options such as `strict` to put the database into strict mode.
*
* Internally, this calls `sqlite3_deserialize`.
*
* @param serialized Data to load
* @returns `Database` instance
*
* @example
* ```ts
* test("supports serialize/deserialize", () => {
* const db = Database.open(":memory:");
* db.exec("CREATE TABLE test (id INTEGER PRIMARY KEY, name TEXT)");
* db.exec('INSERT INTO test (name) VALUES ("Hello")');
* db.exec('INSERT INTO test (name) VALUES ("World")');
*
* const input = db.serialize();
* const db2 = Database.deserialize(input, { strict: true });
*
* const stmt = db2.prepare("SELECT * FROM test");
* expect(JSON.stringify(stmt.get())).toBe(
* JSON.stringify({
* id: 1,
* name: "Hello",
* }),
* );
*
* expect(JSON.stringify(stmt.all())).toBe(
* JSON.stringify([
* {
* id: 1,
* name: "Hello",
* },
* {
* id: 2,
* name: "World",
* },
* ]),
* );
* db2.exec("insert into test (name) values ($foo)", { foo: "baz" });
* expect(JSON.stringify(stmt.all())).toBe(
* JSON.stringify([
* {
* id: 1,
* name: "Hello",
* },
* {
* id: 2,
* name: "World",
* },
* {
* id: 3,
* name: "baz",
* },
* ]),
* );
*
* const db3 = Database.deserialize(input, { readonly: true, strict: true });
* try {
* db3.exec("insert into test (name) values ($foo)", { foo: "baz" });
* throw new Error("Expected error");
* } catch (e) {
* expect(e.message).toBe("attempt to write a readonly database");
* }
* });
* ```
*/
static deserialize(
serialized: NodeJS.TypedArray | ArrayBufferLike,
options?: { readonly?: boolean; strict?: boolean; safeIntegers?: boolean },
): Database;
/**
* See `sqlite3_file_control` for more information.
* @link https://www.sqlite.org/c3ref/file_control.html

View File

@@ -72,7 +72,7 @@ void us_socket_context_close(int ssl, struct us_socket_context_t *context) {
while (ls) {
struct us_listen_socket_t *nextLS = (struct us_listen_socket_t *) ls->s.next;
us_listen_socket_close(ssl, ls);
ls = nextLS;
}
@@ -310,7 +310,7 @@ struct us_bun_verify_error_t us_socket_verify_error(int ssl, struct us_socket_t
}
#endif
return (struct us_bun_verify_error_t) { .error = 0, .code = NULL, .reason = NULL };
}
void us_internal_socket_context_free(int ssl, struct us_socket_context_t *context) {
@@ -337,7 +337,7 @@ void us_socket_context_ref(int ssl, struct us_socket_context_t *context) {
}
void us_socket_context_unref(int ssl, struct us_socket_context_t *context) {
uint32_t ref_count = context->ref_count;
context->ref_count--;
if (ref_count == 1) {
us_internal_socket_context_free(ssl, context);
}
@@ -520,7 +520,7 @@ void *us_socket_context_connect(int ssl, struct us_socket_context_t *context, co
}
struct us_connecting_socket_t *c = us_calloc(1, sizeof(struct us_connecting_socket_t) + socket_ext_size);
c->socket_ext_size = socket_ext_size;
c->options = options;
c->ssl = ssl > 0;
c->timeout = 255;
@@ -641,9 +641,9 @@ void us_internal_socket_after_open(struct us_socket_t *s, int error) {
/* Emit error, close without emitting on_close */
/* There are two possible states here:
1. It's a us_connecting_socket_t*. DNS resolution failed, or a connection failed.
2. It's a us_socket_t*
We differentiate between these two cases by checking if the connect_state is null.
*/
@@ -887,7 +887,7 @@ void us_socket_context_on_connect_error(int ssl, struct us_socket_context_t *con
return;
}
#endif
context->on_connect_error = on_connect_error;
}
@@ -898,7 +898,7 @@ void us_socket_context_on_socket_connect_error(int ssl, struct us_socket_context
return;
}
#endif
context->on_socket_connect_error = on_connect_error;
}

View File

@@ -270,7 +270,6 @@ void us_loop_run_bun_tick(struct us_loop_t *loop, const struct timespec* timeout
/* Fetch ready polls */
#ifdef LIBUS_USE_EPOLL
loop->num_ready_polls = bun_epoll_pwait2(loop->fd, loop->ready_polls, 1024, timeout);
#else
do {

View File

@@ -22,7 +22,6 @@
#include <string.h>
#include <stdint.h>
#include <errno.h>
#ifndef WIN32
#include <fcntl.h>
#endif
@@ -168,13 +167,17 @@ void us_connecting_socket_close(int ssl, struct us_connecting_socket_t *c) {
if (!c->pending_resolve_callback) {
us_connecting_socket_free(ssl, c);
}
}
}
struct us_socket_t *us_socket_close(int ssl, struct us_socket_t *s, int code, void *reason) {
if(ssl) {
return (struct us_socket_t *)us_internal_ssl_socket_close((struct us_internal_ssl_socket_t *) s, code, reason);
}
if (!us_socket_is_closed(0, s)) {
/* make sure the context is alive until the callback ends */
us_socket_context_ref(ssl, s->context);
if (s->low_prio_state == 1) {
/* Unlink this socket from the low-priority queue */
if (!s->prev) s->context->loop->data.low_prio_head = s->next;
@@ -186,7 +189,6 @@ struct us_socket_t *us_socket_close(int ssl, struct us_socket_t *s, int code, vo
s->next = 0;
s->low_prio_state = 0;
us_socket_context_unref(ssl, s->context);
} else {
us_internal_socket_context_unlink_socket(ssl, s->context, s);
}
@@ -207,18 +209,27 @@ struct us_socket_t *us_socket_close(int ssl, struct us_socket_t *s, int code, vo
bsd_close_socket(us_poll_fd((struct us_poll_t *) s));
/* Link this socket to the close-list and let it be deleted after this iteration */
s->next = s->context->loop->data.closed_head;
s->context->loop->data.closed_head = s;
/* Any socket with prev = context is marked as closed */
s->prev = (struct us_socket_t *) s->context;
/* mark it as closed and call the callback */
struct us_socket_t *res = s;
if (!(us_internal_poll_type(&s->p) & POLL_TYPE_SEMI_SOCKET)) {
return s->context->on_close(s, code, reason);
res = s->context->on_close(s, code, reason);
}
/* Link this socket to the close-list and let it be deleted after this iteration */
s->next = s->context->loop->data.closed_head;
s->context->loop->data.closed_head = s;
/* unref the context after the callback ends */
us_socket_context_unref(ssl, s->context);
/* preserve the return value from on_close if its called */
return res;
}
return s;
}
@@ -435,18 +446,18 @@ int us_connecting_socket_get_error(int ssl, struct us_connecting_socket_t *c) {
return c->error;
}
/*
Note: this assumes that the socket is non-TLS and will be adopted and wrapped with a new TLS context
context ext will not be copied to the new context, new context will contain us_wrapped_socket_context_t on ext
*/
struct us_socket_t *us_socket_wrap_with_tls(int ssl, struct us_socket_t *s, struct us_bun_socket_context_options_t options, struct us_socket_events_t events, int socket_ext_size) {
// only accepts non-TLS sockets
if (ssl) {
return NULL;
}
return(struct us_socket_t *) us_internal_ssl_socket_wrap_with_tls(s, options, events, socket_ext_size);
}
// if a TLS socket calls this, it will start SSL call open event and TLS handshake if required
// will have no effect if the socket is closed or is not TLS
@@ -503,9 +514,14 @@ void us_socket_nodelay(struct us_socket_t *s, int enabled) {
}
}
/// Returns 0 on success. Returned error values depend on the platform.
/// - on posix, returns `errno`
/// - on windows, when LIBUS_USE_LIBUV is set, returns a UV err code
/// - on windows, otherwise returns result of `WSAGetLastError`
int us_socket_keepalive(us_socket_r s, int enabled, unsigned int delay){
if (!us_socket_is_shut_down(0, s)) {
bsd_socket_keepalive(us_poll_fd((struct us_poll_t *) s), enabled, delay);
return bsd_socket_keepalive(us_poll_fd((struct us_poll_t *) s), enabled, delay);
}
return 0;
}
@@ -544,4 +560,4 @@ void us_socket_resume(int ssl, struct us_socket_t *s) {
}
// we are readable and writable so we resume everything
us_poll_change(&s->p, s->context->loop, LIBUS_SOCKET_READABLE | LIBUS_SOCKET_WRITABLE);
}

View File

@@ -189,6 +189,10 @@ public:
* This function should probably be optimized a lot in future releases,
* it could be O(1) with a hash map of fullnames and their counts. */
unsigned int numSubscribers(std::string_view topic) {
if (!topicTree) {
return 0;
}
Topic *t = topicTree->lookupTopic(topic);
if (t) {
return (unsigned int) t->size();
@@ -408,14 +412,14 @@ public:
webSocketContext->getExt()->messageHandler = std::move(behavior.message);
webSocketContext->getExt()->drainHandler = std::move(behavior.drain);
webSocketContext->getExt()->subscriptionHandler = std::move(behavior.subscription);
webSocketContext->getExt()->closeHandler = std::move([closeHandler = std::move(behavior.close)](WebSocket<SSL, true, UserData> *ws, int code, std::string_view message) mutable {
webSocketContext->getExt()->closeHandler = [closeHandler = std::move(behavior.close)](WebSocket<SSL, true, UserData> *ws, int code, std::string_view message) mutable {
if (closeHandler) {
closeHandler(ws, code, message);
}
/* Destruct user data after returning from close handler */
((UserData *) ws->getUserData())->~UserData();
});
};
webSocketContext->getExt()->pingHandler = std::move(behavior.ping);
webSocketContext->getExt()->pongHandler = std::move(behavior.pong);
@@ -428,8 +432,8 @@ public:
webSocketContext->getExt()->maxLifetime = behavior.maxLifetime;
webSocketContext->getExt()->compression = behavior.compression;
/* Calculate idleTimeoutCompnents */
webSocketContext->getExt()->calculateIdleTimeoutCompnents(behavior.idleTimeout);
/* Calculate idleTimeoutComponents */
webSocketContext->getExt()->calculateIdleTimeoutComponents(behavior.idleTimeout);
httpContext->onHttp("GET", pattern, [webSocketContext, behavior = std::move(behavior)](auto *res, auto *req) mutable {
@@ -606,11 +610,20 @@ public:
return std::move(*this);
}
void setOnClose(HttpContextData<SSL>::OnSocketClosedCallback onClose) {
httpContext->getSocketContextData()->onSocketClosed = onClose;
}
TemplatedApp &&run() {
uWS::run();
return std::move(*this);
}
TemplatedApp &&setUsingCustomExpectHandler(bool value) {
httpContext->getSocketContextData()->usingCustomExpectHandler = value;
return std::move(*this);
}
};
typedef TemplatedApp<false> App;

View File

@@ -30,6 +30,9 @@
#include <iostream>
#include "libusockets.h"
#include "bun-usockets/src/internal/internal.h"
#include "LoopData.h"
#include "AsyncSocketData.h"
@@ -54,28 +57,6 @@ struct AsyncSocket {
template <typename, typename> friend struct TopicTree;
template <bool> friend struct HttpResponse;
private:
/* Helper, do not use directly (todo: move to uSockets or de-crazify) */
void throttle_helper(int toggle) {
/* These should be exposed by uSockets */
static thread_local int us_events[2] = {0, 0};
struct us_poll_t *p = (struct us_poll_t *) this;
struct us_loop_t *loop = us_socket_context_loop(SSL, us_socket_context(SSL, (us_socket_t *) this));
if (toggle) {
/* Pause */
int events = us_poll_events(p);
if (events) {
us_events[getBufferedAmount() ? 1 : 0] = events;
}
us_poll_change(p, loop, 0);
} else {
/* Resume */
int events = us_events[getBufferedAmount() ? 1 : 0];
us_poll_change(p, loop, events);
}
}
public:
/* Returns SSL pointer or FD as pointer */
@@ -105,13 +86,13 @@ public:
/* Experimental pause */
us_socket_t *pause() {
throttle_helper(1);
us_socket_pause(SSL, (us_socket_t *) this);
return (us_socket_t *) this;
}
/* Experimental resume */
us_socket_t *resume() {
throttle_helper(0);
us_socket_resume(SSL, (us_socket_t *) this);
return (us_socket_t *) this;
}

View File

@@ -81,7 +81,7 @@ struct AsyncSocketData {
}
/* Or emppty */
/* Or empty */
AsyncSocketData() = default;
};

View File

@@ -43,10 +43,10 @@ private:
HttpContext() = delete;
/* Maximum delay allowed until an HTTP connection is terminated due to outstanding request or rejected data (slow loris protection) */
static const int HTTP_IDLE_TIMEOUT_S = 10;
static constexpr int HTTP_IDLE_TIMEOUT_S = 10;
/* Minimum allowed receive throughput per second (clients uploading less than 16kB/sec get dropped) */
static const int HTTP_RECEIVE_THROUGHPUT_BYTES = 16 * 1024;
static constexpr int HTTP_RECEIVE_THROUGHPUT_BYTES = 16 * 1024;
us_socket_context_t *getSocketContext() {
return (us_socket_context_t *) this;
@@ -115,9 +115,8 @@ private:
us_socket_context_on_close(SSL, getSocketContext(), [](us_socket_t *s, int /*code*/, void */*reason*/) {
((AsyncSocket<SSL> *)s)->uncorkWithoutSending();
/* Get socket ext */
HttpResponseData<SSL> *httpResponseData = (HttpResponseData<SSL> *) us_socket_ext(SSL, s);
auto *httpResponseData = reinterpret_cast<HttpResponseData<SSL> *>(us_socket_ext(SSL, s));
/* Call filter */
HttpContextData<SSL> *httpContextData = getSocketContextDataS(s);
@@ -130,6 +129,9 @@ private:
httpResponseData->onAborted((HttpResponse<SSL> *)s, httpResponseData->userData);
}
if (httpResponseData->socketData && httpContextData->onSocketClosed) {
httpContextData->onSocketClosed(httpResponseData->socketData, SSL, s);
}
/* Destruct socket ext */
httpResponseData->~HttpResponseData<SSL>();
@@ -171,7 +173,7 @@ private:
proxyParser = &httpResponseData->proxyParser;
#endif
/* The return value is entirely up to us to interpret. The HttpParser only care for whether the returned value is DIFFERENT or not from passed user */
/* The return value is entirely up to us to interpret. The HttpParser cares only for whether the returned value is DIFFERENT from passed user */
auto [err, returnedSocket] = httpResponseData->consumePostPadded(data, (unsigned int) length, s, proxyParser, [httpContextData](void *s, HttpRequest *httpRequest) -> void * {
/* For every request we reset the timeout and hang until user makes action */
/* Warning: if we are in shutdown state, resetting the timer is a security issue! */
@@ -182,7 +184,7 @@ private:
httpResponseData->offset = 0;
/* Are we not ready for another request yet? Terminate the connection.
* Important for denying async pipelining until, if ever, we want to suppot it.
* Important for denying async pipelining until, if ever, we want to support it.
* Otherwise requests can get mixed up on the same connection. We still support sync pipelining. */
if (httpResponseData->state & HttpResponseData<SSL>::HTTP_RESPONSE_PENDING) {
us_socket_close(SSL, (us_socket_t *) s, 0, nullptr);
@@ -197,6 +199,8 @@ private:
httpResponseData->state |= HttpResponseData<SSL>::HTTP_CONNECTION_CLOSE;
}
httpResponseData->fromAncientRequest = httpRequest->isAncient();
/* Select the router based on SNI (only possible for SSL) */
auto *selectedRouter = &httpContextData->router;
if constexpr (SSL) {
@@ -358,9 +362,8 @@ private:
/* Handle HTTP write out (note: SSL_read may trigger this spuriously, the app need to handle spurious calls) */
us_socket_context_on_writable(SSL, getSocketContext(), [](us_socket_t *s) {
AsyncSocket<SSL> *asyncSocket = (AsyncSocket<SSL> *) s;
HttpResponseData<SSL> *httpResponseData = (HttpResponseData<SSL> *) asyncSocket->getAsyncSocketData();
auto *asyncSocket = reinterpret_cast<AsyncSocket<SSL> *>(s);
auto *httpResponseData = reinterpret_cast<HttpResponseData<SSL> *>(asyncSocket->getAsyncSocketData());
/* Ask the developer to write data and return success (true) or failure (false), OR skip sending anything and return success (true). */
if (httpResponseData->onWritable) {
@@ -369,7 +372,7 @@ private:
/* We expect the developer to return whether or not write was successful (true).
* If write was never called, the developer should still return true so that we may drain. */
bool success = httpResponseData->callOnWritable((HttpResponse<SSL> *)asyncSocket, httpResponseData->offset);
bool success = httpResponseData->callOnWritable(reinterpret_cast<HttpResponse<SSL> *>(asyncSocket), httpResponseData->offset);
/* The developer indicated that their onWritable failed. */
if (!success) {
@@ -396,28 +399,26 @@ private:
}
/* Expect another writable event, or another request within the timeout */
((HttpResponse<SSL> *) s)->resetTimeout();
reinterpret_cast<HttpResponse<SSL> *>(s)->resetTimeout();
return s;
});
/* Handle FIN, HTTP does not support half-closed sockets, so simply close */
us_socket_context_on_end(SSL, getSocketContext(), [](us_socket_t *s) {
((AsyncSocket<SSL> *)s)->uncorkWithoutSending();
auto *asyncSocket = reinterpret_cast<AsyncSocket<SSL> *>(s);
asyncSocket->uncorkWithoutSending();
/* We do not care for half closed sockets */
AsyncSocket<SSL> *asyncSocket = (AsyncSocket<SSL> *) s;
return asyncSocket->close();
});
/* Handle socket timeouts, simply close them so to not confuse client with FIN */
us_socket_context_on_timeout(SSL, getSocketContext(), [](us_socket_t *s) {
/* Force close rather than gracefully shutdown and risk confusing the client with a complete download */
AsyncSocket<SSL> *asyncSocket = (AsyncSocket<SSL> *) s;
// Node.js by default sclose the connection but they emit the timeout event before that
HttpResponseData<SSL> *httpResponseData = (HttpResponseData<SSL> *) asyncSocket->getAsyncSocketData();
AsyncSocket<SSL> *asyncSocket = reinterpret_cast<AsyncSocket<SSL> *>(s);
// Node.js by default closes the connection but they emit the timeout event before that
HttpResponseData<SSL> *httpResponseData = reinterpret_cast<HttpResponseData<SSL> *>(asyncSocket->getAsyncSocketData());
if (httpResponseData->onTimeout) {
httpResponseData->onTimeout((HttpResponse<SSL> *)s, httpResponseData->userData);
@@ -493,16 +494,20 @@ public:
}
}
httpContextData->currentRouter->add(methods, pattern, [handler = std::move(handler), parameterOffsets = std::move(parameterOffsets)](auto *r) mutable {
const bool &customContinue = httpContextData->usingCustomExpectHandler;
httpContextData->currentRouter->add(methods, pattern, [handler = std::move(handler), parameterOffsets = std::move(parameterOffsets), &customContinue](auto *r) mutable {
auto user = r->getUserData();
user.httpRequest->setYield(false);
user.httpRequest->setParameters(r->getParameters());
user.httpRequest->setParameterOffsets(&parameterOffsets);
/* Middleware? Automatically respond to expectations */
std::string_view expect = user.httpRequest->getHeader("expect");
if (expect.length() && expect == "100-continue") {
user.httpResponse->writeContinue();
if (!customContinue) {
/* Middleware? Automatically respond to expectations */
std::string_view expect = user.httpRequest->getHeader("expect");
if (expect.length() && expect == "100-continue") {
user.httpResponse->writeContinue();
}
}
handler(user.httpResponse, user.httpRequest);

View File

@@ -34,6 +34,7 @@ struct alignas(16) HttpContextData {
template <bool> friend struct TemplatedApp;
private:
std::vector<MoveOnlyFunction<void(HttpResponse<SSL> *, int)>> filterHandlers;
using OnSocketClosedCallback = void (*)(void* userData, int is_ssl, struct us_socket_t *rawSocket);
MoveOnlyFunction<void(const char *hostname)> missingServerNameHandler;
@@ -50,6 +51,10 @@ private:
void *upgradedWebSocket = nullptr;
bool isParsingHttp = false;
bool rejectUnauthorized = false;
bool usingCustomExpectHandler = false;
/* Used to simulate Node.js socket events. */
OnSocketClosedCallback onSocketClosed = nullptr;
// TODO: SNI
void clearRoutes() {

View File

@@ -239,7 +239,7 @@ namespace uWS
}
return unsignedIntegerValue;
}
static inline uint64_t hasLess(uint64_t x, uint64_t n) {
return (((x)-~0ULL/255*(n))&~(x)&~0ULL/255*128);
}
@@ -283,7 +283,7 @@ namespace uWS
}
return false;
}
static inline void *consumeFieldName(char *p) {
/* Best case fast path (particularly useful with clang) */
while (true) {
@@ -323,14 +323,14 @@ namespace uWS
uint64_t http;
__builtin_memcpy(&http, data, sizeof(uint64_t));
uint32_t first_four_bytes = http & static_cast<uint32_t>(0xFFFFFFFF);
// check if any of the first four bytes are non-ASCII
if ((first_four_bytes & 0x80808080) != 0) [[unlikely]] {
return 0;
}
first_four_bytes |= 0x20202020; // Lowercase the first four bytes
static constexpr char http_lowercase_bytes[4] = {'h', 't', 't', 'p'};
static constexpr uint32_t http_lowercase_bytes_int = __builtin_bit_cast(uint32_t, http_lowercase_bytes);
if (first_four_bytes == http_lowercase_bytes_int) [[likely]] {
@@ -343,7 +343,7 @@ namespace uWS
static constexpr char S_colon_slash_slash[4] = {'S', ':', '/', '/'};
static constexpr uint32_t S_colon_slash_slash_int = __builtin_bit_cast(uint32_t, S_colon_slash_slash);
// Extract the last four bytes from the uint64_t
const uint32_t last_four_bytes = (http >> 32) & static_cast<uint32_t>(0xFFFFFFFF);
return (last_four_bytes == s_colon_slash_slash_int) || (last_four_bytes == S_colon_slash_slash_int);
@@ -361,7 +361,7 @@ namespace uWS
if (&data[1] == end) [[unlikely]] {
return nullptr;
}
if (data[0] == 32 && (__builtin_expect(data[1] == '/', 1) || isHTTPorHTTPSPrefixForProxies(data + 1, end) == 1)) [[likely]] {
header.key = {start, (size_t) (data - start)};
data++;
@@ -536,7 +536,7 @@ namespace uWS
while (headers->value.length() && headers->value.front() < 33) {
headers->value.remove_prefix(1);
}
headers++;
/* We definitely have at least one header (or request line), so check if we are done */
@@ -598,7 +598,7 @@ namespace uWS
for (HttpRequest::Header *h = req->headers; (++h)->key.length(); ) {
req->bf.add(h->key);
}
/* Break if no host header (but we can have empty string which is different from nullptr) */
if (!req->getHeader("host").data()) {
return {HTTP_ERROR_400_BAD_REQUEST, FULLPTR};
@@ -611,11 +611,12 @@ namespace uWS
* ought to be handled as an error. */
std::string_view transferEncodingString = req->getHeader("transfer-encoding");
std::string_view contentLengthString = req->getHeader("content-length");
auto transferEncodingStringLen = transferEncodingString.length();
auto contentLengthStringLen = contentLengthString.length();
if (transferEncodingStringLen && contentLengthStringLen) {
/* Returning fullptr is the same as calling the errorHandler */
/* We could be smart and set an error in the context along with this, to indicate what
* http error response we might want to return */
return {HTTP_ERROR_400_BAD_REQUEST, FULLPTR};
}
@@ -623,7 +624,7 @@ namespace uWS
/* Parse query */
const char *querySeparatorPtr = (const char *) memchr(req->headers->value.data(), '?', req->headers->value.length());
req->querySeparator = (unsigned int) ((querySeparatorPtr ? querySeparatorPtr : req->headers->value.data() + req->headers->value.length()) - req->headers->value.data());
// let's check if the content length is valid before calling requestHandler
if(contentLengthStringLen) {
remainingStreamingBytes = toUnsignedInteger(contentLengthString);
@@ -633,6 +634,14 @@ namespace uWS
}
}
// let's check if the content length is valid before calling requestHandler
if(contentLengthStringLen) {
remainingStreamingBytes = toUnsignedInteger(contentLengthString);
if (remainingStreamingBytes == UINT64_MAX) {
/* Parser error */
return {HTTP_ERROR_400_BAD_REQUEST, FULLPTR};
}
}
/* If returned socket is not what we put in we need
* to break here as we either have upgraded to
* WebSockets or otherwise closed the socket. */
@@ -654,7 +663,7 @@ namespace uWS
if (transferEncodingStringLen) {
/* If a proxy sent us the transfer-encoding header that 100% means it must be chunked or else the proxy is
* not RFC 9112 compliant. Therefore it is always better to assume this is the case, since that entirely eliminates
* all forms of transfer-encoding obfuscation tricks. We just rely on the header. */
/* RFC 9112 6.3
@@ -683,7 +692,6 @@ namespace uWS
consumedTotal += consumed;
}
} else if (contentLengthStringLen) {
if constexpr (!ConsumeMinimally) {
unsigned int emittable = (unsigned int) std::min<uint64_t>(remainingStreamingBytes, length);
dataHandler(user, std::string_view(data, emittable), emittable == remainingStreamingBytes);

View File

@@ -81,8 +81,12 @@ public:
/* Called only once per request */
void writeMark() {
if (getHttpResponseData()->state & HttpResponseData<SSL>::HTTP_WROTE_DATE_HEADER) {
return;
}
/* Date is always written */
writeHeader("Date", std::string_view(((LoopData *) us_loop_ext(us_socket_context_loop(SSL, (us_socket_context(SSL, (us_socket_t *) this)))))->date, 29));
getHttpResponseData()->state |= HttpResponseData<SSL>::HTTP_WROTE_DATE_HEADER;
}
/* Returns true on success, indicating that it might be feasible to write more data.
@@ -113,7 +117,8 @@ public:
httpResponseData->state |= HttpResponseData<SSL>::HTTP_CONNECTION_CLOSE;
}
if (httpResponseData->state & HttpResponseData<SSL>::HTTP_WRITE_CALLED) {
/* if write was called and there was previously no Content-Length header set */
if (httpResponseData->state & HttpResponseData<SSL>::HTTP_WRITE_CALLED && !(httpResponseData->state & HttpResponseData<SSL>::HTTP_WROTE_CONTENT_LENGTH_HEADER) && !httpResponseData->fromAncientRequest) {
/* We do not have tryWrite-like functionalities, so ignore optional in this path */
@@ -145,6 +150,8 @@ public:
}
}
}
} else {
this->uncork();
}
/* tryEnd can never fail when in chunked mode, since we do not have tryWrite (yet), only write */
@@ -152,7 +159,7 @@ public:
return true;
} else {
/* Write content-length on first call */
if (!(httpResponseData->state & HttpResponseData<SSL>::HTTP_END_CALLED)) {
if (!(httpResponseData->state & (HttpResponseData<SSL>::HTTP_END_CALLED))) {
/* Write mark, this propagates to WebSockets too */
writeMark();
@@ -162,7 +169,8 @@ public:
Super::write("Content-Length: ", 16);
writeUnsigned64(totalSize);
Super::write("\r\n\r\n", 4);
} else {
httpResponseData->state |= HttpResponseData<SSL>::HTTP_WROTE_CONTENT_LENGTH_HEADER;
} else if (!(httpResponseData->state & (HttpResponseData<SSL>::HTTP_WRITE_CALLED))) {
Super::write("\r\n", 2);
}
@@ -207,6 +215,8 @@ public:
}
}
}
} else {
this->uncork();
}
}
@@ -231,7 +241,7 @@ public:
/* Manually upgrade to WebSocket. Typically called in upgrade handler. Immediately calls open handler.
* NOTE: Will invalidate 'this' as socket might change location in memory. Throw away after use. */
template <typename UserData>
void upgrade(UserData &&userData, std::string_view secWebSocketKey, std::string_view secWebSocketProtocol,
us_socket_t *upgrade(UserData &&userData, std::string_view secWebSocketKey, std::string_view secWebSocketProtocol,
std::string_view secWebSocketExtensions,
struct us_socket_context_t *webSocketContext) {
@@ -313,8 +323,8 @@ public:
bool wasCorked = Super::isCorked();
/* Adopting a socket invalidates it, do not rely on it directly to carry any data */
WebSocket<SSL, true, UserData> *webSocket = (WebSocket<SSL, true, UserData> *) us_socket_context_adopt_socket(SSL,
(us_socket_context_t *) webSocketContext, (us_socket_t *) this, sizeof(WebSocketData) + sizeof(UserData));
us_socket_t *usSocket = us_socket_context_adopt_socket(SSL, (us_socket_context_t *) webSocketContext, (us_socket_t *) this, sizeof(WebSocketData) + sizeof(UserData));
WebSocket<SSL, true, UserData> *webSocket = (WebSocket<SSL, true, UserData> *) usSocket;
/* For whatever reason we were corked, update cork to the new socket */
if (wasCorked) {
@@ -344,6 +354,8 @@ public:
if (webSocketContextData->openHandler) {
webSocketContextData->openHandler(webSocket);
}
return usSocket;
}
/* Immediately terminate this Http response */
@@ -427,7 +439,7 @@ public:
/* End the response with an optional data chunk. Always starts a timeout. */
void end(std::string_view data = {}, bool closeConnection = false) {
internalEnd(data, data.length(), false, true, closeConnection);
internalEnd(data, data.length(), false, !(this->getHttpResponseData()->state & HttpResponseData<SSL>::HTTP_WROTE_CONTENT_LENGTH_HEADER), closeConnection);
}
/* Try and end the response. Returns [true, true] on success.
@@ -441,12 +453,12 @@ public:
bool sendTerminatingChunk(bool closeConnection = false) {
writeStatus(HTTP_200_OK);
HttpResponseData<SSL> *httpResponseData = getHttpResponseData();
if (!(httpResponseData->state & HttpResponseData<SSL>::HTTP_WRITE_CALLED)) {
if (!(httpResponseData->state & (HttpResponseData<SSL>::HTTP_WRITE_CALLED | HttpResponseData<SSL>::HTTP_WROTE_CONTENT_LENGTH_HEADER))) {
/* Write mark on first call to write */
writeMark();
writeHeader("Transfer-Encoding", "chunked");
httpResponseData->state |= HttpResponseData<SSL>::HTTP_WRITE_CALLED;
}
/* This will be sent always when state is HTTP_WRITE_CALLED inside internalEnd, so no need to write the terminating 0 chunk here */
@@ -456,33 +468,46 @@ public:
}
/* Write parts of the response in chunking fashion. Starts timeout if failed. */
bool write(std::string_view data) {
bool write(std::string_view data, size_t *writtenPtr = nullptr) {
writeStatus(HTTP_200_OK);
/* Do not allow sending 0 chunks, they mark end of response */
if (data.empty()) {
if (writtenPtr) {
*writtenPtr = 0;
}
/* If you called us, then according to you it was fine to call us so it's fine to still call us */
return true;
}
HttpResponseData<SSL> *httpResponseData = getHttpResponseData();
if (!(httpResponseData->state & HttpResponseData<SSL>::HTTP_WRITE_CALLED)) {
/* Write mark on first call to write */
writeMark();
if (!(httpResponseData->state & HttpResponseData<SSL>::HTTP_WROTE_CONTENT_LENGTH_HEADER) && !httpResponseData->fromAncientRequest) {
if (!(httpResponseData->state & HttpResponseData<SSL>::HTTP_WRITE_CALLED)) {
/* Write mark on first call to write */
writeMark();
writeHeader("Transfer-Encoding", "chunked");
writeHeader("Transfer-Encoding", "chunked");
httpResponseData->state |= HttpResponseData<SSL>::HTTP_WRITE_CALLED;
}
Super::write("\r\n", 2);
writeUnsignedHex((unsigned int) data.length());
Super::write("\r\n", 2);
} else if (!(httpResponseData->state & HttpResponseData<SSL>::HTTP_WRITE_CALLED)) {
writeMark();
Super::write("\r\n", 2);
httpResponseData->state |= HttpResponseData<SSL>::HTTP_WRITE_CALLED;
}
Super::write("\r\n", 2);
writeUnsignedHex((unsigned int) data.length());
Super::write("\r\n", 2);
auto [written, failed] = Super::write(data.data(), (int) data.length());
/* Reset timeout on each sent chunk */
this->resetTimeout();
if (writtenPtr) {
*writtenPtr = written;
}
/* If we did not fail the write, accept more */
return !failed;
}
@@ -515,7 +540,7 @@ public:
Super::cork();
handler();
/* The only way we could possibly have changed the corked socket during handler call, would be if
* the HTTP socket was upgraded to WebSocket and caused a realloc. Because of this we cannot use "this"
* from here downwards. The corking is done with corkUnchecked() in upgrade. It steals cork. */
auto *newCorkedSocket = loopData->getCorkedSocket();
@@ -582,7 +607,7 @@ public:
/* Attach handler for aborted HTTP request */
HttpResponse *onAborted(void* userData, HttpResponseData<SSL>::OnAbortedCallback handler) {
HttpResponseData<SSL> *httpResponseData = getHttpResponseData();
httpResponseData->userData = userData;
httpResponseData->onAborted = handler;
return this;
@@ -590,7 +615,7 @@ public:
HttpResponse *onTimeout(void* userData, HttpResponseData<SSL>::OnTimeoutCallback handler) {
HttpResponseData<SSL> *httpResponseData = getHttpResponseData();
httpResponseData->userData = userData;
httpResponseData->onTimeout = handler;
return this;
@@ -620,7 +645,7 @@ public:
return this;
}
/* Attach a read handler for data sent. Will be called with FIN set true if last segment. */
void onData(void* userData, HttpResponseData<SSL>::OnDataCallback handler) {
HttpResponseData<SSL> *data = getHttpResponseData();
data->userData = userData;
data->inStream = handler;
@@ -629,6 +654,17 @@ public:
data->received_bytes_per_timeout = 0;
}
void* getSocketData() {
HttpResponseData<SSL> *httpResponseData = getHttpResponseData();
return httpResponseData->socketData;
}
void setSocketData(void* socketData) {
HttpResponseData<SSL> *httpResponseData = getHttpResponseData();
httpResponseData->socketData = socketData;
}
void setWriteOffset(uint64_t offset) {
HttpResponseData<SSL> *httpResponseData = getHttpResponseData();

View File

@@ -36,7 +36,7 @@ struct HttpResponseData : AsyncSocketData<SSL>, HttpParser {
using OnAbortedCallback = void (*)(uWS::HttpResponse<SSL>*, void*);
using OnTimeoutCallback = void (*)(uWS::HttpResponse<SSL>*, void*);
using OnDataCallback = void (*)(uWS::HttpResponse<SSL>* response, const char* chunk, size_t chunk_length, bool, void*);
/* When we are done with a response we mark it like so */
void markDone() {
onAborted = nullptr;
@@ -53,7 +53,7 @@ struct HttpResponseData : AsyncSocketData<SSL>, HttpParser {
}
/* Caller of onWritable. It is possible onWritable calls markDone so we need to borrow it. */
bool callOnWritable( uWS::HttpResponse<SSL>* response, uint64_t offset) {
bool callOnWritable(uWS::HttpResponse<SSL>* response, uint64_t offset) {
/* Borrow real onWritable */
auto* borrowedOnWritable = std::move(onWritable);
@@ -77,11 +77,14 @@ struct HttpResponseData : AsyncSocketData<SSL>, HttpParser {
HTTP_WRITE_CALLED = 2, // used
HTTP_END_CALLED = 4, // used
HTTP_RESPONSE_PENDING = 8, // used
HTTP_CONNECTION_CLOSE = 16 // used
HTTP_CONNECTION_CLOSE = 16, // used
HTTP_WROTE_CONTENT_LENGTH_HEADER = 32, // used
HTTP_WROTE_DATE_HEADER = 64, // used
};
/* Shared context pointer */
void* userData = nullptr;
void* socketData = nullptr;
/* Per socket event handlers */
OnWritableCallback onWritable = nullptr;
@@ -97,6 +100,7 @@ struct HttpResponseData : AsyncSocketData<SSL>, HttpParser {
/* Current state (content-length sent, status sent, write called, etc.) */
uint8_t state = 0;
uint8_t idleTimeout = 10; // default HTTP_TIMEOUT 10 seconds
bool fromAncientRequest = false;
#ifdef UWS_WITH_PROXY
ProxyParser proxyParser;

View File

@@ -24,6 +24,7 @@
#include "LoopData.h"
#include <libusockets.h>
#include <iostream>
#include "AsyncSocket.h"
extern "C" int bun_is_exiting();
@@ -52,6 +53,15 @@ private:
for (auto &p : loopData->preHandlers) {
p.second((Loop *) loop);
}
void *corkedSocket = loopData->getCorkedSocket();
if (corkedSocket) {
if (loopData->isCorkedSSL()) {
((uWS::AsyncSocket<true> *) corkedSocket)->uncork();
} else {
((uWS::AsyncSocket<false> *) corkedSocket)->uncork();
}
}
}
static void postCb(us_loop_t *loop) {
@@ -148,6 +158,10 @@ public:
getLazyLoop().loop = nullptr;
}
static LoopData* data(struct us_loop_t *loop) {
return (LoopData *) us_loop_ext(loop);
}
void addPostHandler(void *key, MoveOnlyFunction<void(Loop *)> &&handler) {
LoopData *loopData = (LoopData *) us_loop_ext((us_loop_t *) this);

View File

@@ -63,6 +63,7 @@ public:
}
delete [] corkBuffer;
}
void* getCorkedSocket() {
return this->corkedSocket;
}

View File

@@ -82,7 +82,14 @@ namespace ofats {
namespace any_detail {
using buffer = std::aligned_storage_t<sizeof(void*) * 2, alignof(void*)>;
template <std::size_t Len, std::size_t Align>
class my_aligned_storage_t {
private:
alignas(Align) std::byte t_buff[Len];
};
using buffer = my_aligned_storage_t<sizeof(void*) * 2, alignof(void*)>;
template <class T>
inline constexpr bool is_small_object_v =

View File

@@ -115,7 +115,7 @@ public:
char header[10];
int header_length = (int) protocol::formatMessage<isServer>(header, "", 0, opCode, message.length(), compress, fin);
int written = us_socket_write2(0, (struct us_socket_t *)this, header, header_length, message.data(), (int) message.length());
if (written != header_length + (int) message.length()) {
/* Buffer up backpressure */
if (written > header_length) {
@@ -289,7 +289,7 @@ public:
);
WebSocketData *webSocketData = (WebSocketData *) us_socket_ext(SSL, (us_socket_t *) this);
if (!webSocketData->subscriber) { return false; }
/* Cannot return numSubscribers as this is only for this particular websocket context */

View File

@@ -82,7 +82,7 @@ public:
std::pair<unsigned short, unsigned short> idleTimeoutComponents;
/* This is run once on start-up */
void calculateIdleTimeoutCompnents(unsigned short idleTimeout) {
void calculateIdleTimeoutComponents(unsigned short idleTimeout) {
unsigned short margin = 4;
/* 4, 8 or 16 seconds margin based on idleTimeout */
while ((int) idleTimeout - margin * 2 >= margin * 2 && margin < 16) {

View File

@@ -345,7 +345,7 @@ function Install-Rust {
function Install-Llvm {
Install-Package llvm `
-Command clang-cl `
-Version "18.1.8"
-Version "19.1.7"
Add-To-Path "$env:ProgramFiles\LLVM\bin"
}

View File

@@ -1,5 +1,5 @@
#!/bin/sh
# Version: 9
# Version: 10
# A script that installs the dependencies needed to build and test Bun.
# This should work on macOS and Linux with a POSIX shell.
@@ -897,7 +897,7 @@ install_build_essentials() {
}
llvm_version_exact() {
print "18.1.8"
print "19.1.7"
}
llvm_version() {
@@ -915,14 +915,12 @@ install_llvm() {
install_packages "llvm@$(llvm_version)"
;;
apk)
# alpine doesn't have a lld19 package on 3.21 atm so use bare one for now
install_packages \
"llvm$(llvm_version)" \
"clang$(llvm_version)" \
"scudo-malloc" \
--repository "http://dl-cdn.alpinelinux.org/alpine/edge/main"
install_packages \
"lld$(llvm_version)" \
--repository "http://dl-cdn.alpinelinux.org/alpine/edge/community"
"lld"
;;
esac
}
@@ -966,7 +964,7 @@ install_gcc() {
;;
esac
llvm_v="18"
llvm_v="19"
append_to_profile "export CC=clang-${llvm_v}"
append_to_profile "export CXX=clang++-${llvm_v}"
@@ -1132,6 +1130,35 @@ install_tailscale() {
esac
}
install_fuse_python() {
# only linux needs this
case "$pm" in
apk)
# Build and install from source (https://github.com/libfuse/python-fuse/blob/master/INSTALL)
install_packages \
python3-dev \
fuse-dev \
pkgconf \
py3-setuptools
python_fuse_version="1.0.9"
python_fuse_tarball=$(download_file "https://github.com/libfuse/python-fuse/archive/refs/tags/v$python_fuse_version.tar.gz")
python_fuse_tmpdir="$(dirname "$python_fuse_tarball")"
execute tar -xzf "$python_fuse_tarball" -C "$python_fuse_tmpdir"
execute sh -c "cd '$python_fuse_tmpdir/python-fuse-$python_fuse_version' && python setup.py build"
execute_sudo sh -c "cd '$python_fuse_tmpdir/python-fuse-$python_fuse_version' && python setup.py install"
# For Alpine we also need to make sure the kernel module is automatically loaded
execute_sudo sh -c "echo fuse >> /etc/modules-load.d/fuse.conf"
# Check that it was actually installed
execute python -c 'import fuse'
;;
apt | dnf | yum)
install_packages python3-fuse
;;
esac
}
create_buildkite_user() {
if ! [ "$ci" = "1" ] || ! [ "$os" = "linux" ]; then
return
@@ -1323,6 +1350,7 @@ main() {
install_common_software
install_build_essentials
install_chromium
install_fuse_python
clean_system
}

View File

@@ -256,13 +256,15 @@ async function runTests() {
for (const testPath of tests) {
const absoluteTestPath = join(testsPath, testPath);
const title = relative(cwd, absoluteTestPath).replaceAll(sep, "/");
if (isNodeParallelTest(testPath)) {
const runWithBunTest = title.includes("needs-test") || readFileSync(absoluteTestPath, "utf-8").includes('bun:test');
if (isNodeTest(testPath)) {
const testContent = readFileSync(absoluteTestPath, "utf-8");
const runWithBunTest =
title.includes("needs-test") || testContent.includes("bun:test") || testContent.includes("node:test");
const subcommand = runWithBunTest ? "test" : "run";
await runTest(title, async () => {
const { ok, error, stdout } = await spawnBun(execPath, {
cwd: cwd,
args: [subcommand, "--config=./bunfig.node-test.toml", absoluteTestPath],
args: [subcommand, "--config=" + join(import.meta.dirname, "../bunfig.node-test.toml"), absoluteTestPath],
timeout: getNodeParallelTestTimeout(title),
env: {
FORCE_COLOR: "0",
@@ -870,19 +872,26 @@ function isJavaScriptTest(path) {
}
/**
* @param {string} testPath
* @param {string} path
* @returns {boolean}
*/
function isNodeParallelTest(testPath) {
return testPath.replaceAll(sep, "/").includes("js/node/test/parallel/");
function isNodeTest(path) {
// Do not run node tests on macOS x64 in CI
// TODO: Unclear why we decided to do this?
if (isCI && isMacOS && isX64) {
return false;
}
const unixPath = path.replaceAll(sep, "/");
return unixPath.includes("js/node/test/parallel/") || unixPath.includes("js/node/test/sequential/");
}
/**
* @param {string} testPath
* @param {string} path
* @returns {boolean}
*/
function isNodeSequentialTest(testPath) {
return testPath.replaceAll(sep, "/").includes("js/node/test/sequential/");
function isClusterTest(path) {
const unixPath = path.replaceAll(sep, "/");
return unixPath.includes("js/node/cluster/test-") && unixPath.endsWith(".ts");
}
/**
@@ -890,21 +899,17 @@ function isNodeSequentialTest(testPath) {
* @returns {boolean}
*/
function isTest(path) {
if (isNodeParallelTest(path) && targetDoesRunNodeTests()) return true;
if (isNodeSequentialTest(path) && targetDoesRunNodeTests()) return true;
if (path.replaceAll(sep, "/").startsWith("js/node/cluster/test-") && path.endsWith(".ts")) return true;
return isTestStrict(path);
return isNodeTest(path) || isClusterTest(path) ? true : isTestStrict(path);
}
/**
* @param {string} path
* @returns {boolean}
*/
function isTestStrict(path) {
return isJavaScript(path) && /\.test|spec\./.test(basename(path));
}
function targetDoesRunNodeTests() {
if (isMacOS && isX64) return false;
return true;
}
/**
* @param {string} path
* @returns {boolean}

View File

@@ -466,11 +466,7 @@ pub const StandaloneModuleGraph = struct {
return output_bytes;
}
const page_size = if (Environment.isLinux and Environment.isAarch64)
// some linux distros do 64 KB pages on aarch64
64 * 1024
else
std.mem.page_size;
const page_size = std.heap.page_size_max;
pub const InjectOptions = struct {
windows_hide_console: bool = false,

View File

@@ -74,16 +74,17 @@ pub fn allocator(scope: *AllocationScope) Allocator {
const vtable: Allocator.VTable = .{
.alloc = alloc,
.resize = resize,
.remap = remap,
.free = free,
};
fn alloc(ctx: *anyopaque, len: usize, ptr_align: u8, ret_addr: usize) ?[*]u8 {
fn alloc(ctx: *anyopaque, len: usize, alignment: std.mem.Alignment, ret_addr: usize) ?[*]u8 {
const scope: *AllocationScope = @ptrCast(@alignCast(ctx));
scope.state.mutex.lock();
defer scope.state.mutex.unlock();
scope.state.allocations.ensureUnusedCapacity(scope.parent, 1) catch
return null;
const result = scope.parent.vtable.alloc(scope.parent.ptr, len, ptr_align, ret_addr) orelse
const result = scope.parent.vtable.alloc(scope.parent.ptr, len, alignment, ret_addr) orelse
return null;
const trace = StoredTrace.capture(ret_addr);
scope.state.allocations.putAssumeCapacityNoClobber(result, .{
@@ -94,12 +95,17 @@ fn alloc(ctx: *anyopaque, len: usize, ptr_align: u8, ret_addr: usize) ?[*]u8 {
return result;
}
fn resize(ctx: *anyopaque, buf: []u8, buf_align: u8, new_len: usize, ret_addr: usize) bool {
fn resize(ctx: *anyopaque, buf: []u8, alignment: std.mem.Alignment, new_len: usize, ret_addr: usize) bool {
const scope: *AllocationScope = @ptrCast(@alignCast(ctx));
return scope.parent.vtable.resize(scope.parent.ptr, buf, buf_align, new_len, ret_addr);
return scope.parent.vtable.resize(scope.parent.ptr, buf, alignment, new_len, ret_addr);
}
fn free(ctx: *anyopaque, buf: []u8, buf_align: u8, ret_addr: usize) void {
fn remap(ctx: *anyopaque, buf: []u8, alignment: std.mem.Alignment, new_len: usize, ret_addr: usize) ?[*]u8 {
const scope: *AllocationScope = @ptrCast(@alignCast(ctx));
return scope.parent.vtable.remap(scope.parent.ptr, buf, alignment, new_len, ret_addr);
}
fn free(ctx: *anyopaque, buf: []u8, alignment: std.mem.Alignment, ret_addr: usize) void {
const scope: *AllocationScope = @ptrCast(@alignCast(ctx));
scope.state.mutex.lock();
defer scope.state.mutex.unlock();
@@ -137,7 +143,7 @@ fn free(ctx: *anyopaque, buf: []u8, buf_align: u8, ret_addr: usize) void {
// sanitizer does not catch the invalid free.
}
scope.parent.vtable.free(scope.parent.ptr, buf, buf_align, ret_addr);
scope.parent.vtable.free(scope.parent.ptr, buf, alignment, ret_addr);
// If asan did not catch the free, panic now.
if (invalid) @panic("Invalid free");

View File

@@ -33,7 +33,7 @@ pub fn free(this: *const NullableAllocator, bytes: []const u8) void {
if (this.get()) |allocator| {
if (bun.String.isWTFAllocator(allocator)) {
// workaround for https://github.com/ziglang/zig/issues/4298
bun.String.StringImplAllocator.free(allocator.ptr, @constCast(bytes), 0, 0);
bun.String.StringImplAllocator.free(allocator.ptr, @constCast(bytes), .fromByteUnits(1), 0);
return;
}

View File

@@ -62,19 +62,15 @@ pub const LinuxMemFdAllocator = struct {
}
const AllocatorInterface = struct {
fn alloc(_: *anyopaque, _: usize, _: u8, _: usize) ?[*]u8 {
fn alloc(_: *anyopaque, _: usize, _: std.mem.Alignment, _: usize) ?[*]u8 {
// it should perform no allocations or resizes
return null;
}
fn resize(_: *anyopaque, _: []u8, _: u8, _: usize, _: usize) bool {
return false;
}
fn free(
ptr: *anyopaque,
buf: []u8,
_: u8,
_: std.mem.Alignment,
_: usize,
) void {
var this: *LinuxMemFdAllocator = @alignCast(@ptrCast(ptr));
@@ -86,7 +82,8 @@ pub const LinuxMemFdAllocator = struct {
pub const VTable = &std.mem.Allocator.VTable{
.alloc = &AllocatorInterface.alloc,
.resize = &resize,
.resize = &std.mem.Allocator.noResize,
.remap = &std.mem.Allocator.noRemap,
.free = &free,
};
};
@@ -95,7 +92,7 @@ pub const LinuxMemFdAllocator = struct {
var size = len;
// size rounded up to nearest page
size += (size + std.mem.page_size - 1) & std.mem.page_size;
size = std.mem.alignForward(usize, size, std.heap.pageSize());
var flags_mut = flags;
flags_mut.TYPE = .SHARED;

View File

@@ -4,9 +4,10 @@ const std = @import("std");
/// Single allocation only.
///
pub const MaxHeapAllocator = struct {
array_list: std.ArrayList(u8),
array_list: std.ArrayListAligned(u8, @alignOf(std.c.max_align_t)),
fn alloc(ptr: *anyopaque, len: usize, _: u8, _: usize) ?[*]u8 {
fn alloc(ptr: *anyopaque, len: usize, alignment: std.mem.Alignment, _: usize) ?[*]u8 {
bun.assert(alignment.toByteUnits() <= @alignOf(std.c.max_align_t));
var this = bun.cast(*MaxHeapAllocator, ptr);
this.array_list.items.len = 0;
this.array_list.ensureTotalCapacity(len) catch return null;
@@ -14,7 +15,7 @@ pub const MaxHeapAllocator = struct {
return this.array_list.items.ptr;
}
fn resize(_: *anyopaque, buf: []u8, _: u8, new_len: usize, _: usize) bool {
fn resize(_: *anyopaque, buf: []u8, _: std.mem.Alignment, new_len: usize, _: usize) bool {
_ = new_len;
_ = buf;
@panic("not implemented");
@@ -23,7 +24,7 @@ pub const MaxHeapAllocator = struct {
fn free(
_: *anyopaque,
_: []u8,
_: u8,
_: std.mem.Alignment,
_: usize,
) void {}
@@ -39,9 +40,10 @@ pub const MaxHeapAllocator = struct {
.alloc = &alloc,
.free = &free,
.resize = &resize,
.remap = &std.mem.Allocator.noRemap,
};
pub fn init(this: *MaxHeapAllocator, allocator: std.mem.Allocator) std.mem.Allocator {
this.array_list = std.ArrayList(u8).init(allocator);
this.array_list = .init(allocator);
return std.mem.Allocator{
.ptr = this,

View File

@@ -12,7 +12,7 @@ const Environment = @import("../env.zig");
fn mimalloc_free(
_: *anyopaque,
buf: []u8,
buf_align: u8,
alignment: mem.Alignment,
_: usize,
) void {
if (comptime Environment.enable_logs)
@@ -23,8 +23,8 @@ fn mimalloc_free(
// let's only enable it in debug mode
if (comptime Environment.isDebug) {
assert(mimalloc.mi_is_in_heap_region(buf.ptr));
if (mimalloc.canUseAlignedAlloc(buf.len, buf_align))
mimalloc.mi_free_size_aligned(buf.ptr, buf.len, buf_align)
if (mimalloc.canUseAlignedAlloc(buf.len, alignment.toByteUnits()))
mimalloc.mi_free_size_aligned(buf.ptr, buf.len, alignment.toByteUnits())
else
mimalloc.mi_free_size(buf.ptr, buf.len);
} else {
@@ -35,12 +35,12 @@ fn mimalloc_free(
const CAllocator = struct {
pub const supports_posix_memalign = true;
fn alignedAlloc(len: usize, alignment: usize) ?[*]u8 {
fn alignedAlloc(len: usize, alignment: mem.Alignment) ?[*]u8 {
if (comptime Environment.enable_logs)
log("mi_alloc({d}, {d})", .{ len, alignment });
log("mi_alloc({d}, {d})", .{ len, alignment.toByteUnits() });
const ptr: ?*anyopaque = if (mimalloc.canUseAlignedAlloc(len, alignment))
mimalloc.mi_malloc_aligned(len, alignment)
const ptr: ?*anyopaque = if (mimalloc.canUseAlignedAlloc(len, alignment.toByteUnits()))
mimalloc.mi_malloc_aligned(len, alignment.toByteUnits())
else
mimalloc.mi_malloc(len);
@@ -60,16 +60,11 @@ const CAllocator = struct {
return mimalloc.mi_malloc_size(ptr);
}
fn alloc(_: *anyopaque, len: usize, log2_align: u8, _: usize) ?[*]u8 {
if (comptime FeatureFlags.alignment_tweak) {
return alignedAlloc(len, log2_align);
}
const alignment = @as(usize, 1) << @as(Allocator.Log2Align, @intCast(log2_align));
fn alloc(_: *anyopaque, len: usize, alignment: mem.Alignment, _: usize) ?[*]u8 {
return alignedAlloc(len, alignment);
}
fn resize(_: *anyopaque, buf: []u8, _: u8, new_len: usize, _: usize) bool {
fn resize(_: *anyopaque, buf: []u8, _: mem.Alignment, new_len: usize, _: usize) bool {
if (new_len <= buf.len) {
return true;
}
@@ -93,17 +88,18 @@ pub const c_allocator = Allocator{
const c_allocator_vtable = &Allocator.VTable{
.alloc = &CAllocator.alloc,
.resize = &CAllocator.resize,
.remap = &std.mem.Allocator.noRemap,
.free = &CAllocator.free,
};
const ZAllocator = struct {
pub const supports_posix_memalign = true;
fn alignedAlloc(len: usize, alignment: usize) ?[*]u8 {
fn alignedAlloc(len: usize, alignment: mem.Alignment) ?[*]u8 {
log("ZAllocator.alignedAlloc: {d}\n", .{len});
const ptr = if (mimalloc.canUseAlignedAlloc(len, alignment))
mimalloc.mi_zalloc_aligned(len, alignment)
const ptr = if (mimalloc.canUseAlignedAlloc(len, alignment.toByteUnits()))
mimalloc.mi_zalloc_aligned(len, alignment.toByteUnits())
else
mimalloc.mi_zalloc(len);
@@ -123,11 +119,11 @@ const ZAllocator = struct {
return mimalloc.mi_malloc_size(ptr);
}
fn alloc(_: *anyopaque, len: usize, ptr_align: u8, _: usize) ?[*]u8 {
return alignedAlloc(len, ptr_align);
fn alloc(_: *anyopaque, len: usize, alignment: mem.Alignment, _: usize) ?[*]u8 {
return alignedAlloc(len, alignment);
}
fn resize(_: *anyopaque, buf: []u8, _: u8, new_len: usize, _: usize) bool {
fn resize(_: *anyopaque, buf: []u8, _: mem.Alignment, new_len: usize, _: usize) bool {
if (new_len <= buf.len) {
return true;
}
@@ -150,6 +146,7 @@ pub const z_allocator = Allocator{
const z_allocator_vtable = Allocator.VTable{
.alloc = &ZAllocator.alloc,
.resize = &ZAllocator.resize,
.remap = &std.mem.Allocator.noRemap,
.free = &ZAllocator.free,
};
const HugeAllocator = struct {

View File

@@ -209,11 +209,11 @@ pub const Arena = struct {
}
pub const supports_posix_memalign = true;
fn alignedAlloc(heap: *mimalloc.Heap, len: usize, alignment: usize) ?[*]u8 {
fn alignedAlloc(heap: *mimalloc.Heap, len: usize, alignment: mem.Alignment) ?[*]u8 {
log("Malloc: {d}\n", .{len});
const ptr: ?*anyopaque = if (mimalloc.canUseAlignedAlloc(len, alignment))
mimalloc.mi_heap_malloc_aligned(heap, len, alignment)
const ptr: ?*anyopaque = if (mimalloc.canUseAlignedAlloc(len, alignment.toByteUnits()))
mimalloc.mi_heap_malloc_aligned(heap, len, alignment.toByteUnits())
else
mimalloc.mi_heap_malloc(heap, len);
@@ -234,15 +234,10 @@ pub const Arena = struct {
return mimalloc.mi_malloc_usable_size(ptr);
}
fn alloc(arena: *anyopaque, len: usize, log2_align: u8, _: usize) ?[*]u8 {
fn alloc(arena: *anyopaque, len: usize, alignment: mem.Alignment, _: usize) ?[*]u8 {
const this = bun.cast(*mimalloc.Heap, arena);
// if (comptime Environment.isDebug)
// ArenaRegistry.assert(.{ .heap = this });
if (comptime FeatureFlags.alignment_tweak) {
return alignedAlloc(this, len, log2_align);
}
const alignment = @as(usize, 1) << @as(Allocator.Log2Align, @intCast(log2_align));
return alignedAlloc(
this,
@@ -251,7 +246,7 @@ pub const Arena = struct {
);
}
fn resize(_: *anyopaque, buf: []u8, _: u8, new_len: usize, _: usize) bool {
fn resize(_: *anyopaque, buf: []u8, _: mem.Alignment, new_len: usize, _: usize) bool {
if (new_len <= buf.len) {
return true;
}
@@ -267,7 +262,7 @@ pub const Arena = struct {
fn free(
_: *anyopaque,
buf: []u8,
buf_align: u8,
alignment: mem.Alignment,
_: usize,
) void {
// mi_free_size internally just asserts the size
@@ -275,8 +270,8 @@ pub const Arena = struct {
// but its good to have that assertion
if (comptime Environment.isDebug) {
assert(mimalloc.mi_is_in_heap_region(buf.ptr));
if (mimalloc.canUseAlignedAlloc(buf.len, buf_align))
mimalloc.mi_free_size_aligned(buf.ptr, buf.len, buf_align)
if (mimalloc.canUseAlignedAlloc(buf.len, alignment.toByteUnits()))
mimalloc.mi_free_size_aligned(buf.ptr, buf.len, alignment.toByteUnits())
else
mimalloc.mi_free_size(buf.ptr, buf.len);
} else {
@@ -288,5 +283,6 @@ pub const Arena = struct {
const c_allocator_vtable = Allocator.VTable{
.alloc = &Arena.alloc,
.resize = &Arena.resize,
+ .remap = &std.mem.Allocator.noRemap,
.free = &Arena.free,
};
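These allocator hunks track Zig 0.14's new `std.mem.Allocator.VTable`: alignment is now passed as `std.mem.Alignment` (a log2-encoded enum, converted for C-style backends with `toByteUnits()`), and the vtable gained a fourth `remap` callback, with `Allocator.noRemap` as the conservative opt-out used above. A minimal sketch of an allocator conforming to the new interface; it simply delegates to the page allocator and is illustrative, not Bun's code:

```zig
const std = @import("std");
const mem = std.mem;
const Allocator = mem.Allocator;

const ExampleAllocator = struct {
    fn alloc(_: *anyopaque, len: usize, alignment: mem.Alignment, ret_addr: usize) ?[*]u8 {
        // `alignment` is log2-encoded; a C backend such as mimalloc would
        // take `alignment.toByteUnits()`. Here we forward it unchanged.
        return std.heap.page_allocator.rawAlloc(len, alignment, ret_addr);
    }
    fn resize(_: *anyopaque, buf: []u8, alignment: mem.Alignment, new_len: usize, ret_addr: usize) bool {
        return std.heap.page_allocator.rawResize(buf, alignment, new_len, ret_addr);
    }
    fn free(_: *anyopaque, buf: []u8, alignment: mem.Alignment, ret_addr: usize) void {
        std.heap.page_allocator.rawFree(buf, alignment, ret_addr);
    }
};

pub const example_allocator: Allocator = .{
    .ptr = undefined,
    .vtable = &.{
        .alloc = &ExampleAllocator.alloc,
        .resize = &ExampleAllocator.resize,
        // Zig 0.14 added `remap`; declining it, as the vtables above do,
        // makes reallocation fall back to alloc + copy + free.
        .remap = &Allocator.noRemap,
        .free = &ExampleAllocator.free,
    },
};
```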

View File

@@ -128,6 +128,8 @@ pub const Features = struct {
pub var process_dlopen: usize = 0;
pub var postgres_connections: usize = 0;
pub var s3: usize = 0;
+ pub var csrf_verify: usize = 0;
+ pub var csrf_generate: usize = 0;
comptime {
@export(&napi_module_register, .{ .name = "Bun__napi_module_register_count" });
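The surrounding `comptime` block exports each counter under a C symbol so native code can bump it directly. A minimal sketch of that pattern applied to one of the new CSRF counters; the symbol name follows the `Bun__..._count` convention visible above but is a guess, not taken from the diff:

```zig
pub var csrf_verify: usize = 0;

comptime {
    // Exported under a C symbol so the C++ side can increment the
    // counter without going through Zig. Symbol name is illustrative.
    @export(&csrf_verify, .{ .name = "Bun__csrf_verify_count" });
}
```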

View File

@@ -187,7 +187,7 @@ pub const FilePoll = struct {
pub fn unregister(this: *FilePoll, loop: *Loop) bool {
_ = loop;
- // TODO(@paperdave): This cast is extremely suspicious. At best, `fd` is
+ // TODO(@paperclover): This cast is extremely suspicious. At best, `fd` is
// the wrong type (it should be a uv handle), at worst this code is a
// crash due to invalid memory access.
uv.uv_unref(@ptrFromInt(@intFromEnum(this.fd)));

View File

@@ -94,7 +94,7 @@ pub fn BabyList(comptime Type: type) type {
this.update(list_);
}
- pub fn popOrNull(this: *@This()) ?Type {
+ pub fn pop(this: *@This()) ?Type {
if (this.len == 0) return null;
this.len -= 1;
return this.ptr[this.len];
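This rename mirrors Zig 0.14's standard library, where `popOrNull` was folded into `pop`, which now returns an optional. A small test-shaped sketch of the new convention:

```zig
const std = @import("std");

test "pop returns null when empty" {
    var list: std.ArrayListUnmanaged(u32) = .empty;
    defer list.deinit(std.testing.allocator);
    try list.append(std.testing.allocator, 1);
    try list.append(std.testing.allocator, 2);

    // Zig 0.14: `pop` returns `?T`; the separate popOrNull is gone,
    // so BabyList (and the free-list call sites below) follow suit.
    try std.testing.expectEqual(@as(?u32, 2), list.pop());
    try std.testing.expectEqual(@as(?u32, 1), list.pop());
    try std.testing.expectEqual(@as(?u32, null), list.pop());
}
```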

View File

@@ -10,6 +10,7 @@
pub const DevServer = @This();
pub const debug = bun.Output.Scoped(.DevServer, false);
pub const igLog = bun.Output.scoped(.IncrementalGraph, false);
+ const DebugHTTPServer = @import("../bun.js/api/server.zig").DebugHTTPServer;
pub const Options = struct {
/// Arena must live until DevServer.deinit()
@@ -99,6 +100,11 @@ server_register_update_callback: JSC.Strong,
bun_watcher: *bun.Watcher,
directory_watchers: DirectoryWatchStore,
watcher_atomics: WatcherAtomics,
+ testing_batch_events: union(enum) {
+ disabled,
+ enable_after_bundle,
+ enabled: TestingBatch,
+ },
/// Number of bundles that have been executed. This is currently not read, but
/// will be used later to determine when to invoke graph garbage collection.
@@ -165,6 +171,9 @@ deferred_request_pool: bun.HiveArray(DeferredRequest.Node, DeferredRequest.max_p
/// UWS can handle closing the websocket connections themselves
active_websocket_connections: std.AutoHashMapUnmanaged(*HmrSocket, void),
+ relative_path_buf_lock: bun.DebugThreadLock,
+ relative_path_buf: bun.PathBuffer,
// Debugging
dump_dir: if (bun.FeatureFlags.bake_debugging_features) ?std.fs.Dir else void,
@@ -379,7 +388,7 @@ pub fn init(options: Options) bun.JSOOM!*DevServer {
.emit_visualizer_events = 0,
.has_pre_crash_handler = bun.FeatureFlags.bake_debugging_features and
options.dump_state_on_crash orelse
- bun.getRuntimeFeatureFlag("BUN_DUMP_STATE_ON_CRASH"),
+ bun.getRuntimeFeatureFlag("BUN_DUMP_STATE_ON_CRASH"),
.frontend_only = options.framework.file_system_router_types.len == 0,
.client_graph = .empty,
.server_graph = .empty,
@@ -409,6 +418,8 @@ pub fn init(options: Options) bun.JSOOM!*DevServer {
true
else
bun.getRuntimeFeatureFlag("BUN_ASSUME_PERFECT_INCREMENTAL"),
+ .relative_path_buf_lock = .unlocked,
+ .testing_batch_events = .disabled,
.server_transpiler = undefined,
.client_transpiler = undefined,
@@ -419,6 +430,7 @@ pub fn init(options: Options) bun.JSOOM!*DevServer {
.watcher_atomics = undefined,
.log = undefined,
.deferred_request_pool = undefined,
+ .relative_path_buf = undefined,
});
errdefer bun.destroy(dev);
const allocator = dev.allocation_scope.allocator();
@@ -645,6 +657,8 @@ pub fn deinit(dev: *DevServer) void {
.framework = {},
.bundler_options = {},
.assume_perfect_incremental_bundling = {},
+ .relative_path_buf = {},
+ .relative_path_buf_lock = {},
.graph_safety_lock = dev.graph_safety_lock.lock(),
.bun_watcher = dev.bun_watcher.deinit(true),
@@ -739,6 +753,13 @@ pub fn deinit(dev: *DevServer) void {
event.aligned.files.deinit(dev.allocator);
event.aligned.extra_files.deinit(dev.allocator);
},
+ .testing_batch_events = switch (dev.testing_batch_events) {
+ .disabled => {},
+ .enabled => |*batch| {
+ batch.entry_points.deinit(allocator);
+ },
+ .enable_after_bundle => {},
+ },
};
dev.allocation_scope.deinit();
bun.destroy(dev);
@@ -775,6 +796,8 @@ pub fn memoryCost(dev: *DevServer) usize {
.server_register_update_callback = {},
.deferred_request_pool = {},
.assume_perfect_incremental_bundling = {},
+ .relative_path_buf = {},
+ .relative_path_buf_lock = {},
// pointers that are not considered a part of DevServer
.vm = {},
@@ -886,6 +909,13 @@ pub fn memoryCost(dev: *DevServer) usize {
.route_lookup = {
cost += memoryCostArrayHashMap(dev.route_lookup);
},
+ .testing_batch_events = switch (dev.testing_batch_events) {
+ .disabled => {},
+ .enabled => |batch| {
+ cost += memoryCostArrayHashMap(batch.entry_points.set);
+ },
+ .enable_after_bundle => {},
+ },
};
return cost;
}
@@ -915,7 +945,7 @@ fn initServerRuntime(dev: *DevServer) void {
if (!interface.isObject()) @panic("Internal assertion failure: expected interface from HMR runtime to be an object");
const fetch_function = interface.get(dev.vm.global, "handleRequest") catch null orelse
@panic("Internal assertion failure: expected interface from HMR runtime to contain handleRequest");
- bun.assert(fetch_function.isCallable(dev.vm.jsc));
+ bun.assert(fetch_function.isCallable());
dev.server_fetch_function_callback = JSC.Strong.create(fetch_function, dev.vm.global);
const register_update = interface.get(dev.vm.global, "registerUpdate") catch null orelse
@panic("Internal assertion failure: expected interface from HMR runtime to contain registerUpdate");
@@ -1172,6 +1202,8 @@ fn ensureRouteIsBundled(
.loaded => {},
}
+ // TODO(@heimskr): store the request?
// Prepare a bundle with just this route.
var sfa = std.heap.stackFallback(4096, dev.allocator);
const temp_alloc = sfa.get();
@@ -1357,6 +1389,7 @@ fn onFrameworkRequestWithBundle(
router_type.server_file_string.get() orelse str: {
const name = dev.server_graph.bundled_files.keys()[fromOpaqueFileId(.server, router_type.server_file).get()];
const str = bun.String.createUTF8ForJS(dev.vm.global, dev.relativePath(name));
+ dev.releaseRelativePathBuf();
router_type.server_file_string = JSC.Strong.create(str, dev.vm.global);
break :str str;
},
@@ -1374,10 +1407,12 @@ fn onFrameworkRequestWithBundle(
route = dev.router.routePtr(bundle.route_index);
var route_name = bun.String.createUTF8(dev.relativePath(keys[fromOpaqueFileId(.server, route.file_page.unwrap().?).get()]));
arr.putIndex(global, 0, route_name.transferToJS(global));
+ dev.releaseRelativePathBuf();
n = 1;
while (true) {
if (route.file_layout.unwrap()) |layout| {
var layout_name = bun.String.createUTF8(dev.relativePath(keys[fromOpaqueFileId(.server, layout).get()]));
+ defer dev.releaseRelativePathBuf();
arr.putIndex(global, @intCast(n), layout_name.transferToJS(global));
n += 1;
}
@@ -1502,7 +1537,7 @@ fn generateHTMLPayload(dev: *DevServer, route_bundle_index: RouteBundle.Index, r
return array.items;
}
- fn getJavaScriptCodeForHTMLFile(
+ fn generateJavaScriptCodeForHTMLFile(
dev: *DevServer,
index: bun.JSAst.Index,
import_records: []bun.BabyList(bun.ImportRecord),
@@ -1511,14 +1546,14 @@ fn getJavaScriptCodeForHTMLFile(
) bun.OOM![]const u8 {
var sfa_state = std.heap.stackFallback(65536, dev.allocator);
const sfa = sfa_state.get();
- var array: std.ArrayListUnmanaged(u8) = std.ArrayListUnmanaged(u8).initCapacity(sfa, 65536) catch bun.outOfMemory();
+ var array = std.ArrayListUnmanaged(u8).initCapacity(sfa, 65536) catch bun.outOfMemory();
defer array.deinit(sfa);
const w = array.writer(sfa);
try w.writeAll(" ");
try bun.js_printer.writeJSONString(input_file_sources[index.get()].path.pretty, @TypeOf(w), w, .utf8);
try w.writeAll("(m) {\n ");
try w.writeAll(" return Promise.all([\n");
try w.writeAll(": [ [");
var any = false;
for (import_records[index.get()].slice()) |import| {
if (import.source_index.isValid()) {
if (!loaders[import.source_index.get()].isJavaScriptLike())
@@ -1530,15 +1565,20 @@ fn getJavaScriptCodeForHTMLFile(
if (file.flags.kind != .js)
continue;
}
try w.writeAll(" m.dynamicImport(");
if (!any) {
any = true;
try w.writeAll("\n");
}
try w.writeAll(" ");
try bun.js_printer.writeJSONString(import.path.pretty, @TypeOf(w), w, .utf8);
try w.writeAll("),\n ");
try w.writeAll(", 0,\n");
}
try w.writeAll(" ]);\n ");
try w.writeAll("},\n");
if (any) {
try w.writeAll(" ");
}
try w.writeAll("], [], [], () => {}, false],\n");
// Avoid-recloning if it is was moved to the hap
// Avoid-recloning if it is was moved to the heap
return if (array.items.ptr == &sfa_state.buffer)
try dev.allocator.dupe(u8, array.items)
else
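Taken together, the writer calls above change the generated HTML-module stub from a function body wrapping `Promise.all` over `m.dynamicImport(...)` calls into a plain data tuple in the module registry. Under the new format, and per the `UnloadedESM` declaration later in this diff (`[deps, exportKeys, starImports, load, isAsync]`), an HTML file with two JS imports would serialize roughly like the string below; paths and indentation are illustrative:

```zig
// The emitted JS registry entry: two dependencies, no export keys, no
// star imports, a no-op load function, and isAsync = false.
const example_entry =
    \\  "routes/index.html": [ [
    \\    "./index.tsx", 0,
    \\    "./chart.tsx", 0,
    \\  ], [], [], () => {}, false],
;
```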
@@ -1745,7 +1785,7 @@ fn prepareAndLogResolutionFailures(dev: *DevServer) !void {
// Theoretically, it shouldn't be possible for errors to leak into dev.log, but just in
// case that happens, they can be printed out.
- if (dev.log.hasErrors()) {
+ if (dev.log.hasErrors() and dev.log.msgs.items.len > 0) {
if (Environment.isDebug) {
Output.debugWarn("dev.log should not be written into when using DevServer", .{});
}
@@ -1853,7 +1893,7 @@ fn generateClientBundle(dev: *DevServer, route_bundle: *RouteBundle) bun.OOM![]u
break :brk;
if (!dev.client_graph.stale_files.isSet(rfr_index.get())) {
try dev.client_graph.traceImports(rfr_index, &gts, .find_client_modules);
- react_fast_refresh_id = dev.relativePath(rfr.import_source);
+ react_fast_refresh_id = rfr.import_source;
}
}
@@ -1876,7 +1916,7 @@ fn generateClientBundle(dev: *DevServer, route_bundle: *RouteBundle) bun.OOM![]u
const client_bundle = dev.client_graph.takeJSBundle(.{
.kind = .initial_response,
.initial_response_entry_point = if (client_file) |index|
- dev.relativePath(dev.client_graph.bundled_files.keys()[index.get()])
+ dev.client_graph.bundled_files.keys()[index.get()]
else
"",
.react_refresh_entry_point = react_fast_refresh_id,
@@ -1956,6 +1996,7 @@ fn makeArrayForServerComponentsPatch(dev: *DevServer, global: *JSC.JSGlobalObjec
const names = dev.server_graph.bundled_files.keys();
for (items, 0..) |item, i| {
const str = bun.String.createUTF8(dev.relativePath(names[item.get()]));
+ defer dev.releaseRelativePathBuf();
defer str.deref();
arr.putIndex(global, @intCast(i), str.toJS(global));
}
@@ -2011,6 +2052,7 @@ pub fn finalizeBundle(
bv2: *bun.bundle_v2.BundleV2,
result: bun.bundle_v2.DevServerOutput,
) bun.OOM!void {
+ var had_sent_hmr_event = false;
defer {
bv2.deinit();
dev.current_bundle = null;
@@ -2019,6 +2061,20 @@ pub fn finalizeBundle(
// not fatal: the assets may be reindexed some time later.
};
+ // Signal for testing framework where it is in synchronization
+ if (dev.testing_batch_events == .enable_after_bundle) {
+ dev.testing_batch_events = .{ .enabled = .empty };
+ dev.publish(.testing_watch_synchronization, &.{
+ MessageId.testing_watch_synchronization.char(),
+ 0,
+ }, .binary);
+ } else {
+ dev.publish(.testing_watch_synchronization, &.{
+ MessageId.testing_watch_synchronization.char(),
+ if (had_sent_hmr_event) 4 else 3,
+ }, .binary);
+ }
dev.startNextBundleIfPresent();
// Unref the ref added in `startAsyncBundle`
@@ -2075,7 +2131,7 @@ pub fn finalizeBundle(
.gts = undefined,
};
- const quoted_source_contents = bv2.linker.graph.files.items(.quoted_source_contents);
+ const quoted_source_contents: []const []const u8 = bv2.linker.graph.files.items(.quoted_source_contents);
// Pass 1, update the graph's nodes, resolving every bundler source
// index into its `IncrementalGraph(...).FileIndex`
for (
@@ -2104,7 +2160,7 @@ pub fn finalizeBundle(
.{ .js = .{
.code = compile_result.code(),
.source_map = source_map,
- .quoted_contents = .initOwned(quoted_contents, dev.allocator),
+ .quoted_contents = .initOwned(@constCast(quoted_contents), dev.allocator),
} },
graph == .ssr,
),
@@ -2175,7 +2231,7 @@ pub fn finalizeBundle(
for (result.htmlChunks()) |*chunk| {
const index = bun.JSAst.Index.init(chunk.entry_point.source_index);
const compile_result = chunk.compile_results_for_chunk[0].html;
- const generated_js = try dev.getJavaScriptCodeForHTMLFile(
+ const generated_js = try dev.generateJavaScriptCodeForHTMLFile(
index,
import_records,
input_file_sources,
@@ -2187,7 +2243,7 @@ pub fn finalizeBundle(
.{ .js = .{
.code = generated_js,
.source_map = .empty,
- .quoted_contents = comptime .initNeverFree(""),
+ .quoted_contents = .empty,
} },
false,
);
@@ -2244,6 +2300,11 @@ pub fn finalizeBundle(
}
// Index all failed files now that the incremental graph has been updated.
+ if (dev.incremental_result.failures_removed.items.len > 0 or
+ dev.incremental_result.failures_added.items.len > 0)
+ {
+ had_sent_hmr_event = true;
+ }
try dev.indexFailures();
try dev.client_graph.ensureStaleBitCapacity(false);
@@ -2322,7 +2383,7 @@ pub fn finalizeBundle(
if (will_hear_hot_update and
current_bundle.had_reload_event and
(dev.incremental_result.framework_routes_affected.items.len +
- dev.incremental_result.html_routes_hard_affected.items.len) > 0 and
+ dev.incremental_result.html_routes_hard_affected.items.len) > 0 and
dev.bundling_failures.count() == 0)
{
has_route_bits_set = true;
@@ -2451,10 +2512,10 @@ pub fn finalizeBundle(
}
try w.writeInt(i32, -1, .little);
- // Send CSS mutations
const css_chunks = result.cssChunks();
if (will_hear_hot_update) {
if (dev.client_graph.current_chunk_len > 0 or css_chunks.len > 0) {
+ // Send CSS mutations
const asset_values = dev.assets.files.values();
try w.writeInt(u32, @intCast(css_chunks.len), .little);
const sources = bv2.graph.input_files.items(.source);
@@ -2466,6 +2527,7 @@ pub fn finalizeBundle(
try w.writeAll(css_data);
}
// Send the JS chunk
+ if (dev.client_graph.current_chunk_len > 0) {
const hash = hash: {
var source_map_hash: bun.bundle_v2.ContentHasher.Hash = .init(0x4b12); // arbitrarily different seed than what .initial_response uses
@@ -2491,6 +2553,7 @@ pub fn finalizeBundle(
}
dev.publish(.hot_update, hot_update_payload.items, .binary);
+ had_sent_hmr_event = true;
}
if (dev.incremental_result.failures_added.items.len > 0) {
@@ -2570,6 +2633,7 @@ pub fn finalizeBundle(
break :file_name dev.relativePath(abs_path);
},
};
+ defer dev.releaseRelativePathBuf();
const total_count = bv2.graph.entry_points.items.len;
if (file_name) |name| {
Output.prettyError("<d>:<r> {s}", .{name});
@@ -2637,7 +2701,9 @@ fn startNextBundleIfPresent(dev: *DevServer) void {
dev.appendRouteEntryPointsIfNotStale(&entry_points, temp_alloc, route_bundle_index) catch bun.outOfMemory();
}
- dev.startAsyncBundle(entry_points, is_reload, timer) catch bun.outOfMemory();
+ if (entry_points.set.count() > 0) {
+ dev.startAsyncBundle(entry_points, is_reload, timer) catch bun.outOfMemory();
+ }
dev.next_bundle.route_queue.clearRetainingCapacity();
}
@@ -2765,16 +2831,13 @@ fn onRequest(dev: *DevServer, req: *Request, resp: anytype) void {
return;
}
- switch (dev.server.?) {
- inline else => |s| {
- if (@typeInfo(@TypeOf(s.app.?)).pointer.child.Response != @typeInfo(@TypeOf(resp)).pointer.child) {
- unreachable; // mismatch between `is_ssl` with server and response types. optimize these checks out.
- }
- if (s.config.onRequest != .zero) {
- s.onRequest(req, resp);
- return;
- }
- },
+ if (DevServer.AnyResponse != @typeInfo(@TypeOf(resp)).pointer.child) {
+ unreachable; // mismatch between `is_ssl` with server and response types. optimize these checks out.
+ }
+ if (dev.server.?.config.onRequest != .zero) {
+ dev.server.?.onRequest(req, resp);
+ return;
+ }
sendBuiltInNotFound(resp);
@@ -2982,6 +3045,9 @@ pub fn IncrementalGraph(side: bake.Side) type {
/// exact size, instead of the log approach that dynamic arrays use.
stale_files: DynamicBitSetUnmanaged,
+ // TODO: rename `dependencies` to something that clearly indicates direction.
+ // such as "parent" or "consumer"
/// Start of a file's 'dependencies' linked list. These are the other
/// files that have imports to this file. Walk this list to discover
/// what files are to be reloaded when something changes.
@@ -3236,15 +3302,15 @@ pub fn IncrementalGraph(side: bake.Side) type {
return self.vlq_ptr[0..self.vlq_len];
}
- pub fn quotedContentsCowString(self: @This()) bun.CowString {
- return bun.CowString.initUnchecked(self.quoted_contents_ptr[0..self.quoted_contents_flags.len], self.quoted_contents_flags.is_owned);
+ pub fn quotedContentsCowString(self: @This()) bun.ptr.CowString {
+ return bun.ptr.CowString.initUnchecked(self.quoted_contents_ptr[0..self.quoted_contents_flags.len], self.quoted_contents_flags.is_owned);
}
pub fn quotedContents(self: @This()) []const u8 {
return self.quoted_contents_ptr[0..self.quoted_contents_flags.len];
}
- pub fn fromNonEmptySourceMap(source_map: SourceMap.Chunk, quoted_contents: bun.CowString) !PackedMap {
+ pub fn fromNonEmptySourceMap(source_map: SourceMap.Chunk, quoted_contents: bun.ptr.CowString) !PackedMap {
assert(source_map.buffer.list.items.len > 0);
return .{
.vlq_ptr = source_map.buffer.list.items.ptr,
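These hunks only move `CowString` under the `bun.ptr` namespace, but for readers unfamiliar with the type, the shape implied by `initUnchecked(slice, is_owned)` is a slice tagged with an ownership bit. A simplified stand-in for the concept (Bun's real `bun.ptr.CowString` packs the length and flag together, as the `quoted_contents_flags` field above suggests):

```zig
const std = @import("std");

// Sketch of a copy-on-write string: `is_owned` records whether the bytes
// must be freed by the holder or are merely borrowed.
const CowString = struct {
    bytes: []const u8,
    is_owned: bool,

    pub fn initUnchecked(bytes: []const u8, is_owned: bool) CowString {
        return .{ .bytes = bytes, .is_owned = is_owned };
    }

    pub fn deinit(self: CowString, allocator: std.mem.Allocator) void {
        if (self.is_owned) allocator.free(self.bytes);
    }
};
```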
@@ -3271,9 +3337,9 @@ pub fn IncrementalGraph(side: bake.Side) type {
// for a simpler example. It is more complicated here because this
// structure is two-way.
pub const Edge = struct {
- /// The file with the `import` statement
+ /// The file with the import statement
dependency: FileIndex,
- /// The file that `dependency` is importing
+ /// The file the import statement references.
imported: FileIndex,
next_import: EdgeIndex.Optional,
@@ -3364,7 +3430,7 @@ pub fn IncrementalGraph(side: bake.Side) type {
js: struct {
code: []const u8,
source_map: SourceMap.Chunk,
- quoted_contents: bun.CowString,
+ quoted_contents: bun.ptr.CowString,
},
css: u64,
},
@@ -3690,7 +3756,7 @@ pub fn IncrementalGraph(side: bake.Side) type {
}
}
- while (queue.popOrNull()) |index| {
+ while (queue.pop()) |index| {
for (ctx.import_records[index.get()].slice()) |import_record| {
const result = try processEdgeAttachment(g, ctx, temp_alloc, quick_lookup, new_imports, file_index, import_record, .css);
if (result == .@"continue" and import_record.source_index.isValid()) {
@@ -4330,6 +4396,7 @@ pub fn IncrementalGraph(side: bake.Side) type {
dev.relativePath(gop.key_ptr.*),
log.msgs.items,
);
+ defer dev.releaseRelativePathBuf();
const fail_gop = try dev.bundling_failures.getOrPut(dev.allocator, failure);
try dev.incremental_result.failures_added.append(dev.allocator, failure);
if (fail_gop.found_existing) {
@@ -4358,7 +4425,7 @@ pub fn IncrementalGraph(side: bake.Side) type {
it = g.first_dep.items[index.get()].unwrap();
while (it) |edge_index| {
const dep = g.edges.items[edge_index.get()];
- it = dep.next_import.unwrap();
+ it = dep.next_dependency.unwrap();
assert(dep.imported == index);
bv2.enqueueFileFromDevServerIncrementalGraphInvalidation(
@@ -4548,6 +4615,7 @@ pub fn IncrementalGraph(side: bake.Side) type {
w,
.utf8,
);
+ g.owner().releaseRelativePathBuf();
} else {
try w.writeAll("null");
}
@@ -4563,6 +4631,7 @@ pub fn IncrementalGraph(side: bake.Side) type {
w,
.utf8,
);
+ g.owner().releaseRelativePathBuf();
}
try w.writeAll("\n");
},
@@ -4640,21 +4709,32 @@ pub fn IncrementalGraph(side: bake.Side) type {
var source_map_strings = std.ArrayList(u8).init(arena);
defer source_map_strings.deinit();
const dev = g.owner();
+ dev.relative_path_buf_lock.lock();
+ defer dev.relative_path_buf_lock.unlock();
+ const buf = bun.PathBufferPool.get();
+ defer bun.PathBufferPool.put(buf);
+ var path_count: usize = 0;
for (g.current_chunk_parts.items) |entry| {
+ path_count += 1;
try source_map_strings.appendSlice(",");
- const path = paths[entry.get()];
+ const path = if (Environment.isWindows)
+ bun.path.pathToPosixBuf(u8, paths[entry.get()], buf)
+ else
+ paths[entry.get()];
if (std.fs.path.isAbsolute(path)) {
- const is_windows_drive_path = Environment.isWindows and bun.path.isSepAny(path[0]);
+ const is_windows_drive_path = Environment.isWindows and path[0] != '/';
try source_map_strings.appendSlice(if (is_windows_drive_path)
"file:///"
"\"file:///"
else
"\"file://");
if (Environment.isWindows and !is_windows_drive_path) {
// UNC namespace -> file://server/share/path.ext
bun.strings.percentEncodeWrite(
- if (path.len > 2 and bun.path.isSepAny(path[0]) and bun.path.isSepAny(path[1]))
+ if (path.len > 2 and path[0] == '/' and path[1] == '/')
path[2..]
else
path, // invalid but must not crash
@@ -4906,7 +4986,7 @@ pub fn IncrementalGraph(side: bake.Side) type {
}
fn newEdge(g: *@This(), edge: Edge) !EdgeIndex {
- if (g.edges_free_list.popOrNull()) |index| {
+ if (g.edges_free_list.pop()) |index| {
g.edges.items[index.get()] = edge;
return index;
}
@@ -5283,7 +5363,7 @@ const DirectoryWatchStore = struct {
}
fn appendDepAssumeCapacity(store: *DirectoryWatchStore, dep: Dep) Dep.Index {
- if (store.dependencies_free_list.popOrNull()) |index| {
+ if (store.dependencies_free_list.pop()) |index| {
store.dependencies.items[index.get()] = dep;
return index;
}
@@ -5631,10 +5711,11 @@ fn writeVisualizerMessage(dev: *DevServer, payload: *std.ArrayList(u8)) !void {
0..,
) |k, v, i| {
const normalized_key = dev.relativePath(k);
+ defer dev.releaseRelativePathBuf();
try w.writeInt(u32, @intCast(normalized_key.len), .little);
if (k.len == 0) continue;
try w.writeAll(normalized_key);
- try w.writeByte(@intFromBool(g.stale_files.isSet(i) or switch (side) {
+ try w.writeByte(@intFromBool(g.stale_files.isSetAllowOutOfBound(i, true) or switch (side) {
.server => v.failed,
.client => v.flags.failed,
}));
@@ -5684,7 +5765,7 @@ pub fn onWebSocketUpgrade(
.active_route = .none,
});
dev.active_websocket_connections.put(dev.allocator, dw, {}) catch bun.outOfMemory();
- res.upgrade(
+ _ = res.upgrade(
*HmrSocket,
dw,
req.header("sec-websocket-key") orelse "",
@@ -5696,12 +5777,13 @@ pub fn onWebSocketUpgrade(
/// Every message is to use `.binary`/`ArrayBuffer` transport mode. The first byte
/// indicates a Message ID; see comments on each type for how to interpret the rest.
/// Avoid changing message ID values, as some of these are hard-coded in tests.
///
/// This format is only intended for communication via the browser and DevServer.
/// Server-side HMR is implemented using a different interface. This API is not
/// versioned alongside Bun; breaking changes may occur at any point.
///
- /// All integers are sent in little-endian
+ /// All integers are sent in little-endian.
pub const MessageId = enum(u8) {
/// Version payload. Sent on connection startup. The client should issue a
/// hard-reload when it mismatches with its `config.version`.
@@ -5787,15 +5869,17 @@ pub const MessageId = enum(u8) {
/// Sent in response to `set_url`.
/// - `u32`: Route index
set_url_response = 'n',
- /// Used for syncronization in dev server tests, to identify when a update was
+ /// Used for synchronization in DevServer tests, to identify when an update was
/// acknowledged by the watcher but intentionally took no action.
- redundant_watch = 'r',
+ /// - `u8`: See bake-harness.ts WatchSynchronization enum.
+ testing_watch_synchronization = 'r',
pub inline fn char(id: MessageId) u8 {
return @intFromEnum(id);
}
};
+ /// Avoid changing message ID values, as some of these are hard-coded in tests.
pub const IncomingMessageId = enum(u8) {
/// Subscribe to an event channel. Payload is a sequence of chars available
/// in HmrTopic.
@@ -5803,6 +5887,8 @@ pub const IncomingMessageId = enum(u8) {
/// Emitted on client-side navigations.
/// Rest of payload is a UTF-8 string.
set_url = 'n',
+ /// Tells the DevServer to batch events together.
+ testing_batch_events = 'H',
/// Invalid data
_,
@@ -5813,7 +5899,7 @@ const HmrTopic = enum(u8) {
errors = 'e',
browser_error = 'E',
visualizer = 'v',
- redundant_watch = 'r',
+ testing_watch_synchronization = 'r',
/// Invalid data
_,
@@ -5918,6 +6004,44 @@ const HmrSocket = struct {
var response: [5]u8 = .{MessageId.set_url_response.char()} ++ std.mem.toBytes(rbi.get());
_ = ws.send(&response, .binary, false, true);
},
+ .testing_batch_events => switch (s.dev.testing_batch_events) {
+ .disabled => {
+ if (s.dev.current_bundle != null) {
+ s.dev.testing_batch_events = .enable_after_bundle;
+ } else {
+ s.dev.testing_batch_events = .{ .enabled = .empty };
+ s.dev.publish(.testing_watch_synchronization, &.{
+ MessageId.testing_watch_synchronization.char(),
+ 0,
+ }, .binary);
+ }
+ },
+ .enable_after_bundle => {
+ // do not expose a websocket event that panics a release build
+ bun.debugAssert(false);
+ ws.close();
+ },
+ .enabled => |event_const| {
+ var event = event_const;
+ s.dev.testing_batch_events = .disabled;
+ if (event.entry_points.set.count() == 0) {
+ s.dev.publish(.testing_watch_synchronization, &.{
+ MessageId.testing_watch_synchronization.char(),
+ 2,
+ }, .binary);
+ return;
+ }
+ s.dev.startAsyncBundle(
+ event.entry_points,
+ true,
+ std.time.Timer.start() catch @panic("timers unsupported"),
+ ) catch bun.outOfMemory();
+ event.entry_points.deinit(s.dev.allocator);
+ },
+ },
_ => ws.close(),
}
}
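Collecting the payload bytes sent with `testing_watch_synchronization` across this diff: 0 when batching becomes enabled, 1 when a watch event is deferred into a batch or found redundant, 2 when a batch flush finds nothing to bundle, and 3 or 4 when a bundle finishes without or with a hot update having been published. The authoritative definition is the `WatchSynchronization` enum in bake-harness.ts, which this diff references but does not show; the names below are inferred, not copied from it:

```zig
/// Inferred reading of the synchronization bytes used above; see
/// bake-harness.ts for the real WatchSynchronization enum.
const WatchSynchronization = enum(u8) {
    batching_enabled = 0,
    event_deferred_or_redundant = 1,
    batch_was_empty = 2,
    bundle_done = 3,
    bundle_done_with_hot_update = 4,
};
```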
@@ -6163,15 +6287,6 @@ pub const HotReloadEvent = struct {
g.invalidate(changed_file_paths, entry_points, temp_alloc) catch bun.outOfMemory();
}
- if (dev.has_tailwind_plugin_hack) |*map| {
- for (map.keys()) |abs_path| {
- const file = dev.client_graph.bundled_files.get(abs_path) orelse
- continue;
- if (file.flags.kind == .css)
- entry_points.appendCss(temp_alloc, abs_path) catch bun.outOfMemory();
- }
- }
if (entry_points.set.count() == 0) {
Output.debugWarn("nothing to bundle", .{});
if (changed_file_paths.len > 0)
@@ -6184,9 +6299,21 @@ pub const HotReloadEvent = struct {
bun.fmt.fmtSlice(event.dirs.keys(), ", "),
});
- dev.publish(.redundant_watch, &.{MessageId.redundant_watch.char()}, .binary);
+ dev.publish(.testing_watch_synchronization, &.{
+ MessageId.testing_watch_synchronization.char(),
+ 1,
+ }, .binary);
return;
}
+ if (dev.has_tailwind_plugin_hack) |*map| {
+ for (map.keys()) |abs_path| {
+ const file = dev.client_graph.bundled_files.get(abs_path) orelse
+ continue;
+ if (file.flags.kind == .css)
+ entry_points.appendCss(temp_alloc, abs_path) catch bun.outOfMemory();
+ }
+ }
}
pub fn run(first: *HotReloadEvent) void {
@@ -6194,6 +6321,7 @@ pub const HotReloadEvent = struct {
defer debug.log("HMR Task end", .{});
const dev = first.owner;
if (Environment.isDebug) {
assert(first.debug_mutex.tryLock());
assert(first.contention_indicator.load(.seq_cst) == 0);
@@ -6206,7 +6334,7 @@ pub const HotReloadEvent = struct {
var sfb = std.heap.stackFallback(4096, dev.allocator);
const temp_alloc = sfb.get();
- var entry_points: EntryPointList = EntryPointList.empty;
+ var entry_points: EntryPointList = .empty;
defer entry_points.deinit(temp_alloc);
first.processFileList(dev, &entry_points, temp_alloc);
@@ -6225,6 +6353,19 @@ pub const HotReloadEvent = struct {
return;
}
+ switch (dev.testing_batch_events) {
+ .disabled => {},
+ .enabled => |*ev| {
+ ev.append(dev, entry_points) catch bun.outOfMemory();
+ dev.publish(.testing_watch_synchronization, &.{
+ MessageId.testing_watch_synchronization.char(),
+ 1,
+ }, .binary);
+ return;
+ },
+ .enable_after_bundle => bun.debugAssert(false),
+ }
dev.startAsyncBundle(
entry_points,
true,
@@ -6506,6 +6647,8 @@ pub fn onRouterCollisionError(dev: *DevServer, rel_path: []const u8, other_id: O
dev.relativePath(dev.server_graph.bundled_files.keys()[fromOpaqueFileId(.server, other_id).get()]),
});
Output.flush();
+ dev.releaseRelativePathBuf();
}
fn toOpaqueFileId(comptime side: bake.Side, index: IncrementalGraph(side).FileIndex) OpaqueFileId {
@@ -6529,7 +6672,9 @@ fn fromOpaqueFileId(comptime side: bake.Side, id: OpaqueFileId) IncrementalGraph
}
/// Returns posix style path, suitable for URLs and reproducible hashes.
- fn relativePath(dev: *const DevServer, path: []const u8) []const u8 {
+ /// To avoid overwriting memory, this has a lock for the buffer.
+ fn relativePath(dev: *DevServer, path: []const u8) []const u8 {
+ dev.relative_path_buf_lock.lock();
bun.assert(dev.root[dev.root.len - 1] != '/');
if (!std.fs.path.isAbsolute(path)) {
@@ -6542,15 +6687,20 @@ fn relativePath(dev: *const DevServer, path: []const u8) []const u8 {
{
return path[dev.root.len + 1 ..];
}
- const relative_path_buf = &struct {
- threadlocal var buf: bun.PathBuffer = undefined;
- }.buf;
- const rel = bun.path.relativePlatformBuf(relative_path_buf, dev.root, path, .auto, true);
- // @constCast: `rel` is owned by a mutable threadlocal buffer above
+ const rel = bun.path.relativePlatformBuf(&dev.relative_path_buf, dev.root, path, .auto, true);
+ // @constCast: `rel` is owned by a buffer on `dev`, which is mutable
bun.path.platformToPosixInPlace(u8, @constCast(rel));
return rel;
}
+ fn releaseRelativePathBuf(dev: *DevServer) void {
+ dev.relative_path_buf_lock.unlock();
+ if (bun.Environment.isDebug) {
+ dev.relative_path_buf = undefined;
+ }
+ }
fn dumpStateDueToCrash(dev: *DevServer) !void {
comptime assert(bun.FeatureFlags.bake_debugging_features);
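This is the pattern behind the many `defer dev.releaseRelativePathBuf();` lines threaded through this diff: a shared scratch buffer guarded by a lock, with paired acquire/release calls instead of a threadlocal. A stripped-down sketch of the same idea; names and the mutex type are illustrative (Bun uses `bun.DebugThreadLock`, which only asserts in debug builds rather than blocking):

```zig
const std = @import("std");

const PathScratch = struct {
    mutex: std.Thread.Mutex = .{},
    buf: [std.fs.max_path_bytes]u8 = undefined,

    /// Borrows the shared buffer; the returned slice is only valid until
    /// `release`, which every caller must pair with this call.
    fn relativeToRoot(self: *PathScratch, root: []const u8, path: []const u8) []const u8 {
        self.mutex.lock();
        // Sketch only: assume `path` lives under `root` and fits the buffer.
        const rel = path[root.len + 1 ..];
        const out = self.buf[0..rel.len];
        @memcpy(out, rel);
        return out;
    }

    fn release(self: *PathScratch) void {
        self.mutex.unlock();
    }
};
```

Call sites then look like `const rel = scratch.relativeToRoot(root, abs); defer scratch.release();`, which is exactly the `relativePath` / `defer dev.releaseRelativePathBuf()` pairing introduced above.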
@@ -7182,6 +7332,7 @@ const ErrorReportRequest = struct {
const abs_path = result.file_paths[@intCast(index - 1)];
frame.source_url = .init(abs_path);
const rel_path = ctx.dev.relativePath(abs_path);
+ defer ctx.dev.releaseRelativePathBuf();
if (bun.strings.eql(frame.function_name.value.ZigString.slice(), rel_path)) {
frame.function_name = .empty;
}
@@ -7211,14 +7362,15 @@ const ErrorReportRequest = struct {
// Stack traces can often end with random runtime frames that are not relevant.
trim_runtime_frames: {
- const first_non_runtime_frame = for (frames.items, 0..) |frame, i| {
+ // Ensure that trimming will not remove ALL frames.
+ for (frames.items) |frame| {
if (!frame.position.isInvalid() or frame.source_url.value.ZigString.slice().ptr != runtime_name) {
- break i;
+ break;
}
} else break :trim_runtime_frames;
// Move all frames up
- var i = first_non_runtime_frame + 1;
+ var i: usize = 0;
for (frames.items[i..]) |frame| {
if (frame.position.isInvalid() and frame.source_url.value.ZigString.slice().ptr == runtime_name) {
continue; // skip runtime frames
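The rewritten loop first scans for at least one non-runtime frame, so trimming can never delete the entire trace, and then compacts from index 0 instead of only trimming after the first user frame. The same logic in isolation; the `Frame` type here is a stand-in for Bun's stack-frame struct, whose runtime check combines `position.isInvalid()` with a `source_url` pointer comparison:

```zig
const std = @import("std");

const Frame = struct {
    name: []const u8,
    is_runtime: bool, // stands in for the position/source_url checks above
};

fn trimRuntimeFrames(frames: *std.ArrayListUnmanaged(Frame)) void {
    // Bail out unless at least one user frame exists, so trimming
    // can never remove ALL frames.
    for (frames.items) |frame| {
        if (!frame.is_runtime) break;
    } else return;

    // Compact in place, dropping runtime frames.
    var out: usize = 0;
    for (frames.items) |frame| {
        if (frame.is_runtime) continue;
        frames.items[out] = frame;
        out += 1;
    }
    frames.shrinkRetainingCapacity(out);
}
```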
@@ -7270,6 +7422,7 @@ const ErrorReportRequest = struct {
const src_to_write = frame.source_url.value.ZigString.slice();
if (bun.strings.hasPrefixComptime(src_to_write, "/")) {
const file = ctx.dev.relativePath(src_to_write);
+ defer ctx.dev.releaseRelativePathBuf();
try w.writeInt(u32, @intCast(file.len), .little);
try w.writeAll(file);
} else {
@@ -7404,6 +7557,19 @@ fn readString32(reader: anytype, alloc: Allocator) ![]const u8 {
return memory;
}
+ const TestingBatch = struct {
+ entry_points: EntryPointList,
+ const empty: @This() = .{ .entry_points = .empty };
+ pub fn append(self: *@This(), dev: *DevServer, entry_points: EntryPointList) !void {
+ assert(entry_points.set.count() > 0);
+ for (entry_points.set.keys(), entry_points.set.values()) |k, v| {
+ try self.entry_points.append(dev.allocator, k, v);
+ }
+ }
+ };
/// userland implementation of https://github.com/ziglang/zig/issues/21879
fn VoidFieldTypes(comptime T: type) type {
const fields = @typeInfo(T).@"struct".fields;

View File

@@ -430,7 +430,7 @@ pub const Style = union(enum) {
if (map.get(utf8.slice())) |style| {
return style;
}
- } else if (value.isCallable(global.vm())) {
+ } else if (value.isCallable()) {
return .{ .javascript_defined = JSC.Strong.create(value, global) };
}
@@ -810,7 +810,7 @@ fn newRoute(fr: *FrameworkRouter, alloc: Allocator, route_data: Route) !Route.In
}
fn newEdge(fr: *FrameworkRouter, alloc: Allocator, edge_data: Route.Edge) !Route.Edge.Index {
- if (fr.freed_edges.popOrNull()) |i| {
+ if (fr.freed_edges.pop()) |i| {
fr.edges.items[i.get()] = edge_data;
return i;
} else {

View File

@@ -1,3 +1,5 @@
+ /** Module Ids are pre-resolved by the bundler, and should be treated as opaque strings.
+  * In practice, these strings are the relative file path to the module. */
type Id = string;
/** Index with same usage as `IncrementalGraph(.client).Index` */
@@ -24,10 +26,35 @@ interface Config {
roots: FileIndex[];
}
- /**
-  * All modules for the initial bundle.
-  */
- declare const input_graph: Record<string, ModuleLoadFunction>;
+ declare namespace DEBUG {
+ /**
+  * Set globally in debug builds.
+  * Removed using --drop=DEBUG.ASSERT in releases.
+  */
+ declare function ASSERT(condition: any, message?: string): asserts condition;
+ }
+ /** All modules for the initial bundle. */
+ declare const unloadedModuleRegistry: Record<string, UnloadedModule>;
+ declare type UnloadedModule = UnloadedESM | UnloadedCommonJS;
+ declare type UnloadedESM = [
+ deps: EncodedDependencyArray,
+ exportKeys: string[],
+ starImports: Id[],
+ load: (mod: import("./hmr-module").HMRModule) => Promise<void>,
+ isAsync: boolean,
+ ];
+ declare type EncodedDependencyArray = (string | number)[];
+ declare type UnloadedCommonJS = (
+ hmr: import("./hmr-module").HMRModule,
+ module: import("./hmr-module").HMRModule["cjs"],
+ exports: unknown,
+ ) => unknown;
+ declare type CommonJSModule = {
+ id: Id;
+ exports: any;
+ require: (id: Id) => unknown;
+ };
declare const config: Config;
@@ -112,3 +139,12 @@ declare module "react-dom/server.node" {
options: RenderToPipeableStreamOptions,
): PipeableStream<Uint8Array>;
}
declare module "bun:wrap" {
export const __name: unique symbol;
export const __legacyDecorateClassTS: unique symbol;
export const __legacyDecorateParamTS: unique symbol;
export const __legacyMetadataTS: unique symbol;
export const __using: unique symbol;
export const __callDispose: unique symbol;
}

Some files were not shown because too many files have changed in this diff.