Compare commits

..

193 Commits

Author SHA1 Message Date
Don Isaac
caa6a7c5ac wip 2025-01-06 09:52:09 -08:00
Don Isaac
85b97e0303 Merge branch 'main' of github.com:oven-sh/bun into don/fix/streams-leak 2025-01-06 09:50:24 -08:00
Eric Liu
8d82302ec5 docs(plugins): fix typos (#16174) 2025-01-05 18:50:03 -08:00
Ciro Spaciari
034f776047 WIP: S3 improvements (#16167) 2025-01-04 19:57:35 -08:00
Jarred Sumner
8a469cce7e Default to "auto" instead of "us-east-1" 2025-01-04 06:21:32 -08:00
Jarred Sumner
e532456cfe Update Bun.S3 type definitions 2025-01-04 05:24:57 -08:00
Jarred Sumner
cc52828d54 Remove rejectOnNextTick (#16161) 2025-01-04 04:17:03 -08:00
Jarred Sumner
5fe9b6f426 Improve MinIO support in Bun.S3 2025-01-04 02:44:17 -08:00
Ciro Spaciari
a53f2e6aaa fix test on windows (#16151) 2025-01-04 01:22:48 -08:00
Dylan Conway
79aa5d16df skip root scripts if root is filtered out with --filter (#16152)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2025-01-04 01:22:24 -08:00
Jarred Sumner
4454ebb152 Allow http:// endpoints in Bun.S3 2025-01-04 01:08:27 -08:00
Jarred Sumner
33233b1607 Don't include //# sourcemap comments in .html or .css files (#16159) 2025-01-04 00:32:17 -08:00
Don Isaac
ed0b4e1a6e fix(build/html): handle relative paths in script src (#16153) 2025-01-04 00:23:51 -08:00
Jarred Sumner
debd8a0eba Support BUN_CONFIG_VERBOSE_FETCH in S3 2025-01-04 00:05:14 -08:00
Jarred Sumner
cc5ee01752 Initial S3 docs 2025-01-03 23:08:14 -08:00
Jarred Sumner
d5fc928ca8 S3 cleanup (#16039)
Co-authored-by: Ciro Spaciari <ciro.spaciari@gmail.com>
2025-01-03 19:11:48 -08:00
Dylan Conway
2043613a62 support bun install --filter <pattern> (#16093) 2025-01-03 18:39:41 -08:00
Michael H
5caeeb9549 docs: contributing windows link be absolute to bun.sh (#16127) 2025-01-03 17:56:00 -08:00
Dylan Conway
fa7376b042 add bun install --lockfile-only (#16143) 2025-01-03 17:55:40 -08:00
Jarred Sumner
fd9d9242d8 Support absolute paths when bundling HTML (#16149) 2025-01-03 17:54:07 -08:00
190n
78498b4244 Include array length and promise status in V8 heap snapshots (oven-sh/WebKit#75) (#16141) 2025-01-03 17:33:17 -08:00
Dylan Conway
c713c0319b fix(install): extra quotes in bun.lock (#16139) 2025-01-03 15:16:52 -08:00
Jarred Sumner
912a2cbc12 Expose some no-ops (#16125)
Co-authored-by: Jarred-Sumner <Jarred-Sumner@users.noreply.github.com>
2025-01-03 13:57:46 -08:00
Don Isaac
85336764ff Merge branch 'main' of github.com:oven-sh/bun into don/fix/streams-leak 2025-01-03 11:40:52 -08:00
Dylan Conway
c130df6c58 start verdaccio in multiple test files (#16118) 2025-01-03 08:21:00 -08:00
Jarred Sumner
f0cb1b723e Remove spinlock in libpas on Linux (#16130) 2025-01-03 04:32:27 -08:00
Jarred Sumner
79430091a1 Add v8.writeHeapSnapshot (#16123) 2025-01-02 21:24:16 -08:00
Jarred Sumner
ab8fe1a6c3 Bump 2025-01-02 21:17:47 -08:00
Michael H
dda49d17f9 docs: fix #16116 (#16122) 2025-01-02 20:29:05 -08:00
Jarred Sumner
faec20080d Update nodejs-apis.md 2025-01-02 20:27:30 -08:00
Jarred Sumner
f834304c27 Support generating V8 Heap Snapshots (#16109) 2025-01-02 20:15:13 -08:00
Jarred Sumner
b59e7c7682 Add missing exception checks to JSPropertyIterator (#16121)
Co-authored-by: Dylan Conway <35280289+dylan-conway@users.noreply.github.com>
2025-01-02 18:55:38 -08:00
Yiheng
40724d29ac Update cache.md (#16028) 2025-01-02 18:24:03 -08:00
Dylan Conway
d9125143b7 lockfile: escape names in bun.lock (#16120) 2025-01-02 18:22:39 -08:00
Jarred Sumner
4dcfd686b4 Fix build 2025-01-02 16:22:58 -08:00
Jarred Sumner
012d70f42e Fix bug with PATH in Bun.spawn (#16067) 2025-01-02 16:03:42 -08:00
Dylan Conway
a85bd42989 Add short flag for --filter (#16058) 2025-01-02 15:53:45 -08:00
Chawye Hsu
d714943d87 fix(install): read bunfig install.cache.dir (#10699)
Signed-off-by: Chawye Hsu <su+git@chawyehsu.com>
2025-01-02 15:46:27 -08:00
Ciro Spaciari
ae18cc0ef3 fix(server) HEAD Requests followup (#16115) 2025-01-02 15:08:03 -08:00
Jarred Sumner
a8b3f732c5 Report memory size of performance.measure / performance.mark (#16094) 2025-01-01 19:23:13 -08:00
KOMIYA Atsushi
ee955591e2 Update defines to define in cc function documentation (#16097) 2025-01-01 19:07:18 -08:00
Ciro Spaciari
7a52ec55a5 fix(server) HEAD requests (#16099) 2025-01-01 19:06:08 -08:00
Johan Bergström
aa1b0c9c40 fix: avoid world-writable permissions for lockfiles (#16018) 2025-01-01 10:56:10 -08:00
Jarred Sumner
be959e111a Do not assert valid windows path in chdirOSPath because the SetCurrentDirectoryW function will validate the path 2024-12-31 21:08:07 -08:00
Jarred Sumner
19191659cf Avoid resolving substrings in bun:sqlite and Buffer.byteLength (#16092) 2024-12-31 19:48:33 -08:00
Jarred Sumner
30008ed0fc Bump WebKit again (#16091) 2024-12-31 18:17:56 -08:00
Jarred Sumner
e3a1d026f9 Fix crash in bake on load (#16021) 2024-12-31 17:16:12 -08:00
Jarred Sumner
02196cbf0e Avoid resolving substrings unnecessarily (#16090) 2024-12-31 17:06:49 -08:00
Jarred Sumner
1ae855223c Bump WebKit (#16068) 2024-12-31 14:48:54 -08:00
Dylan Conway
5058bd3913 handle bundle(d)Dependencies in bun install (#16055) 2024-12-31 13:40:55 -08:00
Don Isaac
b406509afd refactor: remove unused script execution context file (#16059) 2024-12-31 13:31:33 -08:00
Dylan Conway
82f9b13e08 docs: fix bun.lock section (#16088) 2024-12-31 11:40:28 -08:00
Dylan Conway
37e7f5ba8f transpiler: fix crash with malformed enums (#16084) 2024-12-31 09:09:09 -08:00
Navishkar Rao
babd8b6028 Update nextjs.md docs with starter example (#16072) 2024-12-30 22:26:19 -08:00
Don Isaac
ab52058439 fix(us): memory leak when getting root certificate (#16073) 2024-12-30 22:20:15 -08:00
Don Isaac
31482ec58c wip 2024-12-30 23:46:19 -05:00
Don Isaac
327451429d fix: memory leak when reading chunks from a stream 2024-12-30 23:17:07 -05:00
Lars Volkheimer
e96dded366 fix formatting of Set in Bun.inspect() (#16013) 2024-12-30 13:44:40 -08:00
Jarred Sumner
76bfceae81 Support jsonb, idle_timeout, connection_timeout, max_lifetime timeouts in bun:sql. Add onopen and onclose callbacks. Fix missing "code" property appearing in errors. Add error codes for postgres. (#16045) 2024-12-30 13:25:01 -08:00
Dylan Conway
f0073bfa81 fix(install): free correct pointer in bun patch --commit (#16064) 2024-12-30 12:38:39 -08:00
Devanand Sharma
18ac7f9509 Add remove() and isRemoved in HTMLRewriterTypes.Doctype interface (#16031) 2024-12-28 22:57:25 -08:00
Ciro Spaciari
fe4176e403 feat(s3) s3 client (#15740)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
Co-authored-by: cirospaciari <cirospaciari@users.noreply.github.com>
2024-12-28 17:46:22 -08:00
Jarred Sumner
ed0980cf94 Make creating errors slightly faster (#16023) 2024-12-28 01:32:32 -08:00
Jarred Sumner
dd243a06a5 Log slow lifecycle scripts (#16027) 2024-12-28 01:31:30 -08:00
Jarred Sumner
7b06872abb Deflake fetch tests (#16000) 2024-12-27 14:07:41 -08:00
Don Isaac
d8e644fc25 fix(node/path): crash when joining long paths (#16019) 2024-12-27 17:58:21 +00:00
Meghan Denny
4bcc5b25d9 node: fix all of test-event-emitter (#16009) 2024-12-27 01:34:49 -08:00
Jarred Sumner
19675f474a Update .cursorignore 2024-12-26 11:48:30 -08:00
Jarred Sumner
bba998a611 Create .cursorignore 2024-12-26 11:48:11 -08:00
Jarred Sumner
145a7fd92e Better unicode identifier start / continue check (#15455) 2024-12-25 23:02:46 -08:00
Jarred Sumner
d4c0432a5f Refactor JS parser visitor step into individual functions to reduce stack space usage (#15993) 2024-12-25 23:02:05 -08:00
Jarred Sumner
379c79ee2e Fix typo 2024-12-25 22:35:52 -08:00
Jarred Sumner
2b2ca3275c Improve stack overflow, show more properties in Error objects (#15985)
Co-authored-by: Dave Caruso <me@paperdave.net>
2024-12-25 21:47:13 -08:00
Jarred Sumner
7317c7b4a2 Compress completions list to make zig build a little faster (#15992) 2024-12-25 18:04:46 -08:00
Jarred Sumner
608101c975 Add zlib microbenchmark
need to improve this
2024-12-24 04:20:24 -08:00
Jarred Sumner
52a568d2b2 Fix flaky zlib dictionary test (#15976) 2024-12-24 02:27:07 -08:00
Jarred Sumner
60cb505a98 Use JSObject instead of JSFunction in Bun.plugin (#15968) 2024-12-23 12:33:11 -08:00
Jarred Sumner
da54e81955 Support bundling HTML files and their js, css, and assets in Bun.build and bun build (#15940) 2024-12-23 11:04:38 -08:00
Jarred Sumner
774e30d383 Make originalLine and originalColumn getter calls not observable (#15951) 2024-12-23 03:40:51 -08:00
Jarred Sumner
c6b22d399f Fix showing source code that looks like export default "file-path" (#15957) 2024-12-23 03:40:00 -08:00
Jarred Sumner
1fa6d9e695 +2 passing node:events tests (#15952) 2024-12-23 01:45:13 -08:00
Jarred Sumner
4f8a6b33c4 +5 passing node:zlib tests (#15944) 2024-12-22 20:39:42 -08:00
Martin Amps
a6ad3b9be4 add --elide-lines override flag for workspace filtering (#15837) 2024-12-22 00:14:46 -08:00
github-actions[bot]
b63a6c83b4 deps: update libdeflate to v1.23 (#15934)
Co-authored-by: Jarred-Sumner <Jarred-Sumner@users.noreply.github.com>
2024-12-21 21:28:17 -08:00
Don Isaac
14b44aeb49 fix(process): process.kill allows zero or negative pids (#15920) 2024-12-21 08:45:39 +00:00
Jarred Sumner
d6b9c444c1 Rename src/bundler.zig to src/transpiler.zig (#15921)
Co-authored-by: Dylan Conway <35280289+dylan-conway@users.noreply.github.com>
2024-12-21 00:59:37 -08:00
Don Isaac
3c37b7f806 fix(lexer): do not treat '#bun' in a url as a pragma (#15912)
Co-authored-by: Don Isaac <don@bun.sh>
Co-authored-by: DonIsaac <DonIsaac@users.noreply.github.com>
2024-12-21 04:57:42 +00:00
Don Isaac
acb9fdfcf5 refactor: organize native glob code (#15914)
Co-authored-by: DonIsaac <DonIsaac@users.noreply.github.com>
2024-12-20 20:59:07 -08:00
Jarred Sumner
50eec0025b Add regression test for #15902 2024-12-20 19:28:13 -08:00
Jarred Sumner
ac3cd09a42 Bump 2024-12-20 17:54:39 -08:00
Dylan Conway
6e222c8523 fix #15902 (#15911) 2024-12-20 17:03:37 -08:00
Jarred Sumner
b8f28ed8af Bump 2024-12-20 03:44:55 -08:00
dave caruso
7b3554f90c feat(bundler): add --windows-icon, --windows-no-console, fix bun.exe's main icon (#15894)
Co-authored-by: Dylan Conway <dylan.conway567@gmail.com>
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
Co-authored-by: Dylan Conway <35280289+dylan-conway@users.noreply.github.com>
2024-12-20 03:22:16 -08:00
Jarred Sumner
0c50b0fcec Fix potential runtime crash if transpiler generates invalid commonjs (#15898) 2024-12-20 02:12:08 -08:00
Jarred Sumner
bf9c6fdc00 Revert "fix(lexer): do not treat '#bun' in a url as a pragma" (#15899) 2024-12-20 01:31:48 -08:00
Don Isaac
1d9fbe7d67 fix(lexer): do not treat '#bun' in a url as a pragma (#15888)
Co-authored-by: Don Isaac <don@bun.sh>
Co-authored-by: DonIsaac <DonIsaac@users.noreply.github.com>
2024-12-20 01:26:30 -08:00
Brian Kim
a8893dcae5 Fix macro imports (#15833) 2024-12-20 08:34:45 +00:00
dave caruso
8a4852b8b0 fix: pass homedir test (#15811)
Co-authored-by: paperdave <paperdave@users.noreply.github.com>
Co-authored-by: Ashcon Partovi <ashcon@partovi.net>
Co-authored-by: Dylan Conway <dylan.conway567@gmail.com>
2024-12-20 00:36:59 -08:00
Dylan Conway
45ca9e08c3 fix(install): peer/dev/optional = false lockfile fix (#15874)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-12-20 00:34:21 -08:00
Jarred Sumner
e3fed49082 Implement expect().toHaveBeenCalledOnce() (#15871)
Co-authored-by: Dylan Conway <35280289+dylan-conway@users.noreply.github.com>
2024-12-20 00:23:55 -08:00
Dylan Conway
9164760a5a fix pnpm.test.ts (#15897) 2024-12-19 23:52:50 -08:00
Dylan Conway
747828965e fix(install): sort tree dependencies by behavior and name (#15895) 2024-12-19 23:14:33 -08:00
Jarred Sumner
35679b3178 Update node_util_binding.zig 2024-12-19 17:34:38 -08:00
Don Isaac
960b2b2c11 perf(node:util): fast path for extractedSplitNewLines (#15838)
Co-authored-by: Don Isaac <don@bun.sh>
Co-authored-by: DonIsaac <DonIsaac@users.noreply.github.com>
2024-12-19 23:42:18 +00:00
Don Isaac
f546a9b605 chore: add usage messages to check-node.sh (#15885)
Co-authored-by: Don Isaac <don@bun.sh>
2024-12-19 22:32:25 +00:00
Ashcon Partovi
3cbcd43f9a ci: Enable merge queue, disable soft failing tests 2024-12-19 11:18:13 -08:00
Jarred Sumner
b254e69322 Fix svelte testing guide 2024-12-19 03:44:46 -08:00
Jarred Sumner
5dcfc6f10f Update svelte-test.md 2024-12-19 03:17:02 -08:00
Jarred Sumner
d9b2396948 Update svelte-test.md 2024-12-19 03:16:06 -08:00
Jarred Sumner
e21050dc6f Update svelte-test.md 2024-12-19 03:09:21 -08:00
Jarred Sumner
276da2dbf5 Create svelte-test.md 2024-12-19 03:09:10 -08:00
Jarred Sumner
b539ca32ea Make "use strict" become CommonJS if we don't know whether it's ESM or CJS (#15868) 2024-12-18 23:23:50 -08:00
Jarred Sumner
ebc2eb5c5b Support colors array in util.styleText (#15872) 2024-12-18 23:23:42 -08:00
Jarred Sumner
10990f5213 Fixes #3554 (#15870) 2024-12-18 22:54:11 -08:00
Jarred Sumner
42f23f0966 PR feedback from #15865 2024-12-18 19:42:33 -08:00
Jarred Sumner
ac6723eab7 +13 passing node:vm tests (#15865) 2024-12-18 19:41:37 -08:00
Michael H
8e20d02b9b update registry scope guide (.npmrc is supported) (#15866) 2024-12-18 19:28:23 -08:00
dave caruso
41924211f2 add throw: true in Bun.build, to be made default in 1.2 (#15861) 2024-12-18 19:27:59 -08:00
Michael H
5d2b72aa3b don't make inline sourcemap in normal vscode terminal (#15862) 2024-12-18 18:30:39 -08:00
Don Isaac
e66a347158 fix(module-loader): use a more descriptive crash message (#15831)
Co-authored-by: Don Isaac <don@bun.sh>
Co-authored-by: DonIsaac <DonIsaac@users.noreply.github.com>
2024-12-18 14:10:46 -08:00
Jarred Sumner
b5b51004e8 Bump WebKit (#15828)
Co-authored-by: Dylan Conway <35280289+dylan-conway@users.noreply.github.com>
2024-12-17 20:59:10 -08:00
Dylan Conway
2272b852ba fix(install): npm version to git resolution package-lock.json migration (#15810) 2024-12-17 19:59:23 -08:00
Michael H
df5f95b19e vscode: allow trailing comma in bun.lock (#15747) 2024-12-17 18:05:30 -08:00
190n
59e06b0df5 fix(napi): set lossless parameter in napi_get_value_bigint_{int64,uint64}, and trim leading zeroes in napi_create_bigint_words (#15804) 2024-12-17 17:38:12 -08:00
Dylan Conway
430c1dd583 add install.saveTextLockfile to bunfig.toml (#15827) 2024-12-17 16:52:04 -08:00
Jarred Sumner
ad1738d23c Fix process.on from non-mainthread (#15825) 2024-12-17 16:51:19 -08:00
Jarred Sumner
b7efaa5b19 Bump 2024-12-17 15:30:18 -08:00
Jarred Sumner
1d48561709 Update plugins.md 2024-12-17 01:49:02 -08:00
Jarred Sumner
f2e0d606b6 Update plugins.md 2024-12-17 01:34:17 -08:00
Jarred Sumner
385868f504 Update plugins.md 2024-12-17 01:34:00 -08:00
Jarred Sumner
eecbeb32ec Move bundler plugin docs 2024-12-17 01:31:14 -08:00
Jarred Sumner
903d8bfa4a Be more careful about setting the rlimit max 2024-12-17 01:15:24 -08:00
Don Isaac
9524e1c86a fix: Bun.deepMatch on circular objects causing segfault (#15672)
Co-authored-by: Don Isaac <don@bun.sh>
Co-authored-by: DonIsaac <DonIsaac@users.noreply.github.com>
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-12-16 22:33:34 -08:00
dave caruso
77acfa23a7 pass all upstream node:os tests, all supported node:async_hooks tests (#15802) 2024-12-16 22:22:54 -08:00
Jarred Sumner
9d3b461a25 CI: Remove unnecessary config 2024-12-16 20:38:05 -08:00
Jarred Sumner
9d63ee0edf CI: test concurrency group 2024-12-16 20:33:56 -08:00
Jarred Sumner
a3090fc204 CI: cancel previous canary build 2024-12-16 20:33:09 -08:00
Jarred Sumner
32c1fdf205 Rename estimateDirectMemoryUsageOf to estimateShallowMemoryUsageOf 2024-12-16 20:18:04 -08:00
Jarred Sumner
aada6f930f Fix heap snapshots memory usage stats. Introduce estimateDirectMemoryUsageOf function in "bun:jsc" (#15790) 2024-12-16 20:16:23 -08:00
Zack Radisic
3906d02e2c CSS fixes (#15806) 2024-12-16 19:40:53 -08:00
pfg
f276484f25 Add lldb scripts for zig & jsc (#15807) 2024-12-16 18:31:41 -08:00
Jarred Sumner
4bef96e8d1 Prevent unnecessary postinstall script from causing bun install to hang in unreliable networks 2024-12-16 18:19:43 -08:00
Michael H
f2d955f686 vscode extension: use new debug terminal provider (#15801)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-12-16 17:29:12 -08:00
Ashcon Partovi
e8b85cff40 ci: Retry and detect flaky tests (#15798) 2024-12-16 17:04:33 -08:00
Dylan Conway
d5f1f2f8ad Use the same hoisting logic for text lockfile (#15778) 2024-12-16 16:37:46 -08:00
Michael H
67e4aec990 attempt to fix debugger (#15788)
Co-authored-by: RiskyMH <RiskyMH@users.noreply.github.com>
2024-12-16 16:34:55 -08:00
Jarred Sumner
540a0a89ab Fix text input with ink (#15800) 2024-12-16 16:33:15 -08:00
190n
4eae3a90e8 fix(napi): Make napi_wrap work on regular objects (#15622)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-12-16 15:54:39 -08:00
Jarred Sumner
9604733ee1 ✂️ 2024-12-16 13:51:45 -08:00
Sam
7633f3cc35 docs: dns.prefetch doesn't require port anymore (#15792) 2024-12-16 06:52:49 -08:00
Michael H
1fa0dee5e9 document npm:<package-name> in install docs (#15754) 2024-12-15 07:19:34 -08:00
Jarred Sumner
80b0b88315 Deflake doesnt_crash.test.ts 2024-12-15 06:54:34 -08:00
github-actions[bot]
6a24a06741 deps: update c-ares to v1.34.4 (#15773)
Co-authored-by: Jarred-Sumner <Jarred-Sumner@users.noreply.github.com>
2024-12-15 04:38:39 -08:00
Jarred Sumner
8a64038fae Deflake require.cache test 2024-12-15 00:55:18 -08:00
Jarred Sumner
65f5156589 Deflake process test 2024-12-15 00:47:59 -08:00
Brian Donovan
00a8392656 docs(bun-native-plugin-rs): fix typos (#15764) 2024-12-14 23:50:03 -08:00
Jarred Sumner
c218bffd94 Add "bin" field to bun.lock (#15763)
Co-authored-by: Dylan Conway <dylan.conway567@gmail.com>
2024-12-14 22:52:17 -08:00
Jarred Sumner
3ce6ffa6be Make git dependencies faster + further optimize bun install (#15771) 2024-12-14 19:42:23 -08:00
Jarred Sumner
5326a998c7 Don't open node_modules 1,618 times (#15762) 2024-12-14 04:48:57 -08:00
Jarred Sumner
0d97c8157f Add debugger to entitlements plist 2024-12-14 01:57:08 -08:00
Jarred Sumner
ebc33327d3 Delete incorrect debug assertion 2024-12-14 01:56:55 -08:00
Dylan Conway
3df39f4bb7 bun.lock: fix --frozen-lockfile and resolving extra dependencies (#15748) 2024-12-13 22:40:12 -08:00
Jarred Sumner
c7020c2edc Make --expose gc work in nodetests 2024-12-13 22:30:26 -08:00
Meghan Denny
ac12438f69 node: fix test-zlib-from-gzip-with-trailing-garbage.js (#15757) 2024-12-13 21:51:02 -08:00
Jarred Sumner
1e19672841 fix clangd 2024-12-13 21:20:43 -08:00
Jarred Sumner
20f9cf0047 Fix flaky signal handlers on posix (#15751) 2024-12-13 20:13:56 -08:00
Don Isaac
bd1c5e9876 feat: add JSObject constructors (#15742)
Co-authored-by: Don Isaac <don@bun.sh>
2024-12-12 22:04:19 -08:00
Don Isaac
bbb56acdf7 test(ws): do not create temporary .sock files in root repo directory (#15670)
Co-authored-by: Don Isaac <don@bun.sh>
2024-12-12 21:39:30 -08:00
Jarred Sumner
f64ca29c0e Fix symbols test. Bump Webkit. (#15741) 2024-12-12 20:53:02 -08:00
Dylan Conway
8b3b1442fd bun.lock workspace sorting and comma bugfix (#15739) 2024-12-12 19:33:44 -08:00
Jarred Sumner
e72692801a [ci] Reduce number of environment variables we send (#15730) 2024-12-12 17:48:53 -08:00
Dylan Conway
e146734596 bun.lock fixes (#15724) 2024-12-12 16:45:26 -08:00
Jarred Sumner
7ded578547 [publish images] 2024-12-12 03:22:45 -08:00
Dylan Conway
71af1950fb bump webkit (#15328)
Co-authored-by: dylan-conway <dylan-conway@users.noreply.github.com>
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
Co-authored-by: Ben Grant <ben@bun.sh>
Co-authored-by: Meghan Denny <meghan@bun.sh>
Co-authored-by: Ashcon Partovi <ashcon@partovi.net>
2024-12-12 03:21:56 -08:00
Jarred Sumner
7991be86a3 Fix build 2024-12-12 02:18:25 -08:00
Jarred Sumner
6f50f51528 Deflake a test 2024-12-12 02:07:29 -08:00
Jarred Sumner
2bdf33cac8 Remove silly hack 2024-12-12 01:42:03 -08:00
Jarred Sumner
b3628a526d ✂️ 2024-12-12 01:39:34 -08:00
pfg
1b5cb891c8 More passing console tests (#15676)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-12-12 01:37:10 -08:00
Don Isaac
fe1e3be104 test(node): add parallel/test-path-resolve.js (#15707)
Co-authored-by: Don Isaac <don@bun.sh>
2024-12-12 01:36:36 -08:00
dave caruso
79dc13ca79 pass all string decoder tests (#15723)
Co-authored-by: paperdave <paperdave@users.noreply.github.com>
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-12-12 01:35:08 -08:00
Jarred Sumner
2ccdf0122c Fix edgecase with socketpair() impacting shell and spawn (#15725) 2024-12-12 01:23:40 -08:00
Zack Radisic
fddc28d608 CSS moar fixes (#15719) 2024-12-11 21:45:41 -08:00
Meghan Denny
834b6436c6 fix canary 2024-12-11 20:06:42 -08:00
Zack Radisic
113b62be82 Native plugin follow up (#15632)
Co-authored-by: zackradisic <zackradisic@users.noreply.github.com>
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-12-11 17:51:21 -08:00
pfg
2e0f229722 test(events): 66% -> 94% (#15716) 2024-12-11 17:43:19 -08:00
Don Isaac
08e2cf3761 test: mock 'node:test' module in node test harness (#15696)
Co-authored-by: Don Isaac <don@bun.sh>
2024-12-11 17:40:44 -08:00
pfg
0e8f075191 Pass node querystring tests (#15695) 2024-12-11 17:39:46 -08:00
Ashcon Partovi
667821c53a ci: Fix canary releases (#15713) 2024-12-11 09:47:17 -08:00
Dylan Conway
b55ca429c7 Implement text-based lockfile (#15705) 2024-12-11 05:05:49 -08:00
Don Isaac
78445c543e refactor: set default for name in ErrorCode.ts (#15699)
Co-authored-by: Don Isaac <don@bun.sh>
2024-12-11 01:07:57 -08:00
Don Isaac
24d73e948a test(node): add passing path parse format test (#15703)
Co-authored-by: Don Isaac <don@bun.sh>
2024-12-11 00:34:58 -08:00
Jarred Sumner
5cfa4cc0af ✂️ 2024-12-11 00:34:19 -08:00
714 changed files with 64506 additions and 27371 deletions

170
.buildkite/Dockerfile Normal file
View File

@@ -0,0 +1,170 @@
# Multi-stage build producing three targets:
#   base                  — Ubuntu + LLVM/Clang, GCC 13, CMake, Go, Ruby, and a
#                           prebuilt bun binary (bun is used to build bun)
#   buildkite             — base + Rust nightly + the Buildkite agent (default CMD)
#   bun-build-linux-local — base + Rust nightly, builds bun from the local source tree

# Toolchain/build knobs; override with --build-arg.
ARG LLVM_VERSION="18"
# Version string reported by the toolchain (LLVM_VERSION is re-pointed at this later).
ARG REPORTED_LLVM_VERSION="18.1.8"
# Existing bun release used to bootstrap the build.
ARG OLD_BUN_VERSION="1.1.38"
ARG DEFAULT_CFLAGS="-mno-omit-leaf-frame-pointer -fno-omit-frame-pointer -ffunction-sections -fdata-sections -faddrsig -fno-unwind-tables -fno-asynchronous-unwind-tables"
ARG DEFAULT_CXXFLAGS="-flto=full -fwhole-program-vtables -fforce-emit-vtables"
# NOTE(review): ${TARGETARCH} is a per-stage build arg and is not defined in the
# global (pre-FROM) scope, so this default likely expands to
# "queue=linux,os=linux,arch=" — confirm whether the empty arch tag is intended
# or whether the value is always overridden with --build-arg.
ARG BUILDKITE_AGENT_TAGS="queue=linux,os=linux,arch=${TARGETARCH}"
# Per-arch base images (older glibc on amd64 for wider binary compatibility),
# selected below via base-$TARGETARCH.
FROM --platform=$BUILDPLATFORM ubuntu:20.04 as base-arm64
FROM --platform=$BUILDPLATFORM ubuntu:18.04 as base-amd64
FROM base-$TARGETARCH as base
# Re-declare global ARGs so they are visible inside this stage.
ARG LLVM_VERSION
ARG OLD_BUN_VERSION
ARG TARGETARCH
ARG DEFAULT_CXXFLAGS
ARG DEFAULT_CFLAGS
ARG REPORTED_LLVM_VERSION
ENV DEBIAN_FRONTEND=noninteractive \
CI=true \
DOCKER=true
# Make apt more robust on flaky networks: serialize downloads per host, add a
# timeout and retries, and never pull recommended/suggested packages.
RUN echo "Acquire::Queue-Mode \"host\";" > /etc/apt/apt.conf.d/99-apt-queue-mode.conf \
&& echo "Acquire::Timeout \"120\";" >> /etc/apt/apt.conf.d/99-apt-timeout.conf \
&& echo "Acquire::Retries \"3\";" >> /etc/apt/apt.conf.d/99-apt-retries.conf \
&& echo "APT::Install-Recommends \"false\";" >> /etc/apt/apt.conf.d/99-apt-install-recommends.conf \
&& echo "APT::Install-Suggests \"false\";" >> /etc/apt/apt.conf.d/99-apt-install-suggests.conf
# Base toolchain: build tools, GCC 13 (from the toolchain PPA, since the base
# images ship older GCC), then LLVM ${LLVM_VERSION} via the official apt.llvm.org
# installer script.
RUN apt-get update && apt-get install -y --no-install-recommends \
wget curl git python3 python3-pip ninja-build \
software-properties-common apt-transport-https \
ca-certificates gnupg lsb-release unzip \
libxml2-dev ruby ruby-dev bison gawk perl make golang \
&& add-apt-repository ppa:ubuntu-toolchain-r/test \
&& apt-get update \
&& apt-get install -y gcc-13 g++-13 libgcc-13-dev libstdc++-13-dev \
libasan6 libubsan1 libatomic1 libtsan0 liblsan0 \
libgfortran5 libc6-dev \
&& wget https://apt.llvm.org/llvm.sh \
&& chmod +x llvm.sh \
&& ./llvm.sh ${LLVM_VERSION} all \
&& rm llvm.sh
# Install a recent CMake from the upstream binary installer (distro CMake is too
# old); the tmpfs mount keeps the installer out of the image layers.
RUN --mount=type=tmpfs,target=/tmp \
cmake_version="3.30.5" && \
if [ "$TARGETARCH" = "arm64" ]; then \
cmake_url="https://github.com/Kitware/CMake/releases/download/v${cmake_version}/cmake-${cmake_version}-linux-aarch64.sh"; \
else \
cmake_url="https://github.com/Kitware/CMake/releases/download/v${cmake_version}/cmake-${cmake_version}-linux-x86_64.sh"; \
fi && \
wget -O /tmp/cmake.sh "$cmake_url" && \
sh /tmp/cmake.sh --skip-license --prefix=/usr
# Make GCC 13 the default gcc/g++ (and its binutils wrappers) via alternatives.
RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-13 130 \
--slave /usr/bin/g++ g++ /usr/bin/g++-13 \
--slave /usr/bin/gcc-ar gcc-ar /usr/bin/gcc-ar-13 \
--slave /usr/bin/gcc-nm gcc-nm /usr/bin/gcc-nm-13 \
--slave /usr/bin/gcc-ranlib gcc-ranlib /usr/bin/gcc-ranlib-13
# Record the arch-specific triplet and bun arch name for shells that source
# /etc/environment (e.g. the bun download step below).
RUN echo "ARCH_PATH=$([ "$TARGETARCH" = "arm64" ] && echo "aarch64-linux-gnu" || echo "x86_64-linux-gnu")" >> /etc/environment \
&& echo "BUN_ARCH=$([ "$TARGETARCH" = "arm64" ] && echo "aarch64" || echo "x64")" >> /etc/environment
# NOTE(review): ${ARCH_PATH} here is NOT a Dockerfile ARG/ENV — it was only
# written to /etc/environment above, which Docker does not read during build.
# These ENV values therefore likely expand with an empty ARCH_PATH
# (e.g. "/usr/lib/gcc//13:..."); confirm whether that is intended.
ENV LD_LIBRARY_PATH=/usr/lib/gcc/${ARCH_PATH}/13:/usr/lib/${ARCH_PATH} \
LIBRARY_PATH=/usr/lib/gcc/${ARCH_PATH}/13:/usr/lib/${ARCH_PATH} \
CPLUS_INCLUDE_PATH=/usr/include/c++/13:/usr/include/${ARCH_PATH}/c++/13 \
C_INCLUDE_PATH=/usr/lib/gcc/${ARCH_PATH}/13/include \
CFLAGS=${DEFAULT_CFLAGS} \
CXXFLAGS="${DEFAULT_CFLAGS} ${DEFAULT_CXXFLAGS}"
# Expose GCC 13's libstdc++ to the dynamic linker for both triplets.
RUN if [ "$TARGETARCH" = "arm64" ]; then \
export ARCH_PATH="aarch64-linux-gnu"; \
else \
export ARCH_PATH="x86_64-linux-gnu"; \
fi \
&& mkdir -p /usr/lib/gcc/${ARCH_PATH}/13 \
&& ln -sf /usr/lib/${ARCH_PATH}/libstdc++.so.6 /usr/lib/gcc/${ARCH_PATH}/13/ \
&& echo "/usr/lib/gcc/${ARCH_PATH}/13" > /etc/ld.so.conf.d/gcc-13.conf \
&& echo "/usr/lib/${ARCH_PATH}" >> /etc/ld.so.conf.d/gcc-13.conf \
&& ldconfig
# Put the versioned LLVM tools on PATH under their unversioned names and make
# clang/clang++/lld the system cc/c++/ld.
RUN for f in /usr/lib/llvm-${LLVM_VERSION}/bin/*; do ln -sf "$f" /usr/bin; done \
&& ln -sf /usr/bin/clang-${LLVM_VERSION} /usr/bin/clang \
&& ln -sf /usr/bin/clang++-${LLVM_VERSION} /usr/bin/clang++ \
&& ln -sf /usr/bin/lld-${LLVM_VERSION} /usr/bin/lld \
&& ln -sf /usr/bin/lldb-${LLVM_VERSION} /usr/bin/lldb \
&& ln -sf /usr/bin/clangd-${LLVM_VERSION} /usr/bin/clangd \
&& ln -sf /usr/bin/llvm-ar-${LLVM_VERSION} /usr/bin/llvm-ar \
&& ln -sf /usr/bin/ld.lld /usr/bin/ld \
&& ln -sf /usr/bin/clang /usr/bin/cc \
&& ln -sf /usr/bin/clang++ /usr/bin/c++
ENV CC="clang" \
CXX="clang++" \
AR="llvm-ar-${LLVM_VERSION}" \
RANLIB="llvm-ranlib-${LLVM_VERSION}" \
LD="lld-${LLVM_VERSION}"
# Download the bootstrap bun release; BUN_ARCH comes from /etc/environment
# (sourced explicitly here, which works because this runs inside bash).
RUN --mount=type=tmpfs,target=/tmp \
bash -c '\
set -euxo pipefail && \
source /etc/environment && \
echo "Downloading bun-v${OLD_BUN_VERSION}/bun-linux-$BUN_ARCH.zip from https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/bun-v${OLD_BUN_VERSION}/bun-linux-$BUN_ARCH.zip" && \
curl -fsSL https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/bun-v${OLD_BUN_VERSION}/bun-linux-$BUN_ARCH.zip -o /tmp/bun.zip && \
unzip /tmp/bun.zip -d /tmp/bun && \
mv /tmp/bun/*/bun /usr/bin/bun && \
chmod +x /usr/bin/bun'
# From here on, report the full LLVM version string (e.g. "18.1.8").
ENV LLVM_VERSION=${REPORTED_LLVM_VERSION}
WORKDIR /workspace
# ---- Buildkite agent image ----
FROM --platform=$BUILDPLATFORM base as buildkite
ARG BUILDKITE_AGENT_TAGS
# Install Rust nightly
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y \
&& export PATH=$HOME/.cargo/bin:$PATH \
&& rustup install nightly \
&& rustup default nightly
RUN ARCH=$(if [ "$TARGETARCH" = "arm64" ]; then echo "arm64"; else echo "amd64"; fi) && \
echo "Downloading buildkite" && \
curl -fsSL "https://github.com/buildkite/agent/releases/download/v3.87.0/buildkite-agent-linux-${ARCH}-3.87.0.tar.gz" -o /tmp/buildkite-agent.tar.gz && \
mkdir -p /tmp/buildkite-agent && \
tar -xzf /tmp/buildkite-agent.tar.gz -C /tmp/buildkite-agent && \
mv /tmp/buildkite-agent/buildkite-agent /usr/bin/buildkite-agent
RUN mkdir -p /var/cache/buildkite-agent /var/log/buildkite-agent /var/run/buildkite-agent /etc/buildkite-agent /var/lib/buildkite-agent/cache/bun
# NOTE(review): COPY sources are resolved inside the build context; "../" cannot
# escape it. This presumably relies on the build being invoked with a parent
# directory as context (see the bootstrap script's docker buildx invocation) —
# verify the glob matches scripts/agent.mjs there.
COPY ../*/agent.mjs /var/bun/scripts/
ENV BUN_INSTALL_CACHE=/var/lib/buildkite-agent/cache/bun
ENV BUILDKITE_AGENT_TAGS=${BUILDKITE_AGENT_TAGS}
WORKDIR /var/bun/scripts
ENV PATH=/root/.cargo/bin:$PATH
CMD ["bun", "/var/bun/scripts/agent.mjs", "start"]
# ---- Local source build of bun ----
FROM --platform=$BUILDPLATFORM base as bun-build-linux-local
ARG LLVM_VERSION
WORKDIR /workspace/bun
COPY . /workspace/bun
# Install Rust nightly
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y \
&& export PATH=$HOME/.cargo/bin:$PATH \
&& rustup install nightly \
&& rustup default nightly
ENV PATH=/root/.cargo/bin:$PATH
# NOTE(review): REPORTED_LLVM_VERSION is not re-declared with ARG in this stage,
# so this likely expands to an empty string here (unlike the base stage, where
# it was declared) — confirm and add "ARG REPORTED_LLVM_VERSION" if needed.
ENV LLVM_VERSION=${REPORTED_LLVM_VERSION}
# Build in a tmpfs and copy only the final binary out of the layer.
RUN --mount=type=tmpfs,target=/workspace/bun/build \
ls -la \
&& bun run build:release \
&& mkdir -p /target \
&& cp -r /workspace/bun/build/release/bun /target/bun

View File

@@ -0,0 +1,122 @@
#!/usr/bin/env bash
# Bootstrap a RHEL/Fedora/CentOS (dnf-based) host as a Buildkite build machine:
# installs Docker, builds the "buildkite" image from /tmp/Dockerfile +
# /tmp/agent.mjs, and installs a systemd unit that runs the agent container.
# Must run as root. The service is enabled but intentionally not started.
set -euo pipefail
# Check if running as root
if [ "$EUID" -ne 0 ]; then
echo "error: must run as root"
exit 1
fi
# Check OS compatibility
if ! command -v dnf &> /dev/null; then
echo "error: this script requires dnf (RHEL/Fedora/CentOS)"
exit 1
fi
# Ensure /tmp/agent.mjs, /tmp/Dockerfile are present
if [ ! -f /tmp/agent.mjs ] || [ ! -f /tmp/Dockerfile ]; then
# Print each missing file
if [ ! -f /tmp/agent.mjs ]; then
echo "error: /tmp/agent.mjs is missing"
fi
if [ ! -f /tmp/Dockerfile ]; then
echo "error: /tmp/Dockerfile is missing"
fi
exit 1
fi
# Install Docker
dnf update -y
dnf install -y docker
systemctl enable docker
systemctl start docker || {
echo "error: failed to start Docker"
exit 1
}
# Create builder
docker buildx create --name builder --driver docker-container --bootstrap --use || {
echo "error: failed to create Docker buildx builder"
exit 1
}
# Set up Docker to start on boot
# NOTE(review): the unit sets Restart=always while the container also uses
# --restart=unless-stopped, so both systemd and the Docker daemon will try to
# restart it — confirm this double restart policy is intended.
cat << 'EOF' > /etc/systemd/system/buildkite-agent.service
[Unit]
Description=Buildkite Docker Container
After=docker.service network-online.target
Requires=docker.service network-online.target
[Service]
TimeoutStartSec=0
Restart=always
RestartSec=5
ExecStartPre=-/usr/bin/docker stop buildkite
ExecStartPre=-/usr/bin/docker rm buildkite
ExecStart=/usr/bin/docker run \
--name buildkite \
--restart=unless-stopped \
--network host \
-v /var/run/docker.sock:/var/run/docker.sock \
-v /tmp:/tmp \
buildkite:latest
[Install]
WantedBy=multi-user.target
EOF
echo "Building Buildkite image"
# Clean up any previous build artifacts
rm -rf /tmp/fakebun
mkdir -p /tmp/fakebun/scripts /tmp/fakebun/.buildkite
# Copy required files into a throwaway build context whose layout matches what
# the Dockerfile expects (scripts/agent.mjs + .buildkite/Dockerfile).
cp /tmp/agent.mjs /tmp/fakebun/scripts/ || {
echo "error: failed to copy agent.mjs"
exit 1
}
cp /tmp/Dockerfile /tmp/fakebun/.buildkite/Dockerfile || {
echo "error: failed to copy Dockerfile"
exit 1
}
cd /tmp/fakebun || {
echo "error: failed to change directory"
exit 1
}
# Build the Buildkite image
# The sed expression maps `uname -m` output to a Docker platform string
# (aarch64 -> linux/arm64, x86_64 -> linux/amd64). --load imports the result
# into the local image store so `docker run` can use it.
docker buildx build \
--platform $(uname -m | sed 's/aarch64/linux\/arm64/;s/x86_64/linux\/amd64/') \
--tag buildkite:latest \
--target buildkite \
-f .buildkite/Dockerfile \
--load \
. || {
echo "error: Docker build failed"
exit 1
}
# Create container to ensure image is cached in AMI
# NOTE(review): this pre-created container shares the name "buildkite" with the
# one the systemd unit runs; ExecStartPre removes it on first start, so it only
# serves to pin the image into the machine image — confirm that is the intent.
docker container create \
--name buildkite \
--restart=unless-stopped \
buildkite:latest || {
echo "error: failed to create buildkite container"
exit 1
}
# Reload systemd to pick up new service
systemctl daemon-reload
# Enable the service, but don't start it yet
systemctl enable buildkite-agent || {
echo "error: failed to enable buildkite-agent service"
exit 1
}
echo "Bootstrap complete"
echo "To start the Buildkite agent, run: "
echo " systemctl start buildkite-agent"

View File

@@ -13,19 +13,4 @@ steps:
agents:
queue: "build-darwin"
command:
- ".buildkite/scripts/prepare-build.sh"
- if: "build.branch == 'main' && !build.pull_request.repository.fork"
label: ":github:"
agents:
queue: "test-darwin"
depends_on:
- "darwin-aarch64-build-bun"
- "darwin-x64-build-bun"
- "linux-aarch64-build-bun"
- "linux-x64-build-bun"
- "linux-x64-baseline-build-bun"
- "windows-x64-build-bun"
- "windows-x64-baseline-build-bun"
command:
- ".buildkite/scripts/upload-release.sh"
- "node .buildkite/ci.mjs"

View File

@@ -11,6 +11,7 @@ import {
getBuildkiteEmoji,
getBuildMetadata,
getBuildNumber,
getCanaryRevision,
getCommitMessage,
getEmoji,
getEnv,
@@ -43,7 +44,6 @@ import {
* @property {Arch} arch
* @property {Abi} [abi]
* @property {boolean} [baseline]
* @property {boolean} [canary]
* @property {Profile} [profile]
*/
@@ -91,11 +91,11 @@ function getTargetLabel(target) {
* @property {Arch} arch
* @property {Abi} [abi]
* @property {boolean} [baseline]
* @property {boolean} [canary]
* @property {Profile} [profile]
* @property {Distro} [distro]
* @property {string} release
* @property {Tier} [tier]
* @property {string[]} [features]
*/
/**
@@ -103,10 +103,10 @@ function getTargetLabel(target) {
*/
const buildPlatforms = [
{ os: "darwin", arch: "aarch64", release: "14" },
// { os: "darwin", arch: "x64", release: "14" },
{ os: "linux", arch: "aarch64", distro: "debian", release: "11" },
{ os: "linux", arch: "x64", distro: "debian", release: "11" },
{ os: "linux", arch: "x64", baseline: true, distro: "debian", release: "11" },
{ os: "darwin", arch: "x64", release: "14" },
{ os: "linux", arch: "aarch64", distro: "amazonlinux", release: "2023", features: ["docker"] },
{ os: "linux", arch: "x64", distro: "amazonlinux", release: "2023", features: ["docker"] },
{ os: "linux", arch: "x64", baseline: true, distro: "amazonlinux", release: "2023", features: ["docker"] },
{ os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.20" },
{ os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.20" },
{ os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.20" },
@@ -120,8 +120,8 @@ const buildPlatforms = [
const testPlatforms = [
{ os: "darwin", arch: "aarch64", release: "14", tier: "latest" },
{ os: "darwin", arch: "aarch64", release: "13", tier: "previous" },
// { os: "darwin", arch: "x64", release: "14", tier: "latest" },
// { os: "darwin", arch: "x64", release: "13", tier: "previous" },
{ os: "darwin", arch: "x64", release: "14", tier: "latest" },
{ os: "darwin", arch: "x64", release: "13", tier: "previous" },
{ os: "linux", arch: "aarch64", distro: "debian", release: "12", tier: "latest" },
{ os: "linux", arch: "x64", distro: "debian", release: "12", tier: "latest" },
{ os: "linux", arch: "x64", baseline: true, distro: "debian", release: "12", tier: "latest" },
@@ -176,12 +176,21 @@ function getPlatformLabel(platform) {
* @returns {string}
*/
function getImageKey(platform) {
const { os, arch, distro, release } = platform;
const { os, arch, distro, release, features, abi } = platform;
const version = release.replace(/\./g, "");
let key = `${os}-${arch}-${version}`;
if (distro) {
return `${os}-${arch}-${distro}-${version}`;
key += `-${distro}`;
}
return `${os}-${arch}-${version}`;
if (features?.length) {
key += `-with-${features.join("-")}`;
}
if (abi) {
key += `-${abi}`;
}
return key;
}
/**
@@ -195,15 +204,19 @@ function getImageLabel(platform) {
/**
* @param {Platform} platform
* @param {boolean} [dryRun]
* @param {PipelineOptions} options
* @returns {string}
*/
function getImageName(platform, dryRun) {
const { os, arch, distro, release } = platform;
const name = distro ? `${os}-${arch}-${distro}-${release}` : `${os}-${arch}-${release}`;
if (dryRun) {
function getImageName(platform, options) {
const { os } = platform;
const { buildImages, publishImages } = options;
const name = getImageKey(platform);
if (buildImages && !publishImages) {
return `${name}-build-${getBuildNumber()}`;
}
return `${name}-v${getBootstrapVersion(os)}`;
}
@@ -252,27 +265,27 @@ function getPriority() {
* @property {string} instanceType
* @property {number} cpuCount
* @property {number} threadsPerCore
* @property {boolean} dryRun
*/
/**
* @param {Platform} platform
* @param {Ec2Options} options
* @param {PipelineOptions} options
* @param {Ec2Options} ec2Options
* @returns {Agent}
*/
function getEc2Agent(platform, options) {
function getEc2Agent(platform, options, ec2Options) {
const { os, arch, abi, distro, release } = platform;
const { instanceType, cpuCount, threadsPerCore } = options;
const { instanceType, cpuCount, threadsPerCore } = ec2Options;
return {
os,
arch,
abi,
distro,
release,
// The agent is created by robobun, see more details here:
// https://github.com/oven-sh/robobun/blob/d46c07e0ac5ac0f9ffe1012f0e98b59e1a0d387a/src/robobun.ts#L1707
robobun: true,
robobun2: true,
"image-name": getImageName(platform),
"image-name": getImageName(platform, options),
"instance-type": instanceType,
"cpu-count": cpuCount,
"threads-per-core": threadsPerCore,
@@ -282,10 +295,11 @@ function getEc2Agent(platform, options) {
/**
* @param {Platform} platform
* @param {PipelineOptions} options
* @returns {string}
*/
function getCppAgent(platform) {
const { os, arch } = platform;
function getCppAgent(platform, options) {
const { os, arch, distro } = platform;
if (os === "darwin") {
return {
@@ -295,7 +309,7 @@ function getCppAgent(platform) {
};
}
return getEc2Agent(platform, {
return getEc2Agent(platform, options, {
instanceType: arch === "aarch64" ? "c8g.16xlarge" : "c7i.16xlarge",
cpuCount: 32,
threadsPerCore: 1,
@@ -304,35 +318,22 @@ function getCppAgent(platform) {
/**
* @param {Platform} platform
* @param {PipelineOptions} options
* @returns {Agent}
*/
function getZigAgent(platform) {
function getZigAgent(platform, options) {
const { arch } = platform;
return {
queue: "build-zig",
};
// return getEc2Agent(
// {
// os: "linux",
// arch,
// distro: "debian",
// release: "11",
// },
// {
// instanceType: arch === "aarch64" ? "c8g.2xlarge" : "c7i.2xlarge",
// cpuCount: 4,
// threadsPerCore: 1,
// },
// );
}
/**
* @param {Platform} platform
* @param {PipelineOptions} options
* @returns {Agent}
*/
function getTestAgent(platform) {
function getTestAgent(platform, options) {
const { os, arch } = platform;
if (os === "darwin") {
@@ -345,7 +346,7 @@ function getTestAgent(platform) {
// TODO: `dev-server-ssr-110.test.ts` and `next-build.test.ts` run out of memory at 8GB of memory, so use 16GB instead.
if (os === "windows") {
return getEc2Agent(platform, {
return getEc2Agent(platform, options, {
instanceType: "c7i.2xlarge",
cpuCount: 2,
threadsPerCore: 1,
@@ -353,14 +354,14 @@ function getTestAgent(platform) {
}
if (arch === "aarch64") {
return getEc2Agent(platform, {
return getEc2Agent(platform, options, {
instanceType: "c8g.xlarge",
cpuCount: 2,
threadsPerCore: 1,
});
}
return getEc2Agent(platform, {
return getEc2Agent(platform, options, {
instanceType: "c7i.xlarge",
cpuCount: 2,
threadsPerCore: 1,
@@ -373,16 +374,20 @@ function getTestAgent(platform) {
/**
* @param {Target} target
* @param {PipelineOptions} options
* @returns {Record<string, string | undefined>}
*/
function getBuildEnv(target) {
const { profile, baseline, canary, abi } = target;
function getBuildEnv(target, options) {
const { profile, baseline, abi } = target;
const release = !profile || profile === "release";
const { canary } = options;
const revision = typeof canary === "number" ? canary : 1;
return {
CMAKE_BUILD_TYPE: release ? "Release" : profile === "debug" ? "Debug" : "RelWithDebInfo",
ENABLE_BASELINE: baseline ? "ON" : "OFF",
ENABLE_CANARY: canary ? "ON" : "OFF",
ENABLE_CANARY: revision > 0 ? "ON" : "OFF",
CANARY_REVISION: revision,
ENABLE_ASSERTIONS: release ? "OFF" : "ON",
ENABLE_LOGS: release ? "OFF" : "ON",
ABI: abi === "musl" ? "musl" : undefined,
@@ -391,34 +396,36 @@ function getBuildEnv(target) {
/**
* @param {Platform} platform
* @param {PipelineOptions} options
* @returns {Step}
*/
function getBuildVendorStep(platform) {
function getBuildVendorStep(platform, options) {
return {
key: `${getTargetKey(platform)}-build-vendor`,
label: `${getTargetLabel(platform)} - build-vendor`,
agents: getCppAgent(platform),
agents: getCppAgent(platform, options),
retry: getRetry(),
cancel_on_build_failing: isMergeQueue(),
env: getBuildEnv(platform),
env: getBuildEnv(platform, options),
command: "bun run build:ci --target dependencies",
};
}
/**
* @param {Platform} platform
* @param {PipelineOptions} options
* @returns {Step}
*/
function getBuildCppStep(platform) {
function getBuildCppStep(platform, options) {
return {
key: `${getTargetKey(platform)}-build-cpp`,
label: `${getTargetLabel(platform)} - build-cpp`,
agents: getCppAgent(platform),
agents: getCppAgent(platform, options),
retry: getRetry(),
cancel_on_build_failing: isMergeQueue(),
env: {
BUN_CPP_ONLY: "ON",
...getBuildEnv(platform),
...getBuildEnv(platform, options),
},
command: "bun run build:ci --target bun",
};
@@ -442,26 +449,28 @@ function getBuildToolchain(target) {
/**
* @param {Platform} platform
* @param {PipelineOptions} options
* @returns {Step}
*/
function getBuildZigStep(platform) {
function getBuildZigStep(platform, options) {
const toolchain = getBuildToolchain(platform);
return {
key: `${getTargetKey(platform)}-build-zig`,
label: `${getTargetLabel(platform)} - build-zig`,
agents: getZigAgent(platform),
agents: getZigAgent(platform, options),
retry: getRetry(),
cancel_on_build_failing: isMergeQueue(),
env: getBuildEnv(platform),
env: getBuildEnv(platform, options),
command: `bun run build:ci --target bun-zig --toolchain ${toolchain}`,
};
}
/**
* @param {Platform} platform
* @param {PipelineOptions} options
* @returns {Step}
*/
function getLinkBunStep(platform) {
function getLinkBunStep(platform, options) {
return {
key: `${getTargetKey(platform)}-build-bun`,
label: `${getTargetLabel(platform)} - build-bun`,
@@ -470,12 +479,12 @@ function getLinkBunStep(platform) {
`${getTargetKey(platform)}-build-cpp`,
`${getTargetKey(platform)}-build-zig`,
],
agents: getCppAgent(platform),
agents: getCppAgent(platform, options),
retry: getRetry(),
cancel_on_build_failing: isMergeQueue(),
env: {
BUN_LINK_ONLY: "ON",
...getBuildEnv(platform),
...getBuildEnv(platform, options),
},
command: "bun run build:ci --target bun",
};
@@ -483,16 +492,17 @@ function getLinkBunStep(platform) {
/**
* @param {Platform} platform
* @param {PipelineOptions} options
* @returns {Step}
*/
function getBuildBunStep(platform) {
function getBuildBunStep(platform, options) {
return {
key: `${getTargetKey(platform)}-build-bun`,
label: `${getTargetLabel(platform)} - build-bun`,
agents: getCppAgent(platform),
agents: getCppAgent(platform, options),
retry: getRetry(),
cancel_on_build_failing: isMergeQueue(),
env: getBuildEnv(platform),
env: getBuildEnv(platform, options),
command: "bun run build:ci",
};
}
@@ -502,16 +512,18 @@ function getBuildBunStep(platform) {
* @property {string} [buildId]
* @property {boolean} [unifiedTests]
* @property {string[]} [testFiles]
* @property {boolean} [dryRun]
*/
/**
* @param {Platform} platform
* @param {TestOptions} [options]
* @param {PipelineOptions} options
* @param {TestOptions} [testOptions]
* @returns {Step}
*/
function getTestBunStep(platform, options = {}) {
function getTestBunStep(platform, options, testOptions = {}) {
const { os } = platform;
const { buildId, unifiedTests, testFiles } = options;
const { buildId, unifiedTests, testFiles } = testOptions;
const args = [`--step=${getTargetKey(platform)}-build-bun`];
if (buildId) {
@@ -530,10 +542,9 @@ function getTestBunStep(platform, options = {}) {
key: `${getPlatformKey(platform)}-test-bun`,
label: `${getPlatformLabel(platform)} - test-bun`,
depends_on: depends,
agents: getTestAgent(platform),
cancel_on_build_failing: isMergeQueue(),
agents: getTestAgent(platform, options),
retry: getRetry(),
soft_fail: isMainBranch() ? true : [{ exit_status: 2 }],
cancel_on_build_failing: isMergeQueue(),
parallelism: unifiedTests ? undefined : os === "darwin" ? 2 : 10,
command:
os === "windows"
@@ -544,12 +555,14 @@ function getTestBunStep(platform, options = {}) {
/**
* @param {Platform} platform
* @param {boolean} [dryRun]
* @param {PipelineOptions} options
* @returns {Step}
*/
function getBuildImageStep(platform, dryRun) {
const { os, arch, distro, release } = platform;
const action = dryRun ? "create-image" : "publish-image";
function getBuildImageStep(platform, options) {
const { os, arch, distro, release, features } = platform;
const { publishImages } = options;
const action = publishImages ? "publish-image" : "create-image";
const command = [
"node",
"./scripts/machine.mjs",
@@ -562,6 +575,10 @@ function getBuildImageStep(platform, dryRun) {
"--ci",
"--authorized-org=oven-sh",
];
for (const feature of features || []) {
command.push(`--feature=${feature}`);
}
return {
key: `${getImageKey(platform)}-build-image`,
label: `${getImageLabel(platform)} - build-image`,
@@ -572,16 +589,21 @@ function getBuildImageStep(platform, dryRun) {
DEBUG: "1",
},
retry: getRetry(),
cancel_on_build_failing: isMergeQueue(),
command: command.filter(Boolean).join(" "),
timeout_in_minutes: 3 * 60,
};
}
/**
* @param {Platform[]} [buildPlatforms]
* @param {Platform[]} buildPlatforms
* @param {PipelineOptions} options
* @returns {Step}
*/
function getReleaseStep(buildPlatforms) {
function getReleaseStep(buildPlatforms, options) {
const { canary } = options;
const revision = typeof canary === "number" ? canary : 1;
return {
key: "release",
label: getBuildkiteEmoji("rocket"),
@@ -589,6 +611,9 @@ function getReleaseStep(buildPlatforms) {
queue: "test-darwin",
},
depends_on: buildPlatforms.map(platform => `${getTargetKey(platform)}-build-bun`),
env: {
CANARY: revision,
},
command: ".buildkite/scripts/upload-release.sh",
};
}
@@ -678,7 +703,7 @@ function getReleaseStep(buildPlatforms) {
* @property {string | boolean} [forceTests]
* @property {string | boolean} [buildImages]
* @property {string | boolean} [publishImages]
* @property {boolean} [canary]
* @property {number} [canary]
* @property {Profile[]} [buildProfiles]
* @property {Platform[]} [buildPlatforms]
* @property {Platform[]} [testPlatforms]
@@ -896,6 +921,7 @@ async function getPipelineOptions() {
return;
}
const canary = await getCanaryRevision();
const buildPlatformsMap = new Map(buildPlatforms.map(platform => [getTargetKey(platform), platform]));
const testPlatformsMap = new Map(testPlatforms.map(platform => [getPlatformKey(platform), platform]));
@@ -918,13 +944,13 @@ async function getPipelineOptions() {
const buildPlatformKeys = parseArray(options["build-platforms"]);
const testPlatformKeys = parseArray(options["test-platforms"]);
return {
canary: parseBoolean(options["canary"]),
canary: parseBoolean(options["canary"]) ? canary : 0,
skipBuilds: parseBoolean(options["skip-builds"]),
forceBuilds: parseBoolean(options["force-builds"]),
skipTests: parseBoolean(options["skip-tests"]),
testFiles: parseArray(options["test-files"]),
buildImages: parseBoolean(options["build-images"]),
publishImages: parseBoolean(options["publish-images"]),
testFiles: parseArray(options["test-files"]),
unifiedBuilds: parseBoolean(options["unified-builds"]),
unifiedTests: parseBoolean(options["unified-tests"]),
buildProfiles: parseArray(options["build-profiles"]),
@@ -934,6 +960,7 @@ async function getPipelineOptions() {
testPlatforms: testPlatformKeys?.length
? testPlatformKeys.map(key => testPlatformsMap.get(key))
: Array.from(testPlatformsMap.values()),
dryRun: parseBoolean(options["dry-run"]),
};
}
@@ -952,14 +979,18 @@ async function getPipelineOptions() {
return false;
};
const isCanary =
!parseBoolean(getEnv("RELEASE", false) || "false") &&
!/\[(release|build release|release build)\]/i.test(commitMessage);
return {
canary:
!parseBoolean(getEnv("RELEASE", false) || "false") &&
!/\[(release|build release|release build)\]/i.test(commitMessage),
canary: isCanary ? canary : 0,
skipEverything: parseOption(/\[(skip ci|no ci)\]/i),
skipBuilds: parseOption(/\[(skip builds?|no builds?|only tests?)\]/i),
forceBuilds: parseOption(/\[(force builds?)\]/i),
skipTests: parseOption(/\[(skip tests?|no tests?|only builds?)\]/i),
buildImages: parseOption(/\[(build images?)\]/i),
dryRun: parseOption(/\[(dry run)\]/i),
publishImages: parseOption(/\[(publish images?)\]/i),
buildPlatforms: Array.from(buildPlatformsMap.values()),
testPlatforms: Array.from(testPlatformsMap.values()),
buildProfiles: ["release"],
@@ -1001,11 +1032,12 @@ async function getPipeline(options = {}) {
steps.push({
key: "build-images",
group: getBuildkiteEmoji("aws"),
steps: [...imagePlatforms.values()].map(platform => getBuildImageStep(platform, !publishImages)),
steps: [...imagePlatforms.values()].map(platform => getBuildImageStep(platform, options)),
});
}
const { skipBuilds, forceBuilds, unifiedBuilds } = options;
let { skipBuilds, forceBuilds, unifiedBuilds, dryRun } = options;
dryRun = dryRun || !!buildImages;
/** @type {string | undefined} */
let buildId;
@@ -1025,22 +1057,21 @@ async function getPipeline(options = {}) {
.flatMap(platform => buildProfiles.map(profile => ({ ...platform, profile })))
.map(target => {
const imageKey = getImageKey(target);
const imagePlatform = imagePlatforms.get(imageKey);
return getStepWithDependsOn(
{
key: getTargetKey(target),
group: getTargetLabel(target),
steps: unifiedBuilds
? [getBuildBunStep(target)]
? [getBuildBunStep(target, options)]
: [
getBuildVendorStep(target),
getBuildCppStep(target),
getBuildZigStep(target),
getLinkBunStep(target),
getBuildVendorStep(target, options),
getBuildCppStep(target, options),
getBuildZigStep(target, options),
getLinkBunStep(target, options),
],
},
imagePlatform ? `${imageKey}-build-image` : undefined,
imagePlatforms.has(imageKey) ? `${imageKey}-build-image` : undefined,
);
}),
);
@@ -1055,14 +1086,14 @@ async function getPipeline(options = {}) {
.map(target => ({
key: getTargetKey(target),
group: getTargetLabel(target),
steps: [getTestBunStep(target, { unifiedTests, testFiles, buildId })],
steps: [getTestBunStep(target, options, { unifiedTests, testFiles, buildId })],
})),
);
}
}
if (isMainBranch()) {
steps.push(getReleaseStep(buildPlatforms));
steps.push(getReleaseStep(buildPlatforms, options));
}
/** @type {Map<string, GroupStep>} */

View File

@@ -1,11 +0,0 @@
#!/bin/bash
set -eo pipefail
function run_command() {
set -x
"$@"
{ set +x; } 2>/dev/null
}
run_command node ".buildkite/ci.mjs" "$@"

View File

@@ -3,10 +3,6 @@
set -eo pipefail
function assert_main() {
if [ "$RELEASE" == "1" ]; then
echo "info: Skipping canary release because this is a release build"
exit 0
fi
if [ -z "$BUILDKITE_REPO" ]; then
echo "error: Cannot find repository for this build"
exit 1
@@ -194,8 +190,6 @@ function create_release() {
local artifacts=(
bun-darwin-aarch64.zip
bun-darwin-aarch64-profile.zip
bun-darwin-x64.zip
bun-darwin-x64-profile.zip
bun-linux-aarch64.zip
bun-linux-aarch64-profile.zip
bun-linux-x64.zip
@@ -237,8 +231,7 @@ function create_release() {
}
function assert_canary() {
local canary="$(buildkite-agent meta-data get canary 2>/dev/null)"
if [ -z "$canary" ] || [ "$canary" == "0" ]; then
if [ -z "$CANARY" ] || [ "$CANARY" == "0" ]; then
echo "warn: Skipping release because this is not a canary build"
exit 0
fi

View File

@@ -3,3 +3,6 @@ Index:
CompileFlags:
CompilationDatabase: build/debug
Diagnostics:
UnusedIncludes: None

7
.cursorignore Normal file
View File

@@ -0,0 +1,7 @@
# Add directories or file patterns to ignore during indexing (e.g. foo/ or *.csv)
bench
vendor
*-fixture.{js,ts}
zig-cache
packages/bun-uws/fuzzing
build

View File

@@ -16,3 +16,6 @@ zig-out
build
vendor
node_modules
*.trace
packages/bun-uws/fuzzing

View File

@@ -1,4 +1,4 @@
command script import vendor/zig/tools/lldb_pretty_printers.py
# command script import vendor/zig/tools/lldb_pretty_printers.py
command script import vendor/WebKit/Tools/lldb/lldb_webkit.py
# type summary add --summary-string "${var} | inner=${var[0-30]}, source=${var[33-64]}, tag=${var[31-32]}" "unsigned long"

95
.vscode/launch.json generated vendored
View File

@@ -15,6 +15,7 @@
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}",
"envFile": "${workspaceFolder}/.env",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
@@ -22,7 +23,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -31,6 +32,7 @@
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "--only", "${file}"],
"cwd": "${workspaceFolder}",
"envFile": "${workspaceFolder}/.env",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "1",
@@ -38,7 +40,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -53,6 +55,7 @@
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}",
"envFile": "${workspaceFolder}/.env",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
@@ -60,7 +63,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -69,6 +72,7 @@
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}",
"envFile": "${workspaceFolder}/.env",
"env": {
"BUN_DEBUG_QUIET_LOGS": "0",
"BUN_DEBUG_jest": "1",
@@ -76,7 +80,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -85,6 +89,7 @@
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "--watch", "${file}"],
"cwd": "${workspaceFolder}",
"envFile": "${workspaceFolder}/.env",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
@@ -92,7 +97,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -101,6 +106,7 @@
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "--hot", "${file}"],
"cwd": "${workspaceFolder}",
"envFile": "${workspaceFolder}/.env",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
@@ -108,7 +114,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -117,6 +123,7 @@
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}",
"envFile": "${workspaceFolder}/.env",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
@@ -125,7 +132,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -139,6 +146,7 @@
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}",
"envFile": "${workspaceFolder}/.env",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
@@ -147,7 +155,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -162,6 +170,7 @@
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["run", "${fileBasename}"],
"cwd": "${fileDirname}",
"envFile": "${workspaceFolder}/.env",
"env": {
"FORCE_COLOR": "0",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -169,7 +178,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -178,6 +187,7 @@
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["run", "${fileBasename}"],
"cwd": "${fileDirname}",
"envFile": "${workspaceFolder}/.env",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
@@ -188,7 +198,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -197,13 +207,14 @@
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["run", "${fileBasename}"],
"cwd": "${fileDirname}",
"envFile": "${workspaceFolder}/.env",
"env": {
"BUN_DEBUG_QUIET_LOGS": "0",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -212,6 +223,7 @@
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["run", "--watch", "${fileBasename}"],
"cwd": "${fileDirname}",
"envFile": "${workspaceFolder}/.env",
"env": {
// "BUN_DEBUG_DEBUGGER": "1",
// "BUN_DEBUG_INTERNAL_DEBUGGER": "1",
@@ -221,7 +233,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -230,13 +242,14 @@
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["run", "--hot", "${fileBasename}"],
"cwd": "${fileDirname}",
"envFile": "${workspaceFolder}/.env",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -245,6 +258,7 @@
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["run", "${fileBasename}"],
"cwd": "${fileDirname}",
"envFile": "${workspaceFolder}/.env",
"env": {
"FORCE_COLOR": "0",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -253,7 +267,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -267,6 +281,7 @@
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["run", "${fileBasename}"],
"cwd": "${fileDirname}",
"envFile": "${workspaceFolder}/.env",
"env": {
"FORCE_COLOR": "0",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -275,7 +290,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -290,6 +305,7 @@
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}",
"envFile": "${workspaceFolder}/.env",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
@@ -297,7 +313,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -306,6 +322,7 @@
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}",
"envFile": "${workspaceFolder}/.env",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
@@ -313,7 +330,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -322,6 +339,7 @@
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}",
"envFile": "${workspaceFolder}/.env",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
@@ -329,7 +347,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -338,6 +356,7 @@
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "--watch", "${input:testName}"],
"cwd": "${workspaceFolder}",
"envFile": "${workspaceFolder}/.env",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
@@ -345,7 +364,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -354,6 +373,7 @@
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "--hot", "${input:testName}"],
"cwd": "${workspaceFolder}",
"envFile": "${workspaceFolder}/.env",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
@@ -361,7 +381,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -370,6 +390,7 @@
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}",
"envFile": "${workspaceFolder}/.env",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
@@ -378,7 +399,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -392,6 +413,7 @@
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}",
"envFile": "${workspaceFolder}/.env",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
@@ -400,7 +422,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -415,13 +437,14 @@
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["exec", "${input:testName}"],
"cwd": "${workspaceFolder}",
"envFile": "${workspaceFolder}/.env",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
// bun test [*]
{
@@ -431,13 +454,14 @@
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test"],
"cwd": "${workspaceFolder}",
"envFile": "${workspaceFolder}/.env",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -446,13 +470,14 @@
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test"],
"cwd": "${workspaceFolder}",
"envFile": "${workspaceFolder}/.env",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -461,6 +486,7 @@
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test"],
"cwd": "${workspaceFolder}",
"envFile": "${workspaceFolder}/.env",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
@@ -468,7 +494,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -482,13 +508,14 @@
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["install"],
"cwd": "${fileDirname}",
"envFile": "${workspaceFolder}/.env",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -497,13 +524,14 @@
"program": "node",
"args": ["test/runner.node.mjs"],
"cwd": "${workspaceFolder}",
"envFile": "${workspaceFolder}/.env",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
// Windows: bun test [file]
{
@@ -1099,6 +1127,15 @@
"action": "openExternally",
},
},
{
"type": "bun",
"request": "launch",
"name": "bun debug [file]",
// "program": "${file}",
"runtimeArgs": ["run", "${file}"],
"runtime": "${workspaceFolder}/build/debug/bun-debug",
"cwd": "${workspaceFolder}",
},
{
"type": "cppvsdbg",
"sourceFileMap": {
@@ -1125,7 +1162,7 @@
],
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
],
"inputs": [

View File

@@ -2,7 +2,7 @@
// Editor
"editor.tabSize": 2,
"editor.insertSpaces": true,
"editor.formatOnSave": true,
// "editor.formatOnSave": true,
"editor.formatOnSaveMode": "file",
// Search
@@ -17,6 +17,7 @@
// This will fill up your whole search history.
"test/js/node/test/fixtures": true,
"test/js/node/test/common": true,
"./build/debug/codegen/**": false,
},
"search.followSymlinks": false,
"search.useIgnoreFiles": true,
@@ -63,7 +64,6 @@
"editor.tabSize": 4,
"editor.defaultFormatter": "xaver.clang-format",
},
"clangd.arguments": ["-header-insertion=never", "-no-unused-includes"],
// JavaScript
"prettier.enable": true,

View File

@@ -1,11 +1,6 @@
Configuring a development environment for Bun can take 10-30 minutes depending on your internet connection and computer speed. You will need ~10GB of free disk space for the repository and build artifacts.
If you are using Windows, please refer to [this guide](/docs/project/building-windows.md)
{% details summary="For Ubuntu users" %}
TL;DR: Ubuntu 22.04 is suggested.
Bun currently requires `glibc >=2.32` in development which means if you're on Ubuntu 20.04 (glibc == 2.31), you may likely meet `error: undefined symbol: __libc_single_threaded `. You need to take extra configurations. Also, according to this [issue](https://github.com/llvm/llvm-project/issues/97314), LLVM 16 is no longer maintained on Ubuntu 24.04 (noble). And instead, you might want `brew` to install LLVM 16 for your Ubuntu 24.04.
{% /details %}
If you are using Windows, please refer to [this guide](https://bun.sh/docs/project/building-windows)
## Install Dependencies
@@ -58,7 +53,7 @@ $ brew install bun
## Install LLVM
Bun requires LLVM 16 (`clang` is part of LLVM). This version requirement is to match WebKit (precompiled), as mismatching versions will cause memory allocation failures at runtime. In most cases, you can install LLVM through your system package manager:
Bun requires LLVM 18 (`clang` is part of LLVM). This version requirement is to match WebKit (precompiled), as mismatching versions will cause memory allocation failures at runtime. In most cases, you can install LLVM through your system package manager:
{% codetabs group="os" %}
@@ -89,7 +84,7 @@ $ sudo zypper install clang16 lld16 llvm16
If none of the above solutions apply, you will have to install it [manually](https://github.com/llvm/llvm-project/releases/tag/llvmorg-16.0.6).
Make sure Clang/LLVM 16 is in your path:
Make sure Clang/LLVM 18 is in your path:
```bash
$ which clang-16

2
LATEST
View File

@@ -1 +1 @@
1.1.38
1.1.42

View File

@@ -66,9 +66,10 @@ TRIPLET = $(OS_NAME)-$(ARCH_NAME)
PACKAGE_NAME = bun-$(TRIPLET)
PACKAGES_REALPATH = $(realpath packages)
PACKAGE_DIR = $(PACKAGES_REALPATH)/$(PACKAGE_NAME)
BUILD_REALPATH = $(realpath build)
DEBUG_PACKAGE_DIR = $(PACKAGES_REALPATH)/debug-$(PACKAGE_NAME)
RELEASE_BUN = $(PACKAGE_DIR)/bun
DEBUG_BIN = $(DEBUG_PACKAGE_DIR)
DEBUG_BIN = $(BUILD_REALPATH)/debug
DEBUG_BUN = $(DEBUG_BIN)/bun-debug
BUILD_ID = $(shell cat ./src/build-id)
PACKAGE_JSON_VERSION = $(BUN_BASE_VERSION).$(BUILD_ID)
@@ -905,7 +906,7 @@ zig-win32:
# Hardened runtime will not work with debugging
bun-codesign-debug:
codesign --entitlements $(realpath entitlements.debug.plist) --force --timestamp --sign "$(CODESIGN_IDENTITY)" -vvvv --deep --strict $(DEBUG_BUN)
codesign --entitlements $(realpath entitlements.debug.plist) --force --timestamp --sign - -vvvv --deep --strict $(DEBUG_BUN)
bun-codesign-release-local:
codesign --entitlements $(realpath entitlements.plist) --options runtime --force --timestamp --sign "$(CODESIGN_IDENTITY)" -vvvv --deep --strict $(RELEASE_BUN)

Binary file not shown.

View File

@@ -13,7 +13,7 @@
"execa": "^8.0.1",
"fast-glob": "3.3.1",
"fdir": "^6.1.0",
"mitata": "^1.0.10",
"mitata": "^1.0.25",
"react": "^18.3.1",
"react-dom": "^18.3.1",
"string-width": "7.1.0",

View File

@@ -0,0 +1,27 @@
import { Buffer } from "node:buffer";
import { bench, run } from "../runner.mjs";
const variations = [
["latin1", "hello world"],
["utf16", "hello emoji 🤔"],
];
for (const [label, string] of variations) {
const big = Buffer.alloc(1000000, string).toString();
const small = Buffer.from(string).toString();
const substring = big.slice(0, big.length - 2);
bench(`${substring.length}`, () => {
return Buffer.byteLength(substring, "utf8");
});
bench(`${small.length}`, () => {
return Buffer.byteLength(small);
});
bench(`${big.length}`, () => {
return Buffer.byteLength(big);
});
}
await run();

View File

@@ -1,20 +1,14 @@
import { noOpForTesting as noop } from "bun:internal-for-testing";
import { bench, run } from "../runner.mjs";
// These are no-op C++ functions that are exported to JS.
const lazy = globalThis[Symbol.for("Bun.lazy")];
const noop = lazy("noop");
const fn = noop.function;
const regular = noop.functionRegular;
const callback = noop.callback;
bench("C++ callback into JS", () => {
callback(() => {});
});
bench("C++ fn regular", () => {
regular();
});
bench("C++ fn", () => {
fn();
});

View File

@@ -0,0 +1,37 @@
import { bench, run } from "../runner.mjs";
import { brotliCompress, brotliDecompress, createBrotliCompress, createBrotliDecompress } from "node:zlib";
import { promisify } from "node:util";
import { pipeline } from "node:stream/promises";
import { Readable } from "node:stream";
import { readFileSync } from "node:fs";
const brotliCompressAsync = promisify(brotliCompress);
const brotliDecompressAsync = promisify(brotliDecompress);
const testData =
process.argv.length > 2
? readFileSync(process.argv[2])
: Buffer.alloc(1024 * 1024 * 16, "abcdefghijklmnopqrstuvwxyz");
let compressed;
bench("brotli compress", async () => {
compressed = await brotliCompressAsync(testData);
});
bench("brotli decompress", async () => {
await brotliDecompressAsync(compressed);
});
bench("brotli compress stream", async () => {
const source = Readable.from([testData]);
const compress = createBrotliCompress();
await pipeline(source, compress);
});
bench("brotli decompress stream", async () => {
const source = Readable.from([compressed]);
const decompress = createBrotliDecompress();
await pipeline(source, decompress);
});
await run();

62
bench/snippets/zlib.mjs Normal file
View File

@@ -0,0 +1,62 @@
import { bench, run } from "../runner.mjs";
import zlib from "node:zlib";
import { promisify } from "node:util";
const deflate = promisify(zlib.deflate);
const inflate = promisify(zlib.inflate);
const short = "Hello World!";
const long = "Hello World!".repeat(1024);
const veryLong = "Hello World!".repeat(10240);
// Pre-compress some data for decompression tests
const shortBuf = Buffer.from(short);
const longBuf = Buffer.from(long);
const veryLongBuf = Buffer.from(veryLong);
let [shortCompressed, longCompressed, veryLongCompressed] = await Promise.all([
deflate(shortBuf, { level: 6 }),
deflate(longBuf, { level: 6 }),
deflate(veryLongBuf, { level: 6 }),
]);
const format = new Intl.NumberFormat("en-US", { notation: "compact", unit: "byte" });
// Compression tests at different levels
bench(`deflate ${format.format(short.length)}B (level 1)`, async () => {
await deflate(shortBuf, { level: 1 });
});
bench(`deflate ${format.format(short.length)} (level 6)`, async () => {
await deflate(shortBuf, { level: 6 });
});
bench(`deflate ${format.format(long.length)} (level 1)`, async () => {
await deflate(longBuf, { level: 1 });
});
bench(`deflate ${format.format(long.length)} (level 6)`, async () => {
await deflate(longBuf, { level: 6 });
});
bench(`deflate ${format.format(veryLong.length)} (level 1)`, async () => {
await deflate(veryLongBuf, { level: 1 });
});
bench(`deflate ${format.format(veryLong.length)} (level 6)`, async () => {
await deflate(veryLongBuf, { level: 6 });
});
// Decompression tests
bench(`inflate ${format.format(short.length)}`, async () => {
await inflate(shortCompressed);
});
bench(`inflate ${format.format(long.length)}`, async () => {
await inflate(longCompressed);
});
bench(`inflate ${format.format(veryLong.length)}`, async () => {
await inflate(veryLongCompressed);
});
await run();

View File

@@ -328,6 +328,12 @@ pub fn build(b: *Build) !void {
});
}
// zig build translate-c-headers
{
const step = b.step("translate-c", "Copy generated translated-c-headers.zig to zig-out");
step.dependOn(&b.addInstallFile(getTranslateC(b, b.host, .Debug).getOutput(), "translated-c-headers.zig").step);
}
// zig build enum-extractor
{
// const step = b.step("enum-extractor", "Extract enum definitions (invoked by a code generator)");
@@ -380,6 +386,25 @@ pub fn addMultiCheck(
}
}
fn getTranslateC(b: *Build, target: std.Build.ResolvedTarget, optimize: std.builtin.OptimizeMode) *Step.TranslateC {
const translate_c = b.addTranslateC(.{
.root_source_file = b.path("src/c-headers-for-zig.h"),
.target = target,
.optimize = optimize,
.link_libc = true,
});
inline for ([_](struct { []const u8, bool }){
.{ "WINDOWS", translate_c.target.result.os.tag == .windows },
.{ "POSIX", translate_c.target.result.os.tag != .windows },
.{ "LINUX", translate_c.target.result.os.tag == .linux },
.{ "DARWIN", translate_c.target.result.os.tag.isDarwin() },
}) |entry| {
const str, const value = entry;
translate_c.defineCMacroRaw(b.fmt("{s}={d}", .{ str, @intFromBool(value) }));
}
return translate_c;
}
pub fn addBunObject(b: *Build, opts: *BunBuildOptions) *Compile {
const obj = b.addObject(.{
.name = if (opts.optimize == .Debug) "bun-debug" else "bun",
@@ -428,13 +453,8 @@ pub fn addBunObject(b: *Build, opts: *BunBuildOptions) *Compile {
addInternalPackages(b, obj, opts);
obj.root_module.addImport("build_options", opts.buildOptionsModule(b));
const translate_plugin_api = b.addTranslateC(.{
.root_source_file = b.path("./packages/bun-native-bundler-plugin-api/bundler_plugin.h"),
.target = opts.target,
.optimize = opts.optimize,
.link_libc = true,
});
obj.root_module.addImport("bun-native-bundler-plugin-api", translate_plugin_api.createModule());
const translate_c = getTranslateC(b, opts.target, opts.optimize);
obj.root_module.addImport("translated-c-headers", translate_c.createModule());
return obj;
}

View File

@@ -176,6 +176,10 @@ if(LINUX)
DESCRIPTION "Disable relocation read-only (RELRO)"
-Wl,-z,norelro
)
register_compiler_flags(
DESCRIPTION "Disable semantic interposition"
-fno-semantic-interposition
)
endif()
# --- Assertions ---

View File

@@ -291,7 +291,7 @@ function(find_command)
set_property(GLOBAL PROPERTY ${FIND_NAME} "${exe}: ${reason}" APPEND)
if(version)
satisfies_range(${version} ${${FIND_VERSION_VARIABLE}} ${variable})
satisfies_range(${version} ${FIND_VERSION} ${variable})
set(${variable} ${${variable}} PARENT_SCOPE)
endif()
endfunction()

View File

@@ -67,13 +67,7 @@ optionx(ENABLE_ASSERTIONS BOOL "If debug assertions should be enabled" DEFAULT $
optionx(ENABLE_CANARY BOOL "If canary features should be enabled" DEFAULT ON)
if(ENABLE_CANARY AND BUILDKITE)
execute_process(
COMMAND buildkite-agent meta-data get "canary"
OUTPUT_VARIABLE DEFAULT_CANARY_REVISION
OUTPUT_STRIP_TRAILING_WHITESPACE
)
elseif(ENABLE_CANARY)
if(ENABLE_CANARY)
set(DEFAULT_CANARY_REVISION "1")
else()
set(DEFAULT_CANARY_REVISION "0")

View File

@@ -576,6 +576,7 @@ set_property(DIRECTORY APPEND PROPERTY CMAKE_CONFIGURE_DEPENDS "build.zig")
set(BUN_USOCKETS_SOURCE ${CWD}/packages/bun-usockets)
# hand written cpp source files. Full list of "source" code (including codegen) is in BUN_CPP_SOURCES
file(GLOB BUN_CXX_SOURCES ${CONFIGURE_DEPENDS}
${CWD}/src/io/*.cpp
${CWD}/src/bun.js/modules/*.cpp
@@ -599,7 +600,8 @@ file(GLOB BUN_C_SOURCES ${CONFIGURE_DEPENDS}
)
if(WIN32)
list(APPEND BUN_C_SOURCES ${CWD}/src/bun.js/bindings/windows/musl-memmem.c)
list(APPEND BUN_CXX_SOURCES ${CWD}/src/bun.js/bindings/windows/rescle.cpp)
list(APPEND BUN_CXX_SOURCES ${CWD}/src/bun.js/bindings/windows/rescle-binding.cpp)
endif()
register_repository(
@@ -632,6 +634,7 @@ register_command(
list(APPEND BUN_CPP_SOURCES
${BUN_C_SOURCES}
${BUN_CXX_SOURCES}
${BUN_ERROR_CODE_OUTPUTS}
${VENDOR_PATH}/picohttpparser/picohttpparser.c
${NODEJS_HEADERS_PATH}/include/node/node_version.h
${BUN_ZIG_GENERATED_CLASSES_OUTPUTS}
@@ -648,11 +651,19 @@ if(WIN32)
set(Bun_VERSION_WITH_TAG ${VERSION})
endif()
set(BUN_ICO_PATH ${CWD}/src/bun.ico)
configure_file(${CWD}/src/bun.ico ${CODEGEN_PATH}/bun.ico COPYONLY)
configure_file(
${CWD}/src/windows-app-info.rc
${CODEGEN_PATH}/windows-app-info.rc
@ONLY
)
list(APPEND BUN_CPP_SOURCES ${CODEGEN_PATH}/windows-app-info.rc)
add_custom_command(
OUTPUT ${CODEGEN_PATH}/windows-app-info.res
COMMAND rc.exe /fo ${CODEGEN_PATH}/windows-app-info.res ${CODEGEN_PATH}/windows-app-info.rc
DEPENDS ${CODEGEN_PATH}/windows-app-info.rc ${CODEGEN_PATH}/bun.ico
COMMENT "Adding Windows resource file ${CODEGEN_PATH}/windows-app-info.res with ico in ${CODEGEN_PATH}/bun.ico"
)
set(WINDOWS_RESOURCES ${CODEGEN_PATH}/windows-app-info.res)
endif()
# --- Executable ---
@@ -660,7 +671,7 @@ endif()
set(BUN_CPP_OUTPUT ${BUILD_PATH}/${CMAKE_STATIC_LIBRARY_PREFIX}${bun}${CMAKE_STATIC_LIBRARY_SUFFIX})
if(BUN_LINK_ONLY)
add_executable(${bun} ${BUN_CPP_OUTPUT} ${BUN_ZIG_OUTPUT})
add_executable(${bun} ${BUN_CPP_OUTPUT} ${BUN_ZIG_OUTPUT} ${WINDOWS_RESOURCES})
set_target_properties(${bun} PROPERTIES LINKER_LANGUAGE CXX)
target_link_libraries(${bun} PRIVATE ${BUN_CPP_OUTPUT})
elseif(BUN_CPP_ONLY)
@@ -678,7 +689,7 @@ elseif(BUN_CPP_ONLY)
${BUN_CPP_OUTPUT}
)
else()
add_executable(${bun} ${BUN_CPP_SOURCES})
add_executable(${bun} ${BUN_CPP_SOURCES} ${WINDOWS_RESOURCES})
target_link_libraries(${bun} PRIVATE ${BUN_ZIG_OUTPUT})
endif()
@@ -848,7 +859,7 @@ endif()
if(WIN32)
target_link_options(${bun} PUBLIC
/STACK:0x1200000,0x100000
/STACK:0x1200000,0x200000
/errorlimit:0
)
if(RELEASE)
@@ -884,48 +895,28 @@ endif()
if(LINUX)
if(NOT ABI STREQUAL "musl")
if(ARCH STREQUAL "aarch64")
target_link_options(${bun} PUBLIC
-Wl,--wrap=fcntl64
-Wl,--wrap=statx
)
endif()
if(ARCH STREQUAL "x64")
target_link_options(${bun} PUBLIC
-Wl,--wrap=fcntl
-Wl,--wrap=fcntl64
-Wl,--wrap=fstat
-Wl,--wrap=fstat64
-Wl,--wrap=fstatat
-Wl,--wrap=fstatat64
-Wl,--wrap=lstat
-Wl,--wrap=lstat64
-Wl,--wrap=mknod
-Wl,--wrap=mknodat
-Wl,--wrap=stat
-Wl,--wrap=stat64
-Wl,--wrap=statx
)
endif()
# on arm64
if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm|ARM|arm64|ARM64|aarch64|AARCH64")
target_link_options(${bun} PUBLIC
-Wl,--wrap=cosf
-Wl,--wrap=exp
-Wl,--wrap=expf
-Wl,--wrap=fmod
-Wl,--wrap=fmodf
-Wl,--wrap=fcntl64
-Wl,--wrap=log
-Wl,--wrap=log10f
-Wl,--wrap=log2
-Wl,--wrap=log2f
-Wl,--wrap=logf
-Wl,--wrap=pow
-Wl,--wrap=powf
-Wl,--wrap=sincosf
-Wl,--wrap=sinf
-Wl,--wrap=tanf
)
else()
target_link_options(${bun} PUBLIC
-Wl,--wrap=exp
-Wl,--wrap=expf
-Wl,--wrap=log2f
-Wl,--wrap=logf
-Wl,--wrap=powf
)
endif()
endif()
if(NOT ABI STREQUAL "musl")
@@ -954,7 +945,7 @@ if(LINUX)
-Wl,-z,combreloc
-Wl,--no-eh-frame-hdr
-Wl,--sort-section=name
-Wl,--hash-style=gnu
-Wl,--hash-style=both
-Wl,--build-id=sha1 # Better for debugging than default
-Wl,-Map=${bun}.linker-map
)
@@ -966,6 +957,7 @@ if(WIN32)
set(BUN_SYMBOLS_PATH ${CWD}/src/symbols.def)
target_link_options(${bun} PUBLIC /DEF:${BUN_SYMBOLS_PATH})
elseif(APPLE)
set(BUN_SYMBOLS_PATH ${CWD}/src/symbols.txt)
target_link_options(${bun} PUBLIC -exported_symbols_list ${BUN_SYMBOLS_PATH})
else()

View File

@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
c-ares/c-ares
COMMIT
41ee334af3e3d0027dca5e477855d0244936bd49
4f4912bce7374f787b10576851b687935f018e17
)
register_cmake_command(

View File

@@ -18,7 +18,7 @@ register_cmake_command(
-DENABLE_INSTALL=OFF
-DENABLE_TEST=OFF
-DENABLE_WERROR=OFF
-DENABLE_BZIP2=OFF
-DENABLE_BZip2=OFF
-DENABLE_CAT=OFF
-DENABLE_EXPAT=OFF
-DENABLE_ICONV=OFF

View File

@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
ebiggers/libdeflate
COMMIT
9d624d1d8ba82c690d6d6be1d0a961fc5a983ea4
733848901289eca058804ca0737f8796875204c8
)
register_cmake_command(

View File

@@ -5,6 +5,11 @@ if(NOT ENABLE_CCACHE OR CACHE_STRATEGY STREQUAL "none")
return()
endif()
if (CI AND NOT APPLE)
setenv(CCACHE_DISABLE 1)
return()
endif()
find_command(
VARIABLE
CCACHE_PROGRAM
@@ -38,7 +43,8 @@ setenv(CCACHE_FILECLONE 1)
setenv(CCACHE_STATSLOG ${BUILD_PATH}/ccache.log)
if(CI)
setenv(CCACHE_SLOPPINESS "pch_defines,time_macros,locale,clang_index_store,gcno_cwd,include_file_ctime,include_file_mtime")
# FIXME: Does not work on Ubuntu 18.04
# setenv(CCACHE_SLOPPINESS "pch_defines,time_macros,locale,clang_index_store,gcno_cwd,include_file_ctime,include_file_mtime")
else()
setenv(CCACHE_SLOPPINESS "pch_defines,time_macros,locale,random_seed,clang_index_store,gcno_cwd")
endif()

View File

@@ -1,14 +1,18 @@
optionx(ENABLE_LLVM BOOL "If LLVM should be used for compilation" DEFAULT ON)
set(DEFAULT_ENABLE_LLVM ON)
# if target is bun-zig, set ENABLE_LLVM to OFF
if(TARGET bun-zig)
set(DEFAULT_ENABLE_LLVM OFF)
endif()
optionx(ENABLE_LLVM BOOL "If LLVM should be used for compilation" DEFAULT ${DEFAULT_ENABLE_LLVM})
if(NOT ENABLE_LLVM)
return()
endif()
if(CMAKE_HOST_WIN32 OR CMAKE_HOST_APPLE OR EXISTS "/etc/alpine-release")
set(DEFAULT_LLVM_VERSION "18.1.8")
else()
set(DEFAULT_LLVM_VERSION "16.0.6")
endif()
set(DEFAULT_LLVM_VERSION "18.1.8")
optionx(LLVM_VERSION STRING "The version of LLVM to use" DEFAULT ${DEFAULT_LLVM_VERSION})
@@ -73,7 +77,7 @@ macro(find_llvm_command variable command)
VERSION_VARIABLE LLVM_VERSION
COMMAND ${commands}
PATHS ${LLVM_PATHS}
VERSION ${LLVM_VERSION}
VERSION >=${LLVM_VERSION_MAJOR}.1.0
)
list(APPEND CMAKE_ARGS -D${variable}=${${variable}})
endmacro()

View File

@@ -2,7 +2,7 @@ option(WEBKIT_VERSION "The version of WebKit to use")
option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading")
if(NOT WEBKIT_VERSION)
set(WEBKIT_VERSION 8f9ae4f01a047c666ef548864294e01df731d4ea)
set(WEBKIT_VERSION e1a802a2287edfe7f4046a9dd8307c8b59f5d816)
endif()
if(WEBKIT_LOCAL)

View File

@@ -179,16 +179,16 @@ type Flags = string | string[];
These are flags like `-I` for include directories and `-D` for preprocessor definitions.
#### `defines: Record<string, string>`
#### `define: Record<string, string>`
The `defines` is an optional object that should be passed to the TinyCC compiler.
The `define` is an optional object that should be passed to the TinyCC compiler.
```ts
type Defines = Record<string, string>;
cc({
source: "hello.c",
defines: {
define: {
"NDEBUG": "1",
},
});

View File

@@ -234,7 +234,7 @@ To prefetch a DNS entry, you can use the `dns.prefetch` API. This API is useful
```ts
import { dns } from "bun";
dns.prefetch("bun.sh", 443);
dns.prefetch("bun.sh");
```
#### DNS caching

549
docs/api/s3.md Normal file
View File

@@ -0,0 +1,549 @@
Production servers often read, upload, and write files to S3-compatible object storage services instead of the local filesystem. Historically, that means local filesystem APIs you use in development can't be used in production. When you use Bun, things are different.
Bun provides fast, native bindings for interacting with S3-compatible object storage services. Bun's S3 API is designed to be simple and feel similar to fetch's `Response` and `Blob` APIs (like Bun's local filesystem APIs).
```ts
import { s3, write, S3 } from "bun";
const metadata = await s3("123.json", {
accessKeyId: "your-access-key",
secretAccessKey: "your-secret-key",
bucket: "my-bucket",
// endpoint: "https://s3.us-east-1.amazonaws.com",
});
// Download from S3 as JSON
const data = await metadata.json();
// Upload to S3
await write(metadata, JSON.stringify({ name: "John", age: 30 }));
// Presign a URL (synchronous - no network request needed)
const url = metadata.presign({
acl: "public-read",
expiresIn: 60 * 60 * 24, // 1 day
});
```
S3 is the [de facto standard](https://en.wikipedia.org/wiki/De_facto_standard) internet filesystem. You can use Bun's S3 API with S3-compatible storage services like:
- AWS S3
- Cloudflare R2
- DigitalOcean Spaces
- MinIO
- Backblaze B2
- ...and any other S3-compatible storage service
## Basic Usage
There are several ways to interact with Bun's S3 API.
### Using `Bun.s3()`
The `s3()` helper function is used to create one-off `S3File` instances for a single file.
```ts
import { s3 } from "bun";
// Using the s3() helper
const s3file = s3("my-file.txt", {
accessKeyId: "your-access-key",
secretAccessKey: "your-secret-key",
bucket: "my-bucket",
// endpoint: "https://s3.us-east-1.amazonaws.com", // optional
// endpoint: "https://<account-id>.r2.cloudflarestorage.com", // Cloudflare R2
// endpoint: "https://<region>.digitaloceanspaces.com", // DigitalOcean Spaces
// endpoint: "http://localhost:9000", // MinIO
});
```
### Reading Files
You can read files from S3 using similar methods to Bun's file system APIs:
```ts
// Read an S3File as text
const text = await s3file.text();
// Read an S3File as JSON
const json = await s3file.json();
// Read an S3File as an ArrayBuffer
const buffer = await s3file.arrayBuffer();
// Get only the first 1024 bytes
const partial = await s3file.slice(0, 1024).text();
// Stream the file
const stream = s3file.stream();
for await (const chunk of stream) {
console.log(chunk);
}
```
## Writing Files
Writing to S3 is just as simple:
```ts
// Write a string (replacing the file)
await s3file.write("Hello World!");
// Write with content type
await s3file.write(JSON.stringify({ name: "John", age: 30 }), {
type: "application/json",
});
// Write using a writer (streaming)
const writer = s3file.writer({ type: "application/json" });
writer.write("Hello");
writer.write(" World!");
await writer.end();
// Write using Bun.write
await Bun.write(s3file, "Hello World!");
```
### Working with large files (streams)
Bun automatically handles multipart uploads for large files and provides streaming capabilities. The same API that works for local files also works for S3 files.
```ts
// Write a large file
const bigFile = Buffer.alloc(10 * 1024 * 1024); // 10MB
const writer = s3file.writer({
// Automatically retry on network errors up to 3 times
retry: 3,
// Queue up to 10 requests at a time
queueSize: 10,
// Upload in 5 MB chunks
partSize: 5 * 1024 * 1024,
});
for (let i = 0; i < 10; i++) {
await writer.write(bigFile);
}
await writer.end();
```
## Presigning URLs
When your production service needs to let users upload files to your server, it's often more reliable for the user to upload directly to S3 instead of your server acting as an intermediary.
To facilitate this, you can presign URLs for S3 files. This generates a URL with a signature that allows a user to securely upload that specific file to S3, without exposing your credentials or granting them unnecessary access to your bucket.
```ts
// Generate a presigned URL that expires in 24 hours (default)
const url = s3file.presign();
// Custom expiration time (in seconds)
const url2 = s3file.presign({ expiresIn: 3600 }); // 1 hour
// Using static method
const url3 = Bun.S3.presign("my-file.txt", {
bucket: "my-bucket",
accessKeyId: "your-access-key",
secretAccessKey: "your-secret-key",
// endpoint: "https://s3.us-east-1.amazonaws.com",
// endpoint: "https://<account-id>.r2.cloudflarestorage.com", // Cloudflare R2
expiresIn: 3600,
});
```
### Setting ACLs
To set an ACL (access control list) on a presigned URL, pass the `acl` option:
```ts
const url = s3file.presign({
acl: "public-read",
expiresIn: 3600,
});
```
You can pass any of the following ACLs:
| ACL | Explanation |
| ----------------------------- | ------------------------------------------------------------------- |
| `"public-read"` | The object is readable by the public. |
| `"private"` | The object is readable only by the bucket owner. |
| `"public-read-write"` | The object is readable and writable by the public. |
| `"authenticated-read"` | The object is readable by the bucket owner and authenticated users. |
| `"aws-exec-read"` | The object is readable by the AWS account that made the request. |
| `"bucket-owner-read"` | The object is readable by the bucket owner. |
| `"bucket-owner-full-control"` | The object is readable and writable by the bucket owner. |
| `"log-delivery-write"` | The object is writable by AWS services used for log delivery. |
### Expiring URLs
To set an expiration time for a presigned URL, pass the `expiresIn` option.
```ts
const url = s3file.presign({
// Seconds
expiresIn: 3600, // 1 hour
});
```
### `method`
To set the HTTP method for a presigned URL, pass the `method` option.
```ts
const url = s3file.presign({
method: "PUT",
// method: "DELETE",
// method: "GET",
// method: "HEAD",
// method: "POST",
// method: "PUT",
});
```
### `new Response(S3File)`
To quickly redirect users to a presigned URL for an S3 file, you can pass an `S3File` instance to a `Response` object as the body.
```ts
const response = new Response(s3file);
console.log(response);
```
This will automatically redirect the user to the presigned URL for the S3 file, saving you the memory, time, and bandwidth cost of downloading the file to your server and sending it back to the user.
```ts
Response (0 KB) {
ok: false,
url: "",
status: 302,
statusText: "",
headers: Headers {
"location": "https://<account-id>.r2.cloudflarestorage.com/...",
},
redirected: true,
bodyUsed: false
}
```
## Support for S3-Compatible Services
Bun's S3 implementation works with any S3-compatible storage service. Just specify the appropriate endpoint:
```ts
import { s3 } from "bun";
// CloudFlare R2
const r2file = s3("my-file.txt", {
accessKeyId: "access-key",
secretAccessKey: "secret-key",
bucket: "my-bucket",
endpoint: "https://<account-id>.r2.cloudflarestorage.com",
});
// DigitalOcean Spaces
const spacesFile = s3("my-file.txt", {
accessKeyId: "access-key",
secretAccessKey: "secret-key",
bucket: "my-bucket",
endpoint: "https://<region>.digitaloceanspaces.com",
});
// MinIO
const minioFile = s3("my-file.txt", {
accessKeyId: "access-key",
secretAccessKey: "secret-key",
bucket: "my-bucket",
endpoint: "http://localhost:9000",
});
```
## Credentials
Credentials are one of the hardest parts of using S3, and we've tried to make it as easy as possible. By default, Bun reads the following environment variables for credentials.
| Option name | Environment variable |
| ----------------- | ---------------------- |
| `accessKeyId` | `S3_ACCESS_KEY_ID` |
| `secretAccessKey` | `S3_SECRET_ACCESS_KEY` |
| `region` | `S3_REGION` |
| `endpoint` | `S3_ENDPOINT` |
| `bucket` | `S3_BUCKET` |
| `sessionToken` | `S3_SESSION_TOKEN` |
If the `S3_*` environment variable is not set, Bun will also check for the `AWS_*` environment variable, for each of the above options.
| Option name | Fallback environment variable |
| ----------------- | ----------------------------- |
| `accessKeyId` | `AWS_ACCESS_KEY_ID` |
| `secretAccessKey` | `AWS_SECRET_ACCESS_KEY` |
| `region` | `AWS_REGION` |
| `endpoint` | `AWS_ENDPOINT` |
| `bucket` | `AWS_BUCKET` |
| `sessionToken` | `AWS_SESSION_TOKEN` |
These environment variables are read from [`.env` files](/docs/runtime/env) or from the process environment at initialization time (`process.env` is not used for this).
These defaults are overriden by the options you pass to `s3(credentials)`, `new Bun.S3(credentials)`, or any of the methods that accept credentials. So if, for example, you use the same credentials for different buckets, you can set the credentials once in your `.env` file and then pass `bucket: "my-bucket"` to the `s3()` helper function without having to specify all the credentials again.
### `S3` Buckets
Passing around all of these credentials can be cumbersome. To make it easier, you can create a `S3` bucket instance.
```ts
import { S3 } from "bun";
const bucket = new S3({
accessKeyId: "your-access-key",
secretAccessKey: "your-secret-key",
bucket: "my-bucket",
// sessionToken: "..."
endpoint: "https://s3.us-east-1.amazonaws.com",
// endpoint: "https://<account-id>.r2.cloudflarestorage.com", // Cloudflare R2
// endpoint: "http://localhost:9000", // MinIO
});
// bucket is a function that creates `S3File` instances (lazy)
const file = bucket("my-file.txt");
// Write to S3
await file.write("Hello World!");
// Read from S3
const text = await file.text();
// Write using a Response
await file.write(new Response("Hello World!"));
// Presign a URL
const url = file.presign({
expiresIn: 60 * 60 * 24, // 1 day
acl: "public-read",
});
// Delete the file
await file.unlink();
```
#### Read a file from an `S3` bucket
The `S3` bucket instance is itself a function that creates `S3File` instances. It provides a more convenient API for interacting with S3.
```ts
const s3file = bucket("my-file.txt");
const text = await s3file.text();
const json = await s3file.json();
const bytes = await s3file.bytes();
const arrayBuffer = await s3file.arrayBuffer();
```
#### Write a file to S3
To write a file to the bucket, you can use the `write` method.
```ts
const bucket = new Bun.S3({
accessKeyId: "your-access-key",
secretAccessKey: "your-secret-key",
endpoint: "https://s3.us-east-1.amazonaws.com",
bucket: "my-bucket",
});
await bucket.write("my-file.txt", "Hello World!");
await bucket.write("my-file.txt", new Response("Hello World!"));
```
You can also call `.write` on the `S3File` instance created by the `S3` bucket instance.
```ts
const s3file = bucket("my-file.txt");
await s3file.write("Hello World!", {
type: "text/plain",
});
await s3file.write(new Response("Hello World!"));
```
#### Delete a file from S3
To delete a file from the bucket, you can use the `delete` method.
```ts
const bucket = new Bun.S3({
accessKeyId: "your-access-key",
secretAccessKey: "your-secret-key",
bucket: "my-bucket",
});
await bucket.delete("my-file.txt");
```
You can also use the `unlink` method, which is an alias for `delete`.
```ts
// "delete" and "unlink" are aliases of each other.
await bucket.unlink("my-file.txt");
```
## `S3File`
`S3File` instances are created by calling the `S3` instance method or the `s3()` helper function. Like `Bun.file()`, `S3File` instances are lazy. They don't refer to something that necessarily exists at the time of creation. That's why all the methods that don't involve network requests are fully synchronous.
```ts
interface S3File extends Blob {
slice(start: number, end?: number): S3File;
exists(): Promise<boolean>;
unlink(): Promise<void>;
presign(options: S3Options): string;
text(): Promise<string>;
json(): Promise<any>;
bytes(): Promise<Uint8Array>;
arrayBuffer(): Promise<ArrayBuffer>;
stream(options: S3Options): ReadableStream;
write(
data:
| string
| Uint8Array
| ArrayBuffer
| Blob
| ReadableStream
| Response
| Request,
options?: BlobPropertyBag,
): Promise<void>;
readonly size: Promise<number>;
// ... more omitted for brevity
}
```
Like `Bun.file()`, `S3File` extends [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob), so all the methods that are available on `Blob` are also available on `S3File`. The same API for reading data from a local file is also available for reading data from S3.
| Method | Output |
| ---------------------------- | ---------------- |
| `await s3File.text()` | `string` |
| `await s3File.bytes()` | `Uint8Array` |
| `await s3File.json()` | `JSON` |
| `await s3File.stream()` | `ReadableStream` |
| `await s3File.arrayBuffer()` | `ArrayBuffer` |
That means using `S3File` instances with `fetch()`, `Response`, and other web APIs that accept `Blob` instances just works.
### Partial reads
To read a partial range of a file, you can use the `slice` method.
```ts
const partial = s3file.slice(0, 1024);
// Read the partial range as a Uint8Array
const bytes = await partial.bytes();
// Read the partial range as a string
const text = await partial.text();
```
Internally, this works by using the HTTP `Range` header to request only the bytes you want. This `slice` method is the same as [`Blob.prototype.slice`](https://developer.mozilla.org/en-US/docs/Web/API/Blob/slice).
## Error codes
When Bun's S3 API throws an error, it will have a `code` property that matches one of the following values:
- `ERR_S3_MISSING_CREDENTIALS`
- `ERR_S3_INVALID_METHOD`
- `ERR_S3_INVALID_PATH`
- `ERR_S3_INVALID_ENDPOINT`
- `ERR_S3_INVALID_SIGNATURE`
- `ERR_S3_INVALID_SESSION_TOKEN`
When the S3 Object Storage service returns an error (that is, not Bun), it will be an `S3Error` instance (an `Error` instance with the name `"S3Error"`).
## `S3` static methods
The `S3` class provides several static methods for interacting with S3.
### `S3.presign`
To generate a presigned URL for an S3 file, you can use the `S3.presign` method.
```ts
import { S3 } from "bun";
const url = S3.presign("my-file.txt", {
accessKeyId: "your-access-key",
secretAccessKey: "your-secret-key",
bucket: "my-bucket",
expiresIn: 3600,
// endpoint: "https://s3.us-east-1.amazonaws.com",
// endpoint: "https://<account-id>.r2.cloudflarestorage.com", // Cloudflare R2
});
```
This is the same as `S3File.prototype.presign` and `new S3(credentials).presign`, as a static method on the `S3` class.
### `S3.exists`
To check if an S3 file exists, you can use the `S3.exists` method.
```ts
import { S3 } from "bun";
const exists = await S3.exists("my-file.txt", {
accessKeyId: "your-access-key",
secretAccessKey: "your-secret-key",
bucket: "my-bucket",
// endpoint: "https://s3.us-east-1.amazonaws.com",
});
```
The same method also works on `S3File` instances.
```ts
const s3file = Bun.s3("my-file.txt", {
accessKeyId: "your-access-key",
secretAccessKey: "your-secret-key",
bucket: "my-bucket",
});
const exists = await s3file.exists();
```
### `S3.size`
To get the size of an S3 file, you can use the `S3.size` method.
```ts
import { S3 } from "bun";
const size = await S3.size("my-file.txt", {
accessKeyId: "your-access-key",
secretAccessKey: "your-secret-key",
bucket: "my-bucket",
// endpoint: "https://s3.us-east-1.amazonaws.com",
});
```
### `S3.unlink`
To delete an S3 file, you can use the `S3.unlink` method.
```ts
import { S3 } from "bun";
await S3.unlink("my-file.txt", {
accessKeyId: "your-access-key",
secretAccessKey: "your-secret-key",
bucket: "my-bucket",
// endpoint: "https://s3.us-east-1.amazonaws.com",
});
```
## s3:// protocol
To make it easier to use the same code for local files and S3 files, the `s3://` protocol is supported in `fetch` and `Bun.file()`.
```ts
const response = await fetch("s3://my-bucket/my-file.txt");
const file = Bun.file("s3://my-bucket/my-file.txt");
```
This is the equivalent of calling `Bun.s3("my-file.txt", { bucket: "my-bucket" })`.
This `s3://` protocol exists to make it easier to use the same code for local files and S3 files.

View File

@@ -771,3 +771,28 @@ console.log(obj); // => { foo: "bar" }
```
Internally, [`structuredClone`](https://developer.mozilla.org/en-US/docs/Web/API/structuredClone) and [`postMessage`](https://developer.mozilla.org/en-US/docs/Web/API/Window/postMessage) serialize and deserialize the same way. This exposes the underlying [HTML Structured Clone Algorithm](https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Structured_clone_algorithm) to JavaScript as an ArrayBuffer.
## `estimateShallowMemoryUsageOf` in `bun:jsc`
The `estimateShallowMemoryUsageOf` function returns a best-effort estimate of the memory usage of an object in bytes, excluding the memory usage of properties or other objects it references. For accurate per-object memory usage, use `Bun.generateHeapSnapshot`.
```js
import { estimateShallowMemoryUsageOf } from "bun:jsc";
const obj = { foo: "bar" };
const usage = estimateShallowMemoryUsageOf(obj);
console.log(usage); // => 16
const buffer = Buffer.alloc(1024 * 1024);
estimateShallowMemoryUsageOf(buffer);
// => 1048624
const req = new Request("https://bun.sh");
estimateShallowMemoryUsageOf(req);
// => 167
const array = Array(1024).fill({ a: 1 });
// Arrays are usually not stored contiguously in memory, so this will not return a useful value (which isn't a bug).
estimateShallowMemoryUsageOf(array);
// => 16
```

View File

@@ -279,6 +279,19 @@ $ bun build --compile --asset-naming="[name].[ext]" ./index.ts
To trim down the size of the executable a little, pass `--minify` to `bun build --compile`. This uses Bun's minifier to reduce the code size. Overall though, Bun's binary is still way too big and we need to make it smaller.
## Windows-specific flags
When compiling a standalone executable on Windows, there are two platform-specific options that can be used to customize metadata on the generated `.exe` file:
- `--windows-icon=path/to/icon.ico` to customize the executable file icon.
- `--windows-hide-console` to disable the background terminal, which can be used for applications that do not need a TTY.
{% callout %}
These flags currently cannot be used when cross-compiling because they depend on Windows APIs.
{% /callout %}
## Unsupported CLI arguments
Currently, the `--compile` flag can only accept a single entrypoint at a time and does not support the following flags:

110
docs/bundler/html.md Normal file
View File

@@ -0,0 +1,110 @@
As of Bun v1.1.43, Bun's bundler now has first-class support for HTML. Build static sites, landing pages, and web applications with zero configuration. Just point Bun at your HTML file and it handles everything else.
```html#index.html
<!doctype html>
<html>
<head>
<link rel="stylesheet" href="./styles.css" />
<script src="./app.ts" type="module"></script>
</head>
<body>
<img src="./logo.png" />
</body>
</html>
```
One command is all you need (won't be experimental after Bun v1.2):
{% codetabs %}
```bash#CLI
$ bun build --experimental-html --experimental-css ./index.html --outdir=dist
```
```ts#API
Bun.build({
entrypoints: ["./index.html"],
outdir: "./dist",
// On by default in Bun v1.2+
html: true,
experimentalCss: true,
});
```
{% /codetabs %}
Bun automatically:
- Bundles, tree-shakes, and optimizes your JavaScript, JSX and TypeScript
- Bundles and optimizes your CSS
- Copies & hashes images and other assets
- Updates all references to local files or packages in your HTML
## Zero Config, Maximum Performance
The HTML bundler is enabled by default after Bun v1.2+. Drop in your existing HTML files and Bun will handle:
- **TypeScript & JSX** - Write modern JavaScript for browsers without the setup
- **CSS** - Bundle CSS stylesheets directly from `<link rel="stylesheet">` or `@import`
- **Images & Assets** - Automatic copying & hashing & rewriting of assets in JavaScript, CSS, and HTML
## Watch mode
You can run `bun build --watch` to watch for changes and rebuild automatically.
You've never seen a watch mode this fast.
## Plugin API
Need more control? Configure the bundler through the JavaScript API and use Bun's builtin `HTMLRewriter` to preprocess HTML.
```ts
await Bun.build({
entrypoints: ["./index.html"],
outdir: "./dist",
html: true,
experimentalCss: true,
minify: true,
plugins: [
{
// A plugin that makes every HTML tag lowercase
name: "lowercase-html-plugin",
setup({ onLoad }) {
const rewriter = new HTMLRewriter().on("*", {
element(element) {
element.tagName = element.tagName.toLowerCase();
},
text(element) {
element.replace(element.text.toLowerCase());
},
});
onLoad({ filter: /\.html$/ }, async args => {
const html = await Bun.file(args.path).text();
return {
// Bun's bundler will scan the HTML for <script> tags, <link rel="stylesheet"> tags, and other assets
// and bundle them automatically
contents: rewriter.transform(html),
loader: "html",
};
});
},
},
],
});
```
## What Gets Processed?
Bun automatically handles all common web assets:
- Scripts (`<script src>`) are run through Bun's JavaScript/TypeScript/JSX bundler
- Stylesheets (`<link rel="stylesheet">`) are run through Bun's CSS parser & bundler
- Images (`<img>`, `<picture>`) are copied and hashed
- Media (`<video>`, `<audio>`, `<source>`) are copied and hashed
- Any `<link>` tag with an `href` attribute pointing to a local file is rewritten to the new path, and hashed
All paths are resolved relative to your HTML file, making it easy to organize your project however you want.

View File

@@ -1259,7 +1259,7 @@ $ bun build ./index.tsx --outdir ./out --drop=console --drop=debugger --drop=any
### `experimentalCss`
Whether to enable _experimental_ support for bundling CSS files. Defaults to `false`.
Whether to enable _experimental_ support for bundling CSS files. Defaults to `false`. In 1.2, this property will be deleted, and CSS bundling will always be enabled.
This supports bundling CSS files imported from JS, as well as CSS entrypoints.
@@ -1275,6 +1275,12 @@ const result = await Bun.build({
{% /codetabs %}
### `throw`
If set to `true`, `Bun.build` will throw on build failure. See the section ["Logs and Errors"](#logs-and-errors) for more details on the error message structure.
In 1.2, this will default to `true`, with the previous behavior as `throw: false`
## Outputs
The `Bun.build` function returns a `Promise<BuildOutput>`, defined as:
@@ -1414,7 +1420,70 @@ Refer to [Bundler > Executables](https://bun.sh/docs/bundler/executables) for co
## Logs and errors
`Bun.build` only throws if invalid options are provided. Read the `success` property to determine if the build was successful; the `logs` property will contain additional details.
<!-- 1.2 documentation -->
<!-- On failure, `Bun.build` returns a rejected promise with an `AggregateError`. This can be logged to the console for pretty printing of the error list, or programmatically read with a `try`/`catch` block.
```ts
try {
const result = await Bun.build({
entrypoints: ["./index.tsx"],
outdir: "./out",
});
} catch (e) {
// TypeScript does not allow annotations on the catch clause
const error = e as AggregateError;
console.error("Build Failed");
// Example: Using the built-in formatter
console.error(error);
// Example: Serializing the failure as a JSON string.
console.error(JSON.stringify(error, null, 2));
}
```
{% callout %}
Most of the time, an explicit `try`/`catch` is not needed, as Bun will neatly print uncaught exceptions. It is enough to just use a top-level `await` on the `Bun.build` call.
{% /callout %}
Each item in `error.errors` is an instance of `BuildMessage` or `ResolveMessage` (subclasses of Error), containing detailed information for each error.
```ts
class BuildMessage {
name: string;
position?: Position;
message: string;
level: "error" | "warning" | "info" | "debug" | "verbose";
}
class ResolveMessage extends BuildMessage {
code: string;
referrer: string;
specifier: string;
importKind: ImportKind;
}
```
On build success, the returned object contains a `logs` property, which contains bundler warnings and info messages.
```ts
const result = await Bun.build({
entrypoints: ["./index.tsx"],
outdir: "./out",
});
if (result.logs.length > 0) {
console.warn("Build succeeded with warnings:");
for (const message of result.logs) {
// Bun will pretty print the message object
console.warn(message);
}
}
``` -->
By default, `Bun.build` only throws if invalid options are provided. Read the `success` property to determine if the build was successful; the `logs` property will contain additional details.
```ts
const result = await Bun.build({
@@ -1457,6 +1526,27 @@ if (!result.success) {
}
```
In Bun 1.2, throwing an aggregate error like this will become the default behavior. You can opt-into it early using the `throw: true` option.
```ts
try {
const result = await Bun.build({
entrypoints: ["./index.tsx"],
outdir: "./out",
});
} catch (e) {
// TypeScript does not allow annotations on the catch clause
const error = e as AggregateError;
console.error("Build Failed");
// Example: Using the built-in formatter
console.error(error);
// Example: Serializing the failure as a JSON string.
console.error(JSON.stringify(error, null, 2));
}
```
## Reference
```ts
@@ -1478,39 +1568,23 @@ interface BuildConfig {
*
* @default "esm"
*/
format?: /**
* ECMAScript Module format
*/
| "esm"
/**
* CommonJS format
* **Experimental**
*/
| "cjs"
/**
* IIFE format
* **Experimental**
*/
| "iife";
format?: "esm" | "cjs" | "iife";
naming?:
| string
| {
chunk?: string;
entry?: string;
asset?: string;
}; // | string;
};
root?: string; // project root
splitting?: boolean; // default true, enable code splitting
plugins?: BunPlugin[];
// manifest?: boolean; // whether to return manifest
external?: string[];
packages?: "bundle" | "external";
publicPath?: string;
define?: Record<string, string>;
// origin?: string; // e.g. http://mydomain.com
loader?: { [k in string]: Loader };
sourcemap?: "none" | "linked" | "inline" | "external" | "linked"; // default: "none", true -> "inline"
sourcemap?: "none" | "linked" | "inline" | "external" | boolean; // default: "none", true -> "inline"
/**
* package.json `exports` conditions used when resolving imports
*
@@ -1519,6 +1593,18 @@ interface BuildConfig {
* https://nodejs.org/api/packages.html#exports
*/
conditions?: Array<string> | string;
/**
* Controls how environment variables are handled during bundling.
*
* Can be one of:
* - `"inline"`: Injects environment variables into the bundled output by converting `process.env.FOO`
* references to string literals containing the actual environment variable values
* - `"disable"`: Disables environment variable injection entirely
* - A string ending in `*`: Inlines environment variables that match the given prefix.
* For example, `"MY_PUBLIC_*"` will only include env vars starting with "MY_PUBLIC_"
*/
env?: "inline" | "disable" | `${string}*`;
minify?:
| boolean
| {
@@ -1536,20 +1622,6 @@ interface BuildConfig {
* Force emitting @__PURE__ annotations even if minify.whitespace is true.
*/
emitDCEAnnotations?: boolean;
// treeshaking?: boolean;
// jsx?:
// | "automatic"
// | "classic"
// | /* later: "preserve" */ {
// runtime?: "automatic" | "classic"; // later: "preserve"
// /** Only works when runtime=classic */
// factory?: string; // default: "React.createElement"
// /** Only works when runtime=classic */
// fragment?: string; // default: "React.Fragment"
// /** Only works when runtime=automatic */
// importSource?: string; // default: "react"
// };
/**
* Generate bytecode for the output. This can dramatically improve cold
@@ -1562,6 +1634,37 @@ interface BuildConfig {
* @default false
*/
bytecode?: boolean;
/**
* Add a banner to the bundled code such as "use client";
*/
banner?: string;
/**
* Add a footer to the bundled code such as a comment block like
*
* `// made with bun!`
*/
footer?: string;
/**
* **Experimental**
*
* Enable CSS support.
*/
experimentalCss?: boolean;
/**
* Drop function calls to matching property accesses.
*/
drop?: string[];
/**
* When set to `true`, the returned promise rejects with an AggregateError when a build failure happens.
* When set to `false`, the `success` property of the returned object will be `false` when a build failure happens.
*
* This defaults to `false` in Bun 1.1 and will change to `true` in Bun 1.2
* as most usage of `Bun.build` forgets to check for errors.
*/
throw?: boolean;
}
interface BuildOutput {
@@ -1619,32 +1722,3 @@ declare class ResolveMessage {
toString(): string;
}
```
<!--
interface BuildManifest {
inputs: {
[path: string]: {
output: {
path: string;
};
imports: {
path: string;
kind: ImportKind;
external?: boolean;
asset?: boolean; // whether the import defaulted to "file" loader
}[];
};
};
outputs: {
[path: string]: {
type: "chunk" | "entrypoint" | "asset";
inputs: { path: string }[];
imports: {
path: string;
kind: ImportKind;
external?: boolean;
}[];
exports: string[];
};
};
} -->

View File

@@ -1,6 +1,6 @@
The Bun bundler implements a set of default loaders out of the box. As a rule of thumb, the bundler and the runtime both support the same set of file types out of the box.
`.js` `.cjs` `.mjs` `.mts` `.cts` `.ts` `.tsx` `.jsx` `.toml` `.json` `.txt` `.wasm` `.node`
`.js` `.cjs` `.mjs` `.mts` `.cts` `.ts` `.tsx` `.jsx` `.toml` `.json` `.txt` `.wasm` `.node` `.html`
Bun uses the file extension to determine which built-in _loader_ should be used to parse the file. Every loader has a name, such as `js`, `tsx`, or `json`. These names are used when building [plugins](https://bun.sh/docs/bundler/plugins) that extend Bun with custom loaders.
@@ -203,6 +203,81 @@ When using a [standalone executable](https://bun.sh/docs/bundler/executables), t
Otherwise, the database to embed is copied into the `outdir` with a hashed filename.
### `html`
**HTML loader**. Default for `.html` after Bun v1.2.0.
To enable the html loader:
- For `Bun.build`: set `html: true`
- For `bun build`: `--experimental-html` CLI flag
You most likely want to use the `html` loader in conjunction with `experimentalCss: true` or `--experimental-css`.
The html loader processes HTML files and bundles any referenced assets. It will:
- Bundle and hash referenced JavaScript files (`<script src="...">`)
- Bundle and hash referenced CSS files (`<link rel="stylesheet" href="...">`)
- Hash referenced images (`<img src="...">`)
- Preserve external URLs (by default, anything starting with `http://` or `https://`)
For example, given this HTML file:
{% codetabs %}
```html#src/index.html
<!DOCTYPE html>
<html>
<body>
<img src="./image.jpg" alt="Local image">
<img src="https://example.com/image.jpg" alt="External image">
<script type="module" src="./script.js"></script>
</body>
</html>
```
{% /codetabs %}
It will output a new HTML file with the bundled assets:
{% codetabs %}
```html#dist/output.html
<!DOCTYPE html>
<html>
<body>
<img src="./image-HASHED.jpg" alt="Local image">
<img src="https://example.com/image.jpg" alt="External image">
<script type="module" src="./output-ALSO-HASHED.js"></script>
</body>
</html>
```
{% /codetabs %}
Under the hood, it uses [`lol-html`](https://github.com/cloudflare/lol-html) to extract script and link tags as entrypoints, and other assets as external.
Currently, the list of selectors is:
- `audio[src]`
- `iframe[src]`
- `img[src]`
- `img[srcset]`
- `link:not([rel~='stylesheet']):not([rel~='modulepreload']):not([rel~='manifest']):not([rel~='icon']):not([rel~='apple-touch-icon'])[href]`
- `link[as='font'][href], link[type^='font/'][href]`
- `link[as='image'][href]`
- `link[as='style'][href]`
- `link[as='video'][href], link[as='audio'][href]`
- `link[as='worker'][href]`
- `link[rel='icon'][href], link[rel='apple-touch-icon'][href]`
- `link[rel='manifest'][href]`
- `link[rel='stylesheet'][href]`
- `script[src]`
- `source[src]`
- `source[srcset]`
- `video[poster]`
- `video[src]`
### `sh` loader
**Bun Shell loader**. Default for `.sh` files

View File

@@ -2,11 +2,47 @@ Bun provides a universal plugin API that can be used to extend both the _runtime
Plugins intercept imports and perform custom loading logic: reading files, transpiling code, etc. They can be used to add support for additional file types, like `.scss` or `.yaml`. In the context of Bun's bundler, plugins can be used to implement framework-level features like CSS extraction, macros, and client-server code co-location.
For more complete documentation of the Plugin API, see [Runtime > Plugins](https://bun.sh/docs/runtime/plugins).
## Lifecycle hooks
Plugins can register callbacks to be run at various points in the lifecycle of a bundle:
- [`onStart()`](#onstart): Run once the bundler has started a bundle
- [`onResolve()`](#onresolve): Run before a module is resolved
- [`onLoad()`](#onload): Run before a module is loaded.
- [`onBeforeParse()`](#onbeforeparse): Run zero-copy native addons in the parser thread before a file is parsed.
### Reference
A rough overview of the types (please refer to Bun's `bun.d.ts` for the full type definitions):
```ts
type PluginBuilder = {
onStart(callback: () => void): void;
onResolve: (
args: { filter: RegExp; namespace?: string },
callback: (args: { path: string; importer: string }) => {
path: string;
namespace?: string;
} | void,
) => void;
onLoad: (
args: { filter: RegExp; namespace?: string },
defer: () => Promise<void>,
callback: (args: { path: string }) => {
loader?: Loader;
contents?: string;
exports?: Record<string, any>;
},
) => void;
config: BuildConfig;
};
type Loader = "js" | "jsx" | "ts" | "tsx" | "css" | "json" | "toml";
```
## Usage
A plugin is defined as simple JavaScript object containing a `name` property and a `setup` function. Register a plugin with Bun using the `plugin` function.
A plugin is defined as a simple JavaScript object containing a `name` property and a `setup` function.
```tsx#myPlugin.ts
import type { BunPlugin } from "bun";
@@ -22,9 +58,343 @@ const myPlugin: BunPlugin = {
This plugin can be passed into the `plugins` array when calling `Bun.build`.
```ts
Bun.build({
await Bun.build({
entrypoints: ["./app.ts"],
outdir: "./out",
plugins: [myPlugin],
});
```
## Plugin lifecycle
### Namespaces
`onLoad` and `onResolve` accept an optional `namespace` string. What is a namespace?
Every module has a namespace. Namespaces are used to prefix the import in transpiled code; for instance, a loader with a `filter: /\.yaml$/` and `namespace: "yaml:"` will transform an import from `./myfile.yaml` into `yaml:./myfile.yaml`.
The default namespace is `"file"` and it is not necessary to specify it, for instance: `import myModule from "./my-module.ts"` is the same as `import myModule from "file:./my-module.ts"`.
Other common namespaces are:
- `"bun"`: for Bun-specific modules (e.g. `"bun:test"`, `"bun:sqlite"`)
- `"node"`: for Node.js modules (e.g. `"node:fs"`, `"node:path"`)
### `onStart`
```ts
onStart(callback: () => void): Promise<void> | void;
```
Registers a callback to be run when the bundler starts a new bundle.
```ts
import { plugin } from "bun";
plugin({
name: "onStart example",
setup(build) {
build.onStart(() => {
console.log("Bundle started!");
});
},
});
```
The callback can return a `Promise`. After the bundle process has initialized, the bundler waits until all `onStart()` callbacks have completed before continuing.
For example:
```ts
const result = await Bun.build({
entrypoints: ["./app.ts"],
outdir: "./dist",
sourcemap: "external",
plugins: [
{
name: "Sleep for 10 seconds",
setup(build) {
build.onStart(async () => {
await Bunlog.sleep(10_000);
});
},
},
{
name: "Log bundle time to a file",
setup(build) {
build.onStart(async () => {
const now = Date.now();
await Bun.$`echo ${now} > bundle-time.txt`;
});
},
},
],
});
```
In the above example, Bun will wait until the first `onStart()` (sleeping for 10 seconds) has completed, _as well as_ the second `onStart()` (writing the bundle time to a file).
Note that `onStart()` callbacks (like every other lifecycle callback) do not have the ability to modify the `build.config` object. If you want to mutate `build.config`, you must do so directly in the `setup()` function.
### `onResolve`
```ts
onResolve(
args: { filter: RegExp; namespace?: string },
callback: (args: { path: string; importer: string }) => {
path: string;
namespace?: string;
} | void,
): void;
```
To bundle your project, Bun walks down the dependency tree of all modules in your project. For each imported module, Bun actually has to find and read that module. The "finding" part is known as "resolving" a module.
The `onResolve()` plugin lifecycle callback allows you to configure how a module is resolved.
The first argument to `onResolve()` is an object with a `filter` and [`namespace`](#what-is-a-namespace) property. The filter is a regular expression which is run on the import string. Effectively, these allow you to filter which modules your custom resolution logic will apply to.
The second argument to `onResolve()` is a callback which is run for each module import Bun finds that matches the `filter` and `namespace` defined in the first argument.
The callback receives as input the _path_ to the matching module. The callback can return a _new path_ for the module. Bun will read the contents of the _new path_ and parse it as a module.
For example, redirecting all imports to `images/` to `./public/images/`:
```ts
import { plugin } from "bun";
plugin({
name: "onResolve example",
setup(build) {
build.onResolve({ filter: /.*/, namespace: "file" }, args => {
if (args.path.startsWith("images/")) {
return {
path: args.path.replace("images/", "./public/images/"),
};
}
});
},
});
```
### `onLoad`
```ts
onLoad(
args: { filter: RegExp; namespace?: string },
defer: () => Promise<void>,
callback: (args: { path: string, importer: string, namespace: string, kind: ImportKind }) => {
loader?: Loader;
contents?: string;
exports?: Record<string, any>;
},
): void;
```
After Bun's bundler has resolved a module, it needs to read the contents of the module and parse it.
The `onLoad()` plugin lifecycle callback allows you to modify the _contents_ of a module before it is read and parsed by Bun.
Like `onResolve()`, the first argument to `onLoad()` allows you to filter which modules this invocation of `onLoad()` will apply to.
The second argument to `onLoad()` is a callback which is run for each matching module _before_ Bun loads the contents of the module into memory.
This callback receives as input the _path_ to the matching module, the _importer_ of the module (the module that imported it), the _namespace_ of the module, and the _kind_ of the module.
The callback can return a new `contents` string for the module as well as a new `loader`.
For example:
```ts
import { plugin } from "bun";
const envPlugin: BunPlugin = {
name: "env plugin",
setup(build) {
build.onLoad({ filter: /env/, namespace: "file" }, args => {
return {
contents: `export default ${JSON.stringify(process.env)}`,
loader: "js",
};
});
},
});
Bun.build({
entrypoints: ["./app.ts"],
outdir: "./dist",
plugins: [envPlugin],
});
// import env from "env"
// env.FOO === "bar"
```
This plugin will transform all imports of the form `import env from "env"` into a JavaScript module that exports the current environment variables.
#### `.defer()`
One of the arguments passed to the `onLoad` callback is a `defer` function. This function returns a `Promise` that is resolved when all _other_ modules have been loaded.
This allows you to delay execution of the `onLoad` callback until all other modules have been loaded.
This is useful for returning contents of a module that depends on other modules.
##### Example: tracking and reporting unused exports
```ts
import { plugin } from "bun";
plugin({
name: "track imports",
setup(build) {
const transpiler = new Bun.Transpiler();
let trackedImports: Record<string, number> = {};
// Each module that goes through this onLoad callback
// will record its imports in `trackedImports`
build.onLoad({ filter: /\.ts/ }, async ({ path }) => {
const contents = await Bun.file(path).arrayBuffer();
const imports = transpiler.scanImports(contents);
for (const i of imports) {
trackedImports[i.path] = (trackedImports[i.path] || 0) + 1;
}
return undefined;
});
build.onLoad({ filter: /stats\.json/ }, async ({ defer }) => {
// Wait for all files to be loaded, ensuring
// that every file goes through the above `onLoad()` function
// and their imports tracked
await defer();
// Emit JSON containing the stats of each import
return {
contents: `export default ${JSON.stringify(trackedImports)}`,
loader: "json",
};
});
},
});
```
Note that the `.defer()` function currently has the limitation that it can only be called once per `onLoad` callback.
## Native plugins
One of the reasons why Bun's bundler is so fast is that it is written in native code and leverages multi-threading to load and parse modules in parallel.
However, one limitation of plugins written in JavaScript is that JavaScript itself is single-threaded.
Native plugins are written as [NAPI](/docs/node-api) modules and can be run on multiple threads. This allows native plugins to run much faster than JavaScript plugins.
In addition, native plugins can skip unnecessary work such as the UTF-8 -> UTF-16 conversion needed to pass strings to JavaScript.
These are the following lifecycle hooks which are available to native plugins:
- [`onBeforeParse()`](#onbeforeparse): Called on any thread before a file is parsed by Bun's bundler.
Native plugins are NAPI modules which expose lifecycle hooks as C ABI functions.
To create a native plugin, you must export a C ABI function which matches the signature of the native lifecycle hook you want to implement.
### Creating a native plugin in Rust
Native plugins are NAPI modules which expose lifecycle hooks as C ABI functions.
To create a native plugin, you must export a C ABI function which matches the signature of the native lifecycle hook you want to implement.
```bash
bun add -g @napi-rs/cli
napi new
```
Then install this crate:
```bash
cargo add bun-native-plugin
```
Now, inside the `lib.rs` file, we'll use the `bun_native_plugin::bun` proc macro to define a function which
will implement our native plugin.
Here's an example implementing the `onBeforeParse` hook:
```rs
use bun_native_plugin::{define_bun_plugin, OnBeforeParse, bun, Result, anyhow, BunLoader};
use napi_derive::napi;
/// Define the plugin and its name
define_bun_plugin!("replace-foo-with-bar");
/// Here we'll implement `onBeforeParse` with code that replaces all occurrences of
/// `foo` with `bar`.
///
/// We use the #[bun] macro to generate some of the boilerplate code.
///
/// The argument of the function (`handle: &mut OnBeforeParse`) tells
/// the macro that this function implements the `onBeforeParse` hook.
#[bun]
pub fn replace_foo_with_bar(handle: &mut OnBeforeParse) -> Result<()> {
// Fetch the input source code.
let input_source_code = handle.input_source_code()?;
// Get the Loader for the file
let loader = handle.output_loader();
let output_source_code = input_source_code.replace("foo", "bar");
handle.set_output_source_code(output_source_code, BunLoader::BUN_LOADER_JSX);
Ok(())
}
```
And to use it in Bun.build():
```typescript
import myNativeAddon from "./my-native-addon";
Bun.build({
entrypoints: ["./app.tsx"],
plugins: [
{
name: "my-plugin",
setup(build) {
build.onBeforeParse(
{
namespace: "file",
filter: "**/*.tsx",
},
{
napiModule: myNativeAddon,
symbol: "replace_foo_with_bar",
// external: myNativeAddon.getSharedState()
},
);
},
},
],
});
```
### `onBeforeParse`
```ts
onBeforeParse(
args: { filter: RegExp; namespace?: string },
callback: { napiModule: NapiModule; symbol: string; external?: unknown },
): void;
```
This lifecycle callback is run immediately before a file is parsed by Bun's bundler.
As input, it receives the file's contents and can optionally return new source code.
This callback can be called from any thread and so the napi module implementation must be thread-safe.

View File

@@ -695,7 +695,7 @@ In Bun's CLI, simple boolean flags like `--minify` do not accept an argument. Ot
- In Bun, `minify` can be a boolean or an object.
```ts
Bun.build({
await Bun.build({
entrypoints: ['./index.tsx'],
// enable all minification
minify: true

View File

@@ -47,6 +47,9 @@ registry = "https://registry.yarnpkg.com/"
# Install for production? This is the equivalent to the "--production" CLI argument
production = false
# Save a text-based lockfile? This is equivalent to the "--save-text-lockfile" CLI argument
saveTextLockfile = false
# Disallow changes to lockfile? This is the equivalent to the "--frozen-lockfile" CLI argument
frozenLockfile = false
@@ -54,12 +57,15 @@ frozenLockfile = false
dryRun = true
# Install optionalDependencies (default: true)
# Setting this to false is equivalent to the `--omit=optional` CLI argument
optional = true
# Install local devDependencies (default: true)
# Setting this to false is equivalent to the `--omit=dev` CLI argument
dev = true
# Install peerDependencies (default: true)
# Setting this to false is equivalent to the `--omit=peer` CLI argument
peer = true
# Max number of concurrent lifecycle scripts (default: (cpu count or GOMAXPROCS) x2)
@@ -108,6 +114,7 @@ export interface Install {
scopes: Scopes;
registry: Registry;
production: boolean;
saveTextLockfile: boolean;
frozenLockfile: boolean;
dryRun: boolean;
optional: boolean;

View File

@@ -130,6 +130,20 @@ $ bun install --frozen-lockfile
For more information on Bun's binary lockfile `bun.lockb`, refer to [Package manager > Lockfile](https://bun.sh/docs/install/lockfile).
## Omitting dependencies
To omit dev, peer, or optional dependencies use the `--omit` flag.
```bash
# Exclude "devDependencies" from the installation. This will apply to the
# root package and workspaces if they exist. Transitive dependencies will
# not have "devDependencies".
$ bun install --omit dev
# Install only dependencies from "dependencies"
$ bun install --omit=dev --omit=peer --omit=optional
```
## Dry run
To perform a dry run (i.e. don't actually install anything):
@@ -149,7 +163,8 @@ Bun supports installing dependencies from Git, GitHub, and local or remotely-hos
"lodash": "git+ssh://github.com/lodash/lodash.git#4.17.21",
"moment": "git@github.com:moment/moment.git",
"zod": "github:colinhacks/zod",
"react": "https://registry.npmjs.org/react/-/react-18.2.0.tgz"
"react": "https://registry.npmjs.org/react/-/react-18.2.0.tgz",
"bun-types": "npm:@types/bun"
}
}
```
@@ -173,6 +188,9 @@ peer = true
# equivalent to `--production` flag
production = false
# equivalent to `--save-text-lockfile` flag
saveTextLockfile = false
# equivalent to `--frozen-lockfile` flag
frozenLockfile = false

View File

@@ -15,6 +15,14 @@ $ bun create next-app
Creating a new Next.js app in /path/to/my-app.
```
You can specify a starter template using the `--example` flag.
```sh
$ bun create next-app --example with-supabase
✔ What is your project named? … my-app
...
```
---
To start the dev server with Bun, run `bun --bun run dev` from the project root.

View File

@@ -2,7 +2,9 @@
name: Configure a private registry for an organization scope with bun install
---
Bun does not read `.npmrc` files; instead private registries are configured via `bunfig.toml`. To configure a registry for a particular npm scope:
Private registries can be configured using either [`.npmrc`](https://bun.sh/docs/install/npmrc) or [`bunfig.toml`](https://bun.sh/docs/runtime/bunfig#install-registry). While both are supported, we recommend using **bunfig.toml** for enhanced flexibility and Bun-specific options.
To configure a registry for a particular npm scope:
```toml#bunfig.toml
[install.scopes]

View File

@@ -0,0 +1,120 @@
---
name: "import, require, and test Svelte components with bun test"
---
Bun's [Plugin API](/docs/runtime/plugins) lets you add custom loaders to your project. The `test.preload` option in `bunfig.toml` lets you configure your loader to start before your tests run.
Firstly, install `@testing-library/svelte`, `svelte`, and `@happy-dom/global-registrator`.
```bash
$ bun add @testing-library/svelte svelte@4 @happy-dom/global-registrator
```
Then, save this plugin in your project.
```ts#svelte-loader.js
import { plugin } from "bun";
import { compile } from "svelte/compiler";
import { readFileSync } from "fs";
import { beforeEach, afterEach } from "bun:test";
import { GlobalRegistrator } from "@happy-dom/global-registrator";
beforeEach(async () => {
await GlobalRegistrator.register();
});
afterEach(async () => {
await GlobalRegistrator.unregister();
});
plugin({
name: "svelte loader",
setup(builder) {
builder.onLoad({ filter: /\.svelte(\?[^.]+)?$/ }, ({ path }) => {
try {
const source = readFileSync(
path.substring(
0,
path.includes("?") ? path.indexOf("?") : path.length
),
"utf-8"
);
const result = compile(source, {
filename: path,
generate: "client",
dev: false,
});
return {
contents: result.js.code,
loader: "js",
};
} catch (err) {
throw new Error(`Failed to compile Svelte component: ${err.message}`);
}
});
},
});
```
---
Add this to `bunfig.toml` to tell Bun to preload the plugin, so it loads before your tests run.
```toml#bunfig.toml
[test]
# Tell Bun to load this plugin before your tests run
preload = ["./svelte-loader.js"]
# This also works:
# test.preload = ["./svelte-loader.js"]
```
---
Add an example `.svelte` file in your project.
```html#Counter.svelte
<script>
export let initialCount = 0;
let count = initialCount;
</script>
<button on:click={() => (count += 1)}>+1</button>
```
---
Now you can `import` or `require` `*.svelte` files in your tests, and it will load the Svelte component as a JavaScript module.
```ts#hello-svelte.test.ts
import { test, expect } from "bun:test";
import { render, fireEvent } from "@testing-library/svelte";
import Counter from "./Counter.svelte";
test("Counter increments when clicked", async () => {
const { getByText, component } = render(Counter);
const button = getByText("+1");
// Initial state
expect(component.$$.ctx[0]).toBe(0); // initialCount is the first prop
// Click the increment button
await fireEvent.click(button);
// Check the new state
expect(component.$$.ctx[0]).toBe(1);
});
```
---
Use `bun test` to run your tests.
```bash
$ bun test
```
---

View File

@@ -1,4 +1,4 @@
All packages downloaded from the registry are stored in a global cache at `~/.bun/install/cache`. They are stored in subdirectories named like `${name}@${version}`, so multiple versions of a package can be cached.
All packages downloaded from the registry are stored in a global cache at `~/.bun/install/cache`, or the path defined by the environment variable `BUN_INSTALL_CACHE_DIR`. They are stored in subdirectories named like `${name}@${version}`, so multiple versions of a package can be cached.
{% details summary="Configuring cache behavior (bunfig.toml)" %}

View File

@@ -55,12 +55,25 @@ To install dependencies without allowing changes to lockfile (useful on CI):
$ bun install --frozen-lockfile
```
To perform a dry run (i.e. don't actually install anything):
To exclude dependency types from installing, use `--omit` with `dev`, `optional`, or `peer`:
```bash
# Disable devDependencies and optionalDependencies
$ bun install --omit=dev --omit=optional
```
To perform a dry run (i.e. don't actually install anything or update the lockfile):
```bash
$ bun install --dry-run
```
To generate a lockfile without installing packages:
```bash
$ bun install --lockfile-only
```
To modify logging verbosity:
```bash
@@ -86,6 +99,9 @@ peer = true
# equivalent to `--production` flag
production = false
# equivalent to `--save-text-lockfile` flag
saveTextLockfile = false
# equivalent to `--frozen-lockfile` flag
frozenLockfile = false

View File

@@ -49,6 +49,18 @@ Packages, metadata for those packages, the hoisted install order, dependencies f
It uses linear arrays for all data. [Packages](https://github.com/oven-sh/bun/blob/be03fc273a487ac402f19ad897778d74b6d72963/src/install/install.zig#L1825) are referenced by an auto-incrementing integer ID or a hash of the package name. Strings longer than 8 characters are de-duplicated. Prior to saving on disk, the lockfile is garbage-collected & made deterministic by walking the package tree and cloning the packages in dependency order.
#### Generate a lockfile without installing?
To generate a lockfile without installing to `node_modules` you can use the `--lockfile-only` flag. The lockfile will always be saved to disk, even if it is up-to-date with the `package.json`(s) for your project.
```bash
$ bun install --lockfile-only
```
{% callout %}
**Note** - using `--lockfile-only` will still populate the global install cache with registry metadata and git/tarball dependencies.
{% endcallout %}
#### Can I opt out?
To install without creating a lockfile:
@@ -74,6 +86,24 @@ print = "yarn"
{% /codetabs %}
### Text-based lockfile
Bun v1.1.39 introduced `bun.lock`, a JSONC formatted lockfile. `bun.lock` is human-readable and git-diffable without configuration, at [no cost to performance](https://bun.sh/blog/bun-lock-text-lockfile#cached-bun-install-gets-30-faster).
To generate the lockfile, use `--save-text-lockfile` with `bun install`. You can do this for new projects and existing projects already using `bun.lockb` (resolutions will be preserved).
```bash
$ bun install --save-text-lockfile
$ head -n3 bun.lock
{
"lockfileVersion": 0,
"workspaces": {
```
Once `bun.lock` is generated, Bun will use it for all subsequent installs and updates through commands that read and modify the lockfile. If both lockfiles exist, `bun.lock` will be chosen over `bun.lockb`.
Bun v1.2.0 will switch the default lockfile format to `bun.lock`.
{% details summary="Configuring lockfile" %}
```toml

View File

@@ -6,7 +6,7 @@ Bun supports loading configuration options from [`.npmrc`](https://docs.npmjs.co
{% /callout %}
# Supported options
## Supported options
### `registry`: Set the default registry

View File

@@ -214,9 +214,9 @@ export default {
page("bundler", "`Bun.build`", {
description: "Bundle code for consumption in the browser with Bun's native bundler.",
}),
// page("bundler/intro", "How bundlers work", {
// description: "A visual introduction to bundling",
// }),
page("bundler/html", "HTML", {
description: `Bundle html files with Bun's native bundler.`,
}),
page("bundler/loaders", "Loaders", {
description: "Bun's built-in loaders for the bundler and runtime",
}),
@@ -226,6 +226,7 @@ export default {
page("bundler/macros", "Macros", {
description: `Run JavaScript functions at bundle-time and inline the results into your bundle`,
}),
page("bundler/vs-esbuild", "vs esbuild", {
description: `Guides for migrating from other bundlers to Bun.`,
}),
@@ -310,6 +311,9 @@ export default {
page("api/streams", "Streams", {
description: `Reading, writing, and manipulating streams of data in Bun.`,
}), // "`Bun.serve`"),
page("api/s3", "S3 Object Storage", {
description: `Bun provides fast, native bindings for interacting with S3-compatible object storage services.`,
}),
page("api/file-io", "File I/O", {
description: `Read and write files fast with Bun's heavily optimized file system API.`,
}), // "`Bun.write`"),

View File

@@ -61,7 +61,10 @@ This function declaration is equivalent to:
declare function add(a: number, b: number = 1): number;
```
The code generator will provide `bun.gen.math.jsAdd`, which is the native function implementation. To pass to JavaScript, use `bun.gen.math.createAddCallback(global)`
The code generator will provide `bun.gen.math.jsAdd`, which is the native
function implementation. To pass to JavaScript, use
`bun.gen.math.createAddCallback(global)`. JS files in `src/js/` may use
`$bindgenFn("math.bind.ts", "add")` to get a handle to the implementation.
## Strings
@@ -104,7 +107,7 @@ export const action = fn({
In Zig, each variant gets a number, based on the order the schema defines.
```
```zig
fn action1(a: i32) i32 {
return a;
}
@@ -180,9 +183,9 @@ export const add = fn({
// enforce in i32 range
a: t.i32.enforceRange(),
// clamp to u16 range
c: t.u16,
b: t.u16,
// enforce in arbitrary range, with a default if not provided
b: t.i32.enforceRange(0, 1000).default(5),
c: t.i32.enforceRange(0, 1000).default(5),
// clamp to arbitrary range, or null
d: t.u16.clamp(0, 10).optional,
},
@@ -190,6 +193,29 @@ export const add = fn({
});
```
Various Node.js validator functions such as `validateInteger`, `validateNumber`, and more are available. Use these when implementing Node.js APIs, so the error messages match 1:1 what Node would do.
Unlike `enforceRange`, which is taken from WebIDL, `validate*` functions are much more strict on the input they accept. For example, Node's numerical validator check `typeof value === 'number'`, while WebIDL uses `ToNumber` for lossy conversion.
```ts
import { t, fn } from "bindgen";
export const add = fn({
args: {
global: t.globalObject,
// throw if not given a number
a: t.f64.validateNumber(),
// valid in i32 range
a: t.i32.validateInt32(),
// f64 within safe integer range
b: t.f64.validateInteger(),
// f64 in given range
c: t.f64.validateNumber(-10000, 10000),
},
ret: t.i32,
});
```
## Callbacks
TODO

View File

@@ -238,6 +238,17 @@ By default Bun uses caret ranges; if the `latest` version of a package is `2.4.1
exact = false
```
### `install.saveTextLockfile`
Generate `bun.lock`, a human-readable text-based lockfile. Once generated, Bun will use this file instead of `bun.lockb`, choosing it over the binary lockfile if both are present.
Default `false`. In Bun v1.2.0 the default lockfile format will change to `bun.lock`.
```toml
[install]
saveTextLockfile = true
```
<!--
### `install.prefer`

View File

@@ -259,6 +259,7 @@ await Bun.build({
conditions: ["react-server"],
target: "bun",
entryPoints: ["./app/foo/route.js"],
throw: true,
});
```

View File

@@ -53,7 +53,7 @@ Some methods are not optimized yet.
### [`node:events`](https://nodejs.org/api/events.html)
🟡 `events.addAbortListener` & `events.getMaxListeners` do not support (web api) `EventTarget`
🟢 Fully implemented. `EventEmitterAsyncResource` uses `AsyncResource` underneath.
### [`node:fs`](https://nodejs.org/api/fs.html)
@@ -157,11 +157,11 @@ Some methods are not optimized yet.
### [`node:v8`](https://nodejs.org/api/v8.html)
🔴 `serialize` and `deserialize` use JavaScriptCore's wire format instead of V8's. Otherwise, not implemented. For profiling, use [`bun:jsc`](https://bun.sh/docs/project/benchmarking#bunjsc) instead.
🟡 `writeHeapSnapshot` and `getHeapSnapshot` are implemented. `serialize` and `deserialize` use JavaScriptCore's wire format instead of V8's. Other methods are not implemented. For profiling, use [`bun:jsc`](https://bun.sh/docs/project/benchmarking#bunjsc) instead.
### [`node:vm`](https://nodejs.org/api/vm.html)
🟡 Core functionality works, but experimental VM ES modules are not implemented, including `vm.Module`, `vm.SourceTextModule`, `vm.SyntheticModule`,`importModuleDynamically`, and `vm.measureMemory`. Options like `timeout`, `breakOnSigint`, `cachedData` are not implemented yet. There is a bug with `this` value for contextified options not having the correct prototype.
🟡 Core functionality works, but experimental VM ES modules are not implemented, including `vm.Module`, `vm.SourceTextModule`, `vm.SyntheticModule`,`importModuleDynamically`, and `vm.measureMemory`. Options like `timeout`, `breakOnSigint`, `cachedData` are not implemented yet.
### [`node:wasi`](https://nodejs.org/api/wasi.html)

View File

@@ -307,7 +307,7 @@ await import("my-object-virtual-module"); // { baz: "quix" }
Plugins can read and write to the [build config](https://bun.sh/docs/bundler#api) with `build.config`.
```ts
Bun.build({
await Bun.build({
entrypoints: ["./app.ts"],
outdir: "./dist",
sourcemap: "external",
@@ -324,6 +324,7 @@ Bun.build({
},
},
],
throw: true,
});
```
@@ -332,7 +333,7 @@ Bun.build({
**NOTE**: Plugin lifecycle callbacks (`onStart()`, `onResolve()`, etc.) do not have the ability to modify the `build.config` object in the `setup()` function. If you want to mutate `build.config`, you must do so directly in the `setup()` function:
```ts
Bun.build({
await Bun.build({
entrypoints: ["./app.ts"],
outdir: "./dist",
sourcemap: "external",
@@ -350,6 +351,7 @@ Bun.build({
},
},
],
throw: true,
});
```
@@ -553,150 +555,3 @@ plugin({
```
This plugin will transform all imports of the form `import env from "env"` into a JavaScript module that exports the current environment variables.
#### `.defer()`
One of the arguments passed to the `onLoad` callback is a `defer` function. This function returns a `Promise` that is resolved when all _other_ modules have been loaded.
This allows you to delay execution of the `onLoad` callback until all other modules have been loaded.
This is useful for returning the contents of a module that depends on other modules.
##### Example: tracking and reporting unused exports
```ts
import { plugin } from "bun";
plugin({
name: "track imports",
setup(build) {
const transpiler = new Bun.Transpiler();
let trackedImports: Record<string, number> = {};
// Each module that goes through this onLoad callback
// will record its imports in `trackedImports`
build.onLoad({ filter: /\.ts/ }, async ({ path }) => {
const contents = await Bun.file(path).arrayBuffer();
const imports = transpiler.scanImports(contents);
for (const i of imports) {
trackedImports[i.path] = (trackedImports[i.path] || 0) + 1;
}
return undefined;
});
build.onLoad({ filter: /stats\.json/ }, async ({ defer }) => {
// Wait for all files to be loaded, ensuring
// that every file goes through the above `onLoad()` function
// and their imports tracked
await defer();
// Emit JSON containing the stats of each import
return {
contents: `export default ${JSON.stringify(trackedImports)}`,
loader: "json",
};
});
},
});
```
Note that the `.defer()` function currently has the limitation that it can only be called once per `onLoad` callback.
## Native plugins
{% callout %}
**NOTE** — This is an advanced and experimental API recommended for plugin developers who are familiar with systems programming and the C ABI. Use with caution.
{% /callout %}
One of the reasons why Bun's bundler is so fast is that it is written in native code and leverages multi-threading to load and parse modules in parallel.
However, one limitation of plugins written in JavaScript is that JavaScript itself is single-threaded.
Native plugins are written as [NAPI](/docs/node-api) modules and can be run on multiple threads. This allows native plugins to run much faster than JavaScript plugins.
In addition, native plugins can skip unnecessary work such as the UTF-8 -> UTF-16 conversion needed to pass strings to JavaScript.
These are the following lifecycle hooks which are available to native plugins:
- [`onBeforeParse()`](#onbeforeparse): Called on any thread before a file is parsed by Bun's bundler.
### Creating a native plugin
Native plugins are NAPI modules which expose lifecycle hooks as C ABI functions.
To create a native plugin, you must export a C ABI function which matches the signature of the native lifecycle hook you want to implement.
#### Example: Rust with napi-rs
First initialize a napi project (see [here](https://napi.rs/docs/introduction/getting-started) for a more comprehensive guide).
Then install Bun's official safe plugin wrapper crate:
```bash
cargo add bun-native-plugin
```
Now you can export an `extern "C" fn` which is the implementation of your plugin:
```rust
#[no_mangle]
extern "C" fn on_before_parse_impl(
args: *const bun_native_plugin::sys::OnBeforeParseArguments,
result: *mut bun_native_plugin::sys::OnBeforeParseResult,
) {
let args = unsafe { &*args };
let result = unsafe { &mut *result };
let mut handle = match bun_native_plugin::OnBeforeParse::from_raw(args, result) {
Ok(handle) => handle,
Err(_) => {
return;
}
};
let source_code = match handle.input_source_code() {
Ok(source_code) => source_code,
Err(_) => {
handle.log_error("Fetching source code failed!");
return;
}
};
let loader = handle.output_loader();
handle.set_output_source_code(source_code.replace("foo", "bar"), loader);
```
Use napi-rs to compile the plugin to a `.node` file, then you can `require()` it from JS and use it:
```js
await Bun.build({
entrypoints: ["index.ts"],
setup(build) {
const myNativePlugin = require("./path/to/plugin.node");
build.onBeforeParse(
{ filter: /\.ts/ },
{ napiModule: myNativePlugin, symbol: "on_before_parse_impl" },
);
},
});
```
### `onBeforeParse`
```ts
onBeforeParse(
args: { filter: RegExp; namespace?: string },
callback: { napiModule: NapiModule; symbol: string; external?: unknown },
): void;
```
This lifecycle callback is run immediately before a file is parsed by Bun's bundler.
As input, it receives the file's contents and can optionally return new source code.
This callback can be called from any thread and so the napi module implementation must be thread-safe.

View File

@@ -14,5 +14,7 @@
<true/>
<key>com.apple.security.get-task-allow</key>
<true/>
<key>com.apple.security.cs.debugger</key>
<true/>
</dict>
</plist>
</plist>

View File

@@ -1,234 +0,0 @@
const std = @import("std");
const bun = @import("root").bun;
const string = bun.string;
const Output = bun.Output;
const Global = bun.Global;
const Environment = bun.Environment;
const strings = bun.strings;
const MutableString = bun.MutableString;
const stringZ = bun.stringZ;
const default_allocator = bun.default_allocator;
const C = bun.C;
pub usingnamespace @import("root").bun;
const clap = bun.clap;
const URL = @import("../src/url.zig").URL;
const Headers = bun.http.Headers;
const Method = @import("../src/http/method.zig").Method;
const ColonListType = @import("../src/cli/colon_list_type.zig").ColonListType;
const HeadersTuple = ColonListType(string, noop_resolver);
const path_handler = @import("../src/resolver/resolve_path.zig");
const HTTPThread = bun.http.HTTPThread;
const HTTP = bun.http;
// Identity resolver passed to ColonListType for `--header` parsing:
// returns the input string unchanged and never actually errors
// (the `!string` error union exists only to satisfy the expected signature).
fn noop_resolver(in: string) !string {
return in;
}
// Version string printed by the `--version` flag.
const VERSION = "0.0.0";
// Command-line flag definitions consumed by the `clap` argument parser.
// `catch unreachable` is safe here: the parse strings are compile-time
// constants known to be well-formed.
const params = [_]clap.Param(clap.Help){
clap.parseParam("-v, --verbose Show headers & status code") catch unreachable,
clap.parseParam("-H, --header <STR>... Add a header") catch unreachable,
clap.parseParam("-r, --max-redirects <STR> Maximum number of redirects to follow (default: 128)") catch unreachable,
clap.parseParam("-b, --body <STR> HTTP request body as a string") catch unreachable,
clap.parseParam("-f, --file <STR> File path to load as body") catch unreachable,
clap.parseParam("-q, --quiet Quiet mode") catch unreachable,
clap.parseParam("--no-gzip Disable gzip") catch unreachable,
clap.parseParam("--no-deflate Disable deflate") catch unreachable,
clap.parseParam("--no-compression Disable gzip & deflate") catch unreachable,
clap.parseParam("--version Print the version and exit") catch unreachable,
clap.parseParam("--turbo Skip sending TLS shutdown signals") catch unreachable,
clap.parseParam("<POS>... ") catch unreachable,
};
// Compile-time lookup from a positional argument to an HTTP method.
// Only all-upper and all-lower spellings are matched (e.g. "GET"/"get",
// not "Get"), since each casing is listed explicitly.
const MethodNames = std.ComptimeStringMap(Method, .{
.{ "GET", Method.GET },
.{ "get", Method.GET },
.{ "POST", Method.POST },
.{ "post", Method.POST },
.{ "PUT", Method.PUT },
.{ "put", Method.PUT },
.{ "PATCH", Method.PATCH },
.{ "patch", Method.PATCH },
.{ "OPTIONS", Method.OPTIONS },
.{ "options", Method.OPTIONS },
.{ "HEAD", Method.HEAD },
.{ "head", Method.HEAD },
});
// Global scratch buffers used by Arguments.parse to resolve the `--file`
// path to an absolute, NUL-terminated path. Not thread-safe; this tool
// parses arguments once on the main thread.
var file_path_buf: bun.PathBuffer = undefined;
var cwd_buf: bun.PathBuffer = undefined;
// Parsed command-line options for this fetch CLI tool.
// Built once by `parse` from process argv; the process exits directly
// (via Global.exit) on `--version`, bad input, or file errors.
pub const Arguments = struct {
url: URL,
method: Method,
verbose: bool = false,
headers: Headers.Entries,
headers_buf: string,
body: string = "",
turbo: bool = false,
quiet: bool = false,
// Parse argv into an Arguments value.
// Returns a clap error if flag parsing fails; exits the process with
// status 0 for `--version` and status 1 for missing/invalid URL or
// unreadable `--file` input. `allocator` owns the body read from file.
pub fn parse(allocator: std.mem.Allocator) !Arguments {
var diag = clap.Diagnostic{};
var args = clap.parse(clap.Help, &params, .{
.diagnostic = &diag,
.allocator = allocator,
}) catch |err| {
// Report useful error and exit
diag.report(Output.errorWriter(), err) catch {};
return err;
};
var positionals = args.positionals();
var raw_args: std.ArrayListUnmanaged(string) = undefined;
if (positionals.len > 0) {
// Reinterpret clap's positional slice as a mutable ArrayList backing
// store so entries can be swapRemove'd below without copying.
// NOTE(review): this aliases clap-owned memory via a pointer cast —
// relies on clap keeping the slice alive; confirm before reuse.
raw_args = .{ .capacity = positionals.len, .items = @as([*][]const u8, @ptrFromInt(@intFromPtr(positionals.ptr)))[0..positionals.len] };
} else {
raw_args = .{};
}
if (args.flag("--version")) {
try Output.writer().writeAll(VERSION);
Global.exit(0);
}
var method = Method.GET;
var url: URL = .{};
var body_string: string = args.option("--body") orelse "";
// `--file` overrides `--body`: load the request body from disk.
if (args.option("--file")) |file_path| {
if (file_path.len > 0) {
var cwd = try std.process.getCwd(&cwd_buf);
var parts = [_]string{file_path};
var absolute_path = path_handler.joinAbsStringBuf(cwd, &file_path_buf, &parts, .auto);
// NUL-terminate in place so the buffer can be used as a [:0] path.
file_path_buf[absolute_path.len] = 0;
file_path_buf[absolute_path.len + 1] = 0;
var absolute_path_len = absolute_path.len;
var absolute_path_ = file_path_buf[0..absolute_path_len :0];
var body_file = std.fs.openFileAbsoluteZ(absolute_path_, .{ .mode = .read_only }) catch |err| {
Output.printErrorln("<r><red>{s}<r> opening file {s}", .{ @errorName(err), absolute_path });
Global.exit(1);
};
var file_contents = body_file.readToEndAlloc(allocator, try body_file.getEndPos()) catch |err| {
Output.printErrorln("<r><red>{s}<r> reading file {s}", .{ @errorName(err), absolute_path });
Global.exit(1);
};
body_string = file_contents;
}
}
{
// Scan positionals: any token matching an HTTP method name selects the
// method and is removed; the last remaining positional is the URL.
// NOTE(review): swapRemove moves the last element into raw_arg_i, but
// the loop still increments, so the swapped-in element is never checked
// against MethodNames — confirm this is acceptable for multi-arg input.
var raw_arg_i: usize = 0;
while (raw_arg_i < raw_args.items.len) : (raw_arg_i += 1) {
const arg = raw_args.items[raw_arg_i];
if (MethodNames.get(arg[0..])) |method_| {
method = method_;
_ = raw_args.swapRemove(raw_arg_i);
}
}
if (raw_args.items.len == 0) {
Output.prettyErrorln("<r><red>error<r><d>:<r> <b>Missing URL<r>\n\nExample:\n<r><b>fetch GET https://example.com<r>\n\n<b>fetch example.com/foo<r>\n\n", .{});
Global.exit(1);
}
const url_position = raw_args.items.len - 1;
url = URL.parse(raw_args.swapRemove(url_position));
if (!url.isAbsolute()) {
Output.prettyErrorln("<r><red>error<r><d>:<r> <b>Invalid URL<r>\n\nExample:\n<r><b>fetch GET https://example.com<r>\n\n<b>fetch example.com/foo<r>\n\n", .{});
Global.exit(1);
}
}
// `--header` values are not wired up here: headers are returned empty.
return Arguments{
.url = url,
.method = method,
.verbose = args.flag("--verbose"),
.headers = .{},
.headers_buf = "",
.body = body_string,
.turbo = args.flag("--turbo"),
.quiet = args.flag("--quiet"),
};
}
};
// Entry point: parse arguments, schedule a single async HTTP request on
// the HTTP thread, then wait for the response on a channel and print the
// body to stdout (unless `--quiet`).
pub fn main() anyerror!void {
var stdout_ = std.io.getStdOut();
var stderr_ = std.io.getStdErr();
var output_source = Output.Source.init(stdout_, stderr_);
Output.Source.set(&output_source);
defer Output.flush();
var args = try Arguments.parse(default_allocator);
// Response body accumulator; heap-allocated because AsyncHTTP holds a
// pointer to it across threads.
var body_out_str = try MutableString.init(default_allocator, 1024);
var channel = try default_allocator.create(HTTP.HTTPChannel);
channel.* = HTTP.HTTPChannel.init();
var response_body_string = try default_allocator.create(MutableString);
response_body_string.* = body_out_str;
try channel.buffer.ensureTotalCapacity(1);
HTTPThread.init();
var ctx = try default_allocator.create(HTTP.HTTPChannelContext);
ctx.* = .{
.channel = channel,
.http = try HTTP.AsyncHTTP.init(
default_allocator,
args.method,
args.url,
args.headers,
args.headers_buf,
response_body_string,
args.body,
HTTP.FetchRedirect.follow,
),
};
// The callback pushes the completed request onto `channel`.
ctx.http.callback = HTTP.HTTPChannelContext.callback;
var batch = HTTPThread.Batch{};
ctx.http.schedule(default_allocator, &batch);
ctx.http.client.verbose = args.verbose;
ctx.http.verbose = args.verbose;
HTTPThread.global.schedule(batch);
// NOTE(review): this is a busy-wait spin on the channel until the single
// response arrives — acceptable for a one-shot debug tool, but burns CPU.
while (true) {
while (channel.tryReadItem() catch null) |http| {
var response = http.response orelse {
Output.prettyErrorln("<r><red>error<r><d>:<r> <b>HTTP response missing<r>", .{});
Global.exit(1);
};
switch (response.status_code) {
// 200/302 are treated as success and printed silently; any other
// status dumps the response metadata when `--verbose` is set.
200, 302 => {},
else => {
if (args.verbose) {
Output.prettyErrorln("{}", .{response});
}
},
}
if (!args.quiet) {
Output.flush();
Output.disableBuffering();
try Output.writer().writeAll(response_body_string.list.items);
Output.enableBuffering();
}
return;
}
}
}

View File

@@ -1,172 +0,0 @@
// Thank you @evanw for this code!!!
const fs = require("fs");
const path = require("path");
// ES5 reference: https://es5.github.io/
//
// A conforming implementation of this International standard shall interpret
// characters in conformance with the Unicode Standard, Version 3.0 or later
// and ISO/IEC 10646-1 with either UCS-2 or UTF-16 as the adopted encoding
// form, implementation level 3. If the adopted ISO/IEC 10646-1 subset is not
// otherwise specified, it is presumed to be the BMP subset, collection 300.
//
// UnicodeLetter: any character in the Unicode categories “Uppercase letter (Lu)”,
// “Lowercase letter (Ll)”, “Titlecase letter (Lt)”, “Modifier letter (Lm)”,
// “Other letter (Lo)”, or “Letter number (Nl)”.
const idStartES5 = []
.concat(
require("@unicode/unicode-3.0.0/General_Category/Uppercase_Letter/code-points"),
require("@unicode/unicode-3.0.0/General_Category/Lowercase_Letter/code-points"),
require("@unicode/unicode-3.0.0/General_Category/Titlecase_Letter/code-points"),
require("@unicode/unicode-3.0.0/General_Category/Modifier_Letter/code-points"),
require("@unicode/unicode-3.0.0/General_Category/Other_Letter/code-points"),
// The "letter number" category is not included because old versions of Safari
// had a bug where they didn't include it. This means it does not match ES5.
// We need to make sure we escape these characters so Safari can read them.
// See https://github.com/evanw/esbuild/issues/1349 for more information.
// require('@unicode/unicode-3.0.0/General_Category/Letter_Number/code-points'),
)
.sort((a, b) => a - b);
// UnicodeCombiningMark: any character in the Unicode categories “Non-spacing mark (Mn)”
// or “Combining spacing mark (Mc)”
// UnicodeDigit: any character in the Unicode category “Decimal number (Nd)”
// UnicodeConnectorPunctuation: any character in the Unicode category “Connector punctuation (Pc)”
const idContinueES5 = idStartES5
.concat(
require("@unicode/unicode-3.0.0/General_Category/Nonspacing_Mark/code-points"),
require("@unicode/unicode-3.0.0/General_Category/Spacing_Mark/code-points"),
require("@unicode/unicode-3.0.0/General_Category/Decimal_Number/code-points"),
require("@unicode/unicode-3.0.0/General_Category/Connector_Punctuation/code-points"),
)
.sort((a, b) => a - b);
// ESNext reference: https://tc39.es/ecma262/
//
// A conforming implementation of ECMAScript must interpret source text input
// in conformance with the Unicode Standard, Version 5.1.0 or later and ISO/IEC
// 10646. If the adopted ISO/IEC 10646-1 subset is not otherwise specified, it
// is presumed to be the Unicode set, collection 10646.
//
// UnicodeIDStart: any Unicode code point with the Unicode property “ID_Start”
// UnicodeIDStart: any Unicode code point with the Unicode property “ID_Start”
const idStartESNext = require("@unicode/unicode-13.0.0/Binary_Property/ID_Start/code-points");
const idStartESNextSet = new Set(idStartESNext);
// UnicodeIDContinue: any Unicode code point with the Unicode property “ID_Continue”
const idContinueESNext = require("@unicode/unicode-13.0.0/Binary_Property/ID_Continue/code-points");
const idContinueESNextSet = new Set(idContinueESNext);
// These identifiers are valid in both ES5 and ES6+ (i.e. an intersection of both)
const idStartES5AndESNext = idStartES5.filter(cp => idStartESNextSet.has(cp));
const idContinueES5AndESNext = idContinueES5.filter(cp => idContinueESNextSet.has(cp));
// These identifiers are valid in either ES5 or ES6+ (i.e. a union of both)
const idStartES5OrESNext = Array.from(new Set([...idStartES5, ...idStartESNext])).sort((a, b) => a - b);
const idContinueES5OrESNext = Array.from(new Set([...idContinueES5, ...idContinueESNext])).sort((a, b) => a - b);
// Emits a Zig `RangeTable.init(...)` literal for `codePoints` (sorted ascending).
// Consecutive code points collapse into inclusive [start, end] ranges; points
// below 0x1000 go into the R16 section, everything else into the R32 section.
// The leading count is how many points fall in the Latin-1 range (<= 0xff).
function generateRangeTable(codePoints) {
  let latinOffset = 0;
  while (latinOffset < codePoints.length && codePoints[latinOffset] <= 0xff) {
    latinOffset++;
  }

  // Collapse consecutive code points into runs, starting at index `from` and
  // stopping once a value reaches `limit`. Returns the runs plus the index
  // where scanning stopped.
  const collectRuns = (from, limit) => {
    const runs = [];
    let i = from;
    while (i < codePoints.length && codePoints[i] < limit) {
      const start = codePoints[i++];
      while (i < codePoints.length && codePoints[i] < limit && codePoints[i] === codePoints[i - 1] + 1) {
        i++;
      }
      runs.push([start, codePoints[i - 1]]);
    }
    return { runs, next: i };
  };

  const r16 = collectRuns(0, 0x1000);
  const r32 = collectRuns(r16.next, Infinity);

  const out = [`RangeTable.init(`, ` ${latinOffset},`, ` &[_]R16Range{`];
  for (const [start, end] of r16.runs) {
    out.push(` .{0x${start.toString(16)}, 0x${end.toString(16)}},`);
  }
  out.push(` },`, `&[_]R32Range{`);
  for (const [start, end] of r32.runs) {
    out.push(` .{0x${start.toString(16)}, 0x${end.toString(16)}},`);
  }
  out.push(` },`, `);`);
  return out.join("\n");
}
// Emits a Zig `switch` expression over `codePoints` (sorted ascending) that
// evaluates to `true` for any listed code point and `false` otherwise.
// Consecutive code points are collapsed into `a...b` range prongs.
// Fix: removed the `latinOffset` computation — it was copied from
// generateRangeTable but never used here.
function generateBigSwitchStatement(codePoints) {
  let lines = [];
  let index = 0;
  lines.push(`return switch(codepoint) {`);
  // 16-bit code points (below 0x1000), emitted first.
  while (index < codePoints.length && codePoints[index] < 0x1000) {
    let start = codePoints[index];
    index++;
    while (index < codePoints.length && codePoints[index] < 0x1000 && codePoints[index] === codePoints[index - 1] + 1) {
      index++;
    }
    let end = codePoints[index - 1];
    lines.push(`0x${start.toString(16)}...0x${end.toString(16)},`);
  }
  // 32-bit code points (0x1000 and above).
  while (index < codePoints.length) {
    let start = codePoints[index];
    index++;
    while (index < codePoints.length && codePoints[index] === codePoints[index - 1] + 1) {
      index++;
    }
    let end = codePoints[index - 1];
    lines.push(` 0x${start.toString(16)}...0x${end.toString(16)},`);
  }
  lines.push(` => true,
else => false
};`);
  return lines.join("\n");
}
// Emit the generated lookup tables to src/js_lexer/unicode.zig.
// id_start/id_continue use the ES5 ∪ ESNext union (the lexer accepts
// identifiers valid in either edition); printable_* use ESNext only.
fs.writeFileSync(
  path.join(__dirname, "..", "src", "js_lexer", "unicode.zig"),
  `// This file was automatically generated by ${path.basename(__filename)}. Do not edit.
const RangeTable = @import("./range_table.zig");
// ES5 || ESNext
pub const id_start = ${generateRangeTable(idStartES5OrESNext)}
// ES5 || ESNext
pub const id_continue = ${generateRangeTable(idContinueES5OrESNext)}
pub const printable_id_start = ${generateRangeTable(idStartESNext)}
pub const printable_id_continue = ${generateRangeTable(idContinueESNext)}
pub fn isIdentifierStart(comptime Codepoint: type, codepoint: Codepoint) bool{
${generateBigSwitchStatement(idStartES5OrESNext)}
}
pub fn isIdentifierContinue(comptime Codepoint: type, codepoint: Codepoint) bool{
${generateBigSwitchStatement(idContinueES5OrESNext)}
}
`,
);

View File

@@ -0,0 +1,108 @@
import { Generator, Context } from "./unicode-generator";
// Create sets for fast lookups
// ES5 identifier-start: union of the five Unicode 3.0 letter categories.
// NOTE(review): Letter_Number appears intentionally excluded (matching the
// old-Safari workaround in the sibling generator) — confirm before changing.
const idStartES5Set = new Set([
  ...require("@unicode/unicode-3.0.0/General_Category/Uppercase_Letter/code-points"),
  ...require("@unicode/unicode-3.0.0/General_Category/Lowercase_Letter/code-points"),
  ...require("@unicode/unicode-3.0.0/General_Category/Titlecase_Letter/code-points"),
  ...require("@unicode/unicode-3.0.0/General_Category/Modifier_Letter/code-points"),
  ...require("@unicode/unicode-3.0.0/General_Category/Other_Letter/code-points"),
]);
// ES5 identifier-continue: the start set plus marks, digits, and connector punctuation.
const idContinueES5Set = new Set([
  ...idStartES5Set,
  ...require("@unicode/unicode-3.0.0/General_Category/Nonspacing_Mark/code-points"),
  ...require("@unicode/unicode-3.0.0/General_Category/Spacing_Mark/code-points"),
  ...require("@unicode/unicode-3.0.0/General_Category/Decimal_Number/code-points"),
  ...require("@unicode/unicode-3.0.0/General_Category/Connector_Punctuation/code-points"),
]);
// ESNext sets come from Unicode 15.1's ID_Start / ID_Continue binary properties.
const idStartESNextSet = new Set(require("@unicode/unicode-15.1.0/Binary_Property/ID_Start/code-points"));
const idContinueESNextSet = new Set(require("@unicode/unicode-15.1.0/Binary_Property/ID_Continue/code-points"));
// Exclude known problematic codepoints
// NOTE(review): 0x30FB / 0xFF65 are the (halfwidth) katakana middle dots —
// confirm the rationale for excluding them from ID_Continue.
const ID_Continue_mistake = new Set([0x30fb, 0xff65]);
/**
 * Packs a flat bit array into little-endian 64-bit words.
 * Bit k of the input becomes bit (k % 64) of word floor(k / 64); any truthy
 * entry sets its bit. The final word is zero-padded when the input length is
 * not a multiple of 64; an empty input yields an empty array.
 */
function bitsToU64Array(bits: number[]): bigint[] {
  const words: bigint[] = [];
  for (let base = 0; base < bits.length; base += 64) {
    let word = 0n;
    const limit = Math.min(64, bits.length - base);
    for (let bit = 0; bit < limit; bit++) {
      if (bits[base + bit]) {
        word |= 1n << BigInt(bit);
      }
    }
    words.push(word);
  }
  return words;
}
// Emit a Zig predicate `name` plus its backing two-stage lookup table `table`.
// The Generator samples `checkFn` over the code-point space and returns a
// compressed stage1/stage2 table.
// NOTE(review): the emitted Zig treats stage1[high] as a *bit* offset into the
// stage2 bit array (bit_pos = stage1[high] + low) — confirm this matches what
// ./unicode-generator produces.
async function generateTable(table: string, name: string, checkFn: (cp: number) => boolean) {
  const context: Context<boolean> = {
    get: (cp: number) => checkFn(cp),
    eql: (a: boolean, b: boolean) => a === b,
  };
  const generator = new Generator(context);
  const tables = await generator.generate();
  // stage2 bits are packed 64 per u64, little-endian within each word.
  return `
pub fn ${name}(cp: u21) bool {
if (cp > 0x10FFFF) return false;
const high = cp >> 8;
const low = cp & 0xFF;
const stage2_idx = ${table}.stage1[high];
const bit_pos = stage2_idx + low;
const u64_idx = bit_pos >> 6;
const bit_idx = @as(u6, @intCast(bit_pos & 63));
return (${table}.stage2[u64_idx] & (@as(u64, 1) << bit_idx)) != 0;
}
const ${table} = struct {
pub const stage1 = [_]u16{${tables.stage1.join(",")}};
pub const stage2 = [_]u64{${bitsToU64Array(tables.stage2)
  .map(n => n.toString())
  .join(",")}};
};
`;
}
/**
 * Generates all four identifier-classification predicates (ES5 and ESNext,
 * start and continue) and prints the resulting Zig source to stdout.
 */
async function main() {
  const functions = [
    {
      name: "isIDStartES5",
      table: "idStartES5",
      check: (cp: number) => idStartES5Set.has(cp),
    },
    {
      name: "isIDContinueES5",
      table: "idContinueES5",
      check: (cp: number) => idContinueES5Set.has(cp),
    },
    {
      name: "isIDStartESNext",
      table: "idStartESNext",
      check: (cp: number) => idStartESNextSet.has(cp),
    },
    {
      name: "isIDContinueESNext",
      table: "idContinueESNext",
      // The ESNext continue set additionally excludes the known-problematic codepoints.
      check: (cp: number) => idContinueESNextSet.has(cp) && !ID_Continue_mistake.has(cp),
    },
  ];
  // Build all four tables concurrently.
  const results = await Promise.all(
    functions.map(async ({ name, check, table }) => {
      const code = await generateTable(table, name, check);
      return `
/// ${name} checks if a codepoint is valid in the ${name} category
${code}`;
    }),
  );
  console.log(`/// This file is auto-generated. Do not edit.
${results.join("\n\n")}`);
}
// Fix: the promise was previously left floating; fail loudly with a non-zero
// exit code instead of relying on the runtime's unhandled-rejection behavior.
main().catch(err => {
  console.error(err);
  process.exitCode = 1;
});

View File

@@ -0,0 +1,231 @@
import * as fs from "fs";
import path from "path";
import { execSync } from "child_process";
// Per-first-letter bucket of package names used to build the completion index.
interface LetterGroup {
  // Starting index of this group within the sorted global package list.
  // NOTE(review): the grouping loop fills this from a counter that is never
  // advanced (always 0), and the emitted index recomputes offsets itself —
  // this field looks dead; confirm before relying on it.
  offset: number;
  // Number of packages whose name starts with this letter.
  length: number;
  // The package names themselves, in sorted order.
  packages: string[];
}
// Read and parse input file
const content = fs.readFileSync(path.join(__dirname, "..", "src", "cli", "add_completions.txt"), "utf8");
// One package name per line; blank lines dropped, result sorted so all names
// sharing a first letter are contiguous.
const packages = content
  .split("\n")
  .map(line => line.trim())
  .filter(line => line.length > 0)
  .sort();
// Group packages by first letter
const letterGroups = new Map<string, LetterGroup>();
// NOTE(review): currentOffset is never incremented, so every group gets
// offset 0 here; the actual offsets are recomputed when the Zig index is
// emitted below. Confirm the field is dead before removing it.
let currentOffset = 0;
let maxListSize = 0;
for (const pkg of packages) {
  if (pkg.length === 0) continue; // unreachable: empty lines were filtered above
  const firstLetter = pkg[0].toLowerCase();
  if (!letterGroups.has(firstLetter)) {
    letterGroups.set(firstLetter, {
      offset: currentOffset,
      length: 0,
      packages: [],
    });
  }
  const group = letterGroups.get(firstLetter)!;
  group.packages.push(pkg);
  group.length++;
  // Track the largest per-letter list; emitted as `biggest_list` in the output.
  maxListSize = Math.max(maxListSize, group.length);
}
// Scratch directory for the zstd round-trip below.
// `recursive: true` makes this a no-op when the directory already exists,
// replacing the racy existsSync()/mkdirSync() pair (TOCTOU).
const tmpDir = path.join(__dirname, "tmp");
fs.mkdirSync(tmpDir, { recursive: true });
// Serialize all package names into one buffer:
//   u32 LE total count, then for each package a u16 LE *byte* length
//   followed by its UTF-8 bytes.
const dataChunks: Buffer[] = [];
let totalUncompressed = 0;
// Store total package count first
const totalCountBuf = Buffer.alloc(4);
totalCountBuf.writeUInt32LE(packages.length, 0);
dataChunks.push(totalCountBuf);
totalUncompressed += 4;
// Then all packages with length prefixes
for (const pkg of packages) {
  // Fix: use the UTF-8 byte length, not pkg.length (UTF-16 code units);
  // they differ for non-ASCII names and a mismatch would desynchronize the
  // length-prefixed stream for the Zig reader.
  const byteLength = Buffer.byteLength(pkg, "utf8");
  const lenBuf = Buffer.alloc(2);
  lenBuf.writeUInt16LE(byteLength, 0);
  dataChunks.push(lenBuf);
  dataChunks.push(Buffer.from(pkg, "utf8"));
  totalUncompressed += 2 + byteLength;
}
const uncompressedData = Buffer.concat(dataChunks);
// Write to temp file and compress with zstd
const uncompressedPath = path.join(tmpDir, "packages.bin");
const compressedPath = path.join(tmpDir, "packages.bin.zst");
fs.writeFileSync(uncompressedPath, uncompressedData);
// -1: fastest level; --rm removes the input on success; -f overwrites the output.
execSync(`zstd -1 --rm -f "${uncompressedPath}" -o "${compressedPath}"`);
// Read back compressed data
const compressedData = fs.readFileSync(compressedPath);
fs.unlinkSync(compressedPath);
// Calculate compression ratio
const totalCompressed = compressedData.length;
const ratio = ((totalCompressed / totalUncompressed) * 100).toFixed(1);
console.log("\nCompression statistics:");
console.log(`Uncompressed size: ${totalUncompressed} bytes`);
console.log(`Compressed size: ${totalCompressed} bytes`);
console.log(`Compression ratio: ${ratio}%`);
// Generate Zig code
const chunks: string[] = [];
// Header with comments and imports
chunks.push(`// Auto-generated file. Do not edit.
// To regenerate this file, run:
//
//   bun misctools/generate-add-completions.ts
//
// If you update add_completions.txt, then you should run this script again.
//
// This used to be a comptime block, but it made the build too slow.
// Compressing the completions list saves about 100 KB of binary size.
const std = @import("std");
const bun = @import("root").bun;
const zstd = bun.zstd;
const Environment = bun.Environment;
pub const FirstLetter = enum(u8) {
a = 'a',
b = 'b',
c = 'c',
d = 'd',
e = 'e',
f = 'f',
g = 'g',
h = 'h',
i = 'i',
j = 'j',
k = 'k',
l = 'l',
m = 'm',
n = 'n',
o = 'o',
p = 'p',
q = 'q',
r = 'r',
s = 's',
t = 't',
u = 'u',
v = 'v',
w = 'w',
x = 'x',
y = 'y',
z = 'z',
};`);
// Add the compressed data as a byte-array literal.
chunks.push(`const compressed_data = [_]u8{${[...compressedData].join(",")}};`);
// Add uncompressed size constant (needed to size the decompression buffer).
chunks.push(`const uncompressed_size: usize = ${totalUncompressed};`);
// Generate index entries: recompute per-letter offsets in alphabetical order;
// letters with no packages get a zero-length entry at the current offset.
const indexEntries: string[] = [];
let offset = 0;
for (const letter of "abcdefghijklmnopqrstuvwxyz") {
  const group = letterGroups.get(letter);
  if (group) {
    indexEntries.push(` .${letter} = .{ .offset = ${offset}, .length = ${group.length} }`);
    offset += group.length;
  } else {
    indexEntries.push(` .${letter} = .{ .offset = ${offset}, .length = 0 }`);
  }
}
// Generate index type and instance
chunks.push(`pub const IndexEntry = struct {
offset: usize,
length: usize,
};
pub const Index = std.EnumArray(FirstLetter, IndexEntry);
pub const index = Index.init(.{
${indexEntries.join(",\n")}
});`);
// Generate the decompression and access function.
// init() decompresses the blob once and parses the length-prefixed names into
// slices that point into the decompressed buffer; getPackages() slices the
// parsed list by the per-letter index.
chunks.push(`var decompressed_data: ?[]u8 = null;
var packages_list: ?[][]const u8 = null;
pub fn init(allocator: std.mem.Allocator) !void {
// Decompress data
var data = try allocator.alloc(u8, uncompressed_size);
errdefer allocator.free(data);
const result = zstd.decompress(data, &compressed_data);
decompressed_data = data[0..result.success];
// Parse package list
const total_count = std.mem.readInt(u32, data[0..4], .little);
var packages = try allocator.alloc([]const u8, total_count);
errdefer allocator.free(packages);
var pos: usize = 4;
var i: usize = 0;
while (i < total_count) : (i += 1) {
const len = std.mem.readInt(u16, data[pos..][0..2], .little);
pos += 2;
packages[i] = data[pos..pos + len];
pos += len;
}
packages_list = packages;
}
pub fn deinit(allocator: std.mem.Allocator) void {
if (packages_list) |pkgs| {
allocator.free(pkgs);
packages_list = null;
}
if (decompressed_data) |data| {
allocator.free(data);
decompressed_data = null;
}
}
pub fn getPackages(letter: FirstLetter) []const []const u8 {
const entry = index.get(letter);
if (entry.length == 0) return &[_][]const u8{};
return packages_list.?[entry.offset..entry.offset + entry.length];
}`);
// Add biggest_list constant (largest per-letter group; used to size buffers).
chunks.push(`pub const biggest_list: usize = ${maxListSize};`);
// Format the generated source with `zig fmt` and write it out.
let zigCode = chunks.join("\n\n");
zigCode = execSync("zig fmt --stdin", {
  input: zigCode,
  encoding: "utf8",
}).toString();
fs.writeFileSync(path.join(__dirname, "..", "src", "cli", "add_completions.zig"), zigCode);
// Clean up temp dir. Fix: fs.rmdirSync is deprecated for this use; rmSync
// removes the directory (and any stray temp files) in one call, and `force`
// keeps this best-effort like the original try/catch intended.
try {
  fs.rmSync(tmpDir, { recursive: true, force: true });
} catch {}
console.log(`\nGenerated Zig completions for ${packages.length} packages`);

View File

@@ -0,0 +1,7 @@
# Pass SIGUSR1 straight to the process: don't stop (-s false) and don't notify
# (-n false), but do forward it (-p true).
process handle -p true -s false -n false SIGUSR1
# Zig pretty printers; the two `type category enable` lines turn on the
# categories the script registers.
command script import misctools/lldb/lldb_pretty_printers.py
type category enable zig.lang
type category enable zig.std
# WebKit/JSC pretty printers.
command script import misctools/lldb/lldb_webkit.py

View File

@@ -0,0 +1,733 @@
# https://github.com/ziglang/zig/blob/master/tools/lldb_pretty_printers.py
# pretty printing for the zig language, zig standard library, and zig stage 2 compiler.
# put commands in ~/.lldbinit to run them automatically when starting lldb
# `command script import /path/to/zig/tools/lldb_pretty_printers.py` to import this file
# `type category enable zig.lang` to enable pretty printing for the zig language
# `type category enable zig.std` to enable pretty printing for the zig standard library
# `type category enable zig.stage2` to enable pretty printing for the zig stage 2 compiler
import lldb
import re
page_size = 1 << 12
def log2_int(i):
    """Floor of log2 for a positive integer, via bit length."""
    return i.bit_length() - 1
# Define Zig Language
# Zig's reserved keywords: identifiers colliding with these must be rendered
# quoted as @"...".
zig_keywords = {
    'addrspace',
    'align',
    'allowzero',
    'and',
    'anyframe',
    'anytype',
    'asm',
    'async',
    'await',
    'break',
    'callconv',
    'catch',
    'comptime',
    'const',
    'continue',
    'defer',
    'else',
    'enum',
    'errdefer',
    'error',
    'export',
    'extern',
    'fn',
    'for',
    'if',
    'inline',
    'noalias',
    'noinline',
    'nosuspend',
    'opaque',
    'or',
    'orelse',
    'packed',
    'pub',
    'resume',
    'return',
    'linksection',
    'struct',
    'suspend',
    'switch',
    'test',
    'threadlocal',
    'try',
    'union',
    'unreachable',
    'usingnamespace',
    'var',
    'volatile',
    'while',
}
# Primitive type/value names that likewise cannot be plain variable names.
zig_primitives = {
    'anyerror',
    'anyframe',
    'anyopaque',
    'bool',
    'c_int',
    'c_long',
    'c_longdouble',
    'c_longlong',
    'c_short',
    'c_uint',
    'c_ulong',
    'c_ulonglong',
    'c_ushort',
    'comptime_float',
    'comptime_int',
    'f128',
    'f16',
    'f32',
    'f64',
    'f80',
    'false',
    'isize',
    'noreturn',
    'null',
    'true',
    'type',
    'undefined',
    'usize',
    'void',
}
# Arbitrary-width integer type names (i7, u32, ...).
# NOTE(review): `[1-9][0-9]+` requires at least two digits, so single-digit
# types such as u8/i1 (and u0/i0) are NOT matched and would be treated as
# plain variable names — confirm whether this should be `[0-9]+`.
zig_integer_type = re.compile('[iu][1-9][0-9]+')
zig_identifier_regex = re.compile('[A-Z_a-z][0-9A-Z_a-z]*')
def zig_IsVariableName(string):
    """Truthy when `string` can be rendered as a bare Zig variable name (no @"..." quoting)."""
    if string == '_' or string in zig_keywords or string in zig_primitives: return False
    if zig_integer_type.fullmatch(string): return False
    return zig_identifier_regex.fullmatch(string)
def zig_IsFieldName(string):
    """Truthy when `string` can be rendered as a bare Zig field name."""
    if string in zig_keywords: return False
    return zig_identifier_regex.fullmatch(string)
# Synthetic provider rendering a Zig slice ({ptr, len}) as an indexed list.
class zig_Slice_SynthProvider:
    def __init__(self, value, _=None): self.value = value
    def update(self):
        try:
            self.ptr = self.value.GetChildMemberWithName('ptr')
            # A pointer inside the first page is treated as null/undefined,
            # so the length is forced to 0 to avoid reading garbage.
            self.len = self.value.GetChildMemberWithName('len').unsigned if self.ptr.unsigned > page_size else 0
            self.elem_type = self.ptr.type.GetPointeeType()
            self.elem_size = self.elem_type.size
        except: pass  # LLDB calls update() speculatively; swallowing errors is the convention here
    def has_children(self): return True
    def num_children(self): return self.len or 0
    def get_child_index(self, name):
        # Child names look like "[3]"; anything else yields -1.
        try: return int(name.removeprefix('[').removesuffix(']'))
        except: return -1
    def get_child_at_index(self, index):
        if index not in range(self.len): return None
        try: return self.ptr.CreateChildAtOffset('[%d]' % index, index * self.elem_size, self.elem_type)
        except: return None
# Decode the bytes behind a Zig string slice into an escaped, printable str.
# Returns None when anything goes wrong (e.g. unreadable memory).
def zig_String_decode(value, offset=0, length=None):
    try:
        value = value.GetNonSyntheticValue()
        data = value.GetChildMemberWithName('ptr').GetPointeeData(offset, length if length is not None else value.GetChildMemberWithName('len').unsigned)
        b = bytes(data.uint8)
        # Escape backslashes first so later replacements aren't double-escaped.
        b = b.replace(b'\\', b'\\\\')
        b = b.replace(b'\n', b'\\n')
        b = b.replace(b'\r', b'\\r')
        b = b.replace(b'\t', b'\\t')
        b = b.replace(b'"', b'\\"')
        b = b.replace(b'\'', b'\\\'')
        s = b.decode(encoding='ascii', errors='backslashreplace')
        # Replace any remaining unprintable characters with \xNN escapes.
        return s if s.isprintable() else ''.join((c if c.isprintable() else '\\x%02x' % ord(c) for c in s))
    except: return None
# Summary: the decoded string wrapped in double quotes.
def zig_String_SummaryProvider(value, _=None): return '"%s"' % zig_String_decode(value)
# Render a string as a Zig identifier, quoting it as @"..." when `pred`
# (e.g. zig_IsVariableName) rejects the bare form.
def zig_String_AsIdentifier(value, pred):
    string = zig_String_decode(value)
    return string if pred(string) else '@"%s"' % string
# Synthetic provider for optionals: exposes a single 'child' when some == 1.
class zig_Optional_SynthProvider:
    def __init__(self, value, _=None): self.value = value
    def update(self):
        try:
            # False when absent; otherwise the payload cloned under the name 'child'.
            self.child = self.value.GetChildMemberWithName('some').unsigned == 1 and self.value.GetChildMemberWithName('data').Clone('child')
        except: pass
    def has_children(self): return bool(self.child)
    def num_children(self): return int(self.child)
    def get_child_index(self, name): return 0 if self.child and (name == 'child' or name == '?') else -1
    def get_child_at_index(self, index): return self.child if self.child and index == 0 else None
def zig_Optional_SummaryProvider(value, _=None):
    # NOTE(review): returns an SBValue when present but the str 'null' when
    # absent — confirm LLDB accepts both return types from a summary provider.
    child = value.GetChildMemberWithName('child')
    return child or 'null'
# Synthetic provider for error unions: shows 'payload' when tag == 0 (no
# error), otherwise the 'error_set'.
class zig_ErrorUnion_SynthProvider:
    def __init__(self, value, _=None): self.value = value
    def update(self):
        try:
            self.error_set = self.value.GetChildMemberWithName('tag').Clone('error_set')
            # tag == 0 means success: surface the payload instead of the error set.
            self.payload = self.value.GetChildMemberWithName('value').Clone('payload') if self.error_set.unsigned == 0 else None
        except: pass
    def has_children(self): return True
    def num_children(self): return 1
    def get_child_index(self, name): return 0 if name == ('payload' if self.payload else 'error_set') else -1
    def get_child_at_index(self, index): return self.payload or self.error_set if index == 0 else None
# Synthetic provider for tagged unions: shows the tag plus the active member.
class zig_TaggedUnion_SynthProvider:
    def __init__(self, value, _=None): self.value = value
    def update(self):
        try:
            self.tag = self.value.GetChildMemberWithName('tag')
            # Select the union member whose name matches the tag's value.
            self.payload = self.value.GetChildMemberWithName('payload').GetChildMemberWithName(self.tag.value)
        except: pass
    def has_children(self): return True
    def num_children(self): return 1 + (self.payload is not None)
    def get_child_index(self, name):
        try: return ('tag', 'payload').index(name)
        except: return -1
    def get_child_at_index(self, index): return (self.tag, self.payload)[index] if index in range(2) else None
# Define Zig Standard Library
# std.SegmentedList: a fixed prealloc segment followed by power-of-two-sized
# dynamically allocated "shelves".
class std_SegmentedList_SynthProvider:
    def __init__(self, value, _=None): self.value = value
    def update(self):
        try:
            self.prealloc_segment = self.value.GetChildMemberWithName('prealloc_segment')
            self.dynamic_segments = zig_Slice_SynthProvider(self.value.GetChildMemberWithName('dynamic_segments'))
            self.dynamic_segments.update()
            self.len = self.value.GetChildMemberWithName('len').unsigned
        except: pass
    def has_children(self): return True
    def num_children(self): return self.len
    def get_child_index(self, name):
        try: return int(name.removeprefix('[').removesuffix(']'))
        except: return -1
    def get_child_at_index(self, index):
        try:
            if index not in range(self.len): return None
            prealloc_item_count = len(self.prealloc_segment)
            if index < prealloc_item_count: return self.prealloc_segment.child[index]
            # Mirrors SegmentedList's shelfIndex/boxIndex math to locate the
            # element within the dynamic shelves.
            # NOTE(review): assumed to track std.SegmentedList's internal
            # layout — re-verify if that container changes upstream.
            prealloc_exp = prealloc_item_count.bit_length() - 1
            shelf_index = log2_int(index + 1) if prealloc_item_count == 0 else log2_int(index + prealloc_item_count) - prealloc_exp - 1
            shelf = self.dynamic_segments.get_child_at_index(shelf_index)
            box_index = (index + 1) - (1 << shelf_index) if prealloc_item_count == 0 else index + prealloc_item_count - (1 << ((prealloc_exp + 1) + shelf_index))
            elem_type = shelf.type.GetPointeeType()
            return shelf.CreateChildAtOffset('[%d]' % index, box_index * elem_type.size, elem_type)
        except: return None
# std.MultiArrayList: struct-of-arrays storage. Reconstructs a per-index
# "entry" value by gathering one element from each parallel field array.
class std_MultiArrayList_SynthProvider:
    def __init__(self, value, _=None): self.value = value
    def update(self):
        try:
            self.len = 0
            value_type = self.value.type
            # Locate the compiler-emitted dbHelper function for this type; its
            # pointer argument types encode the list's Child/Field/Entry types.
            for helper in self.value.target.FindFunctions('%s.dbHelper' % value_type.name, lldb.eFunctionNameTypeFull):
                ptr_self_type, ptr_child_type, ptr_field_type, ptr_entry_type = helper.function.type.GetFunctionArgumentTypes()
                if ptr_self_type.GetPointeeType() == value_type: break
            else: return
            self.entry_type = ptr_entry_type.GetPointeeType()
            self.bytes = self.value.GetChildMemberWithName('bytes')
            self.len = self.value.GetChildMemberWithName('len').unsigned
            self.capacity = self.value.GetChildMemberWithName('capacity').unsigned
        except: pass
    def has_children(self): return True
    def num_children(self): return self.len
    def get_child_index(self, name):
        try: return int(name.removeprefix('[').removesuffix(']'))
        except: return -1
    def get_child_at_index(self, index):
        try:
            if index not in range(self.len): return None
            offset = 0
            data = lldb.SBData()
            # Each field is stored as its own array of `capacity` elements;
            # gather element `index` from each to form one entry.
            for field in self.entry_type.fields:
                field_type = field.type.GetPointeeType()
                field_size = field_type.size
                data.Append(self.bytes.CreateChildAtOffset(field.name, offset + index * field_size, field_type).address_of.data)
                offset += self.capacity * field_size
            return self.bytes.CreateValueFromData('[%d]' % index, data, self.entry_type)
        except: return None
# std.MultiArrayList.Slice: like the list provider above, but the per-field
# base pointers come from the `ptrs` array, indexed via the Field enum.
class std_MultiArrayList_Slice_SynthProvider:
    def __init__(self, value, _=None): self.value = value
    def update(self):
        try:
            self.len = 0
            value_type = self.value.type
            # Recover Field/Entry types from the dbHelper function's signature.
            for helper in self.value.target.FindFunctions('%s.dbHelper' % value_type.name, lldb.eFunctionNameTypeFull):
                ptr_self_type, ptr_child_type, ptr_field_type, ptr_entry_type = helper.function.type.GetFunctionArgumentTypes()
                if ptr_self_type.GetPointeeType() == value_type: break
            else: return
            # Map field-enum member names to their ordinal (index into `ptrs`).
            self.fields = {member.name: index for index, member in enumerate(ptr_field_type.GetPointeeType().enum_members)}
            self.entry_type = ptr_entry_type.GetPointeeType()
            self.ptrs = self.value.GetChildMemberWithName('ptrs')
            self.len = self.value.GetChildMemberWithName('len').unsigned
            self.capacity = self.value.GetChildMemberWithName('capacity').unsigned
        except: pass
    def has_children(self): return True
    def num_children(self): return self.len
    def get_child_index(self, name):
        try: return int(name.removeprefix('[').removesuffix(']'))
        except: return -1
    def get_child_at_index(self, index):
        try:
            if index not in range(self.len): return None
            data = lldb.SBData()
            # Entry fields are named like "<field>_ptr"; strip the suffix to
            # look up the matching base pointer in `ptrs`.
            for field in self.entry_type.fields:
                field_type = field.type.GetPointeeType()
                data.Append(self.ptrs.child[self.fields[field.name.removesuffix('_ptr')]].CreateChildAtOffset(field.name, index * field_type.size, field_type).address_of.data)
            return self.ptrs.CreateValueFromData('[%d]' % index, data, self.entry_type)
        except: return None
# std.HashMapUnmanaged: the header (values/keys/capacity) is stored
# immediately *before* the `metadata` pointer; a metadata byte with the sign
# bit set (read as signed < 0) marks an occupied slot.
class std_HashMapUnmanaged_SynthProvider:
    def __init__(self, value, _=None): self.value = value
    def update(self):
        try:
            self.capacity = 0
            self.indices = tuple()
            self.metadata = self.value.GetChildMemberWithName('metadata')
            if not self.metadata.unsigned: return
            value_type = self.value.type
            # Recover the Header/Entry types from the dbHelper signature.
            for helper in self.value.target.FindFunctions('%s.dbHelper' % value_type.name, lldb.eFunctionNameTypeFull):
                ptr_self_type, ptr_hdr_type, ptr_entry_type = helper.function.type.GetFunctionArgumentTypes()
                if ptr_self_type.GetPointeeType() == value_type: break
            else: return
            self.entry_type = ptr_entry_type.GetPointeeType()
            hdr_type = ptr_hdr_type.GetPointeeType()
            hdr = self.metadata.CreateValueFromAddress('header', self.metadata.deref.load_addr - hdr_type.size, hdr_type)
            self.values = hdr.GetChildMemberWithName('values')
            self.keys = hdr.GetChildMemberWithName('keys')
            self.capacity = hdr.GetChildMemberWithName('capacity').unsigned
            # Slot indices whose metadata byte is negative (i.e. occupied).
            self.indices = tuple(i for i, value in enumerate(self.metadata.GetPointeeData(0, self.capacity).sint8) if value < 0)
        except: pass
    def has_children(self): return True
    def num_children(self): return len(self.indices)
    def get_capacity(self): return self.capacity
    def get_child_index(self, name):
        try: return int(name.removeprefix('[').removesuffix(']'))
        except: return -1
    def get_child_at_index(self, index):
        try:
            # Build an Entry {key_ptr, value_ptr} from the parallel key/value arrays.
            fields = {name: base.CreateChildAtOffset(name, self.indices[index] * pointee_type.size, pointee_type).address_of.data for name, base, pointee_type in ((name, base, base.type.GetPointeeType()) for name, base in (('key_ptr', self.keys), ('value_ptr', self.values)))}
            data = lldb.SBData()
            for field in self.entry_type.fields: data.Append(fields[field.name])
            return self.metadata.CreateValueFromData('[%d]' % index, data, self.entry_type)
        except: return None
# Summary: occupied-slot count plus total capacity.
def std_HashMapUnmanaged_SummaryProvider(value, _=None):
    synth = std_HashMapUnmanaged_SynthProvider(value.GetNonSyntheticValue(), _)
    synth.update()
    return 'len=%d capacity=%d' % (synth.num_children(), synth.get_capacity())
# formats a struct of fields of the form `name_ptr: *Type` by auto dereferencing its fields
class std_Entry_SynthProvider:
    def __init__(self, value, _=None): self.value = value
    def update(self):
        try:
            # Drop zero-sized pointees and strip the '_ptr' suffix from names.
            self.children = tuple(child.Clone(child.name.removesuffix('_ptr')) for child in self.value.children if child.type.GetPointeeType().size != 0)
            self.indices = {child.name: i for i, child in enumerate(self.children)}
        except: pass
    def has_children(self): return self.num_children() != 0
    def num_children(self): return len(self.children)
    def get_child_index(self, name): return self.indices.get(name)
    def get_child_at_index(self, index): return self.children[index].deref if index in range(len(self.children)) else None
# Define Zig Stage2 Compiler
# Generic tag+payload provider: works with either direct members or the
# *_ptr indirection, and picks the payload member named by the tag.
class TagAndPayload_SynthProvider:
    def __init__(self, value, _=None): self.value = value
    def update(self):
        try:
            self.tag = self.value.GetChildMemberWithName('tag') or self.value.GetChildMemberWithName('tag_ptr').deref.Clone('tag')
            data = self.value.GetChildMemberWithName('data_ptr') or self.value.GetChildMemberWithName('data')
            self.payload = data.GetChildMemberWithName('payload').GetChildMemberWithName(data.GetChildMemberWithName('tag').value)
        except: pass
    def has_children(self): return True
    def num_children(self): return 2
    def get_child_index(self, name):
        try: return ('tag', 'payload').index(name)
        except: return -1
    def get_child_at_index(self, index): return (self.tag, self.payload)[index] if index in range(2) else None
# Inst.Ref values: named enum members print as-is; otherwise values below
# 0x80000000 are InternPool indices and the rest are instruction indices.
def InstRef_SummaryProvider(value, _=None):
    return value if any(value.unsigned == member.unsigned for member in value.type.enum_members) else (
        'InternPool.Index(%d)' % value.unsigned if value.unsigned < 0x80000000 else 'instructions[%d]' % (value.unsigned - 0x80000000))
def InstIndex_SummaryProvider(value, _=None):
    return 'instructions[%d]' % value.unsigned
# Resolves a Decl index to the actual Decl by finding the InternPool on the
# current thread's stack and indexing allocated_decls.
class zig_DeclIndex_SynthProvider:
    def __init__(self, value, _=None): self.value = value
    def update(self):
        try:
            ip = InternPool_Find(self.value.thread)
            if not ip: return
            self.ptr = ip.GetChildMemberWithName('allocated_decls').GetChildAtIndex(self.value.unsigned).address_of.Clone('decl')
        except: pass
    def has_children(self): return True
    def num_children(self): return 1
    def get_child_index(self, name): return 0 if name == 'decl' else -1
    def get_child_at_index(self, index): return self.ptr if index == 0 else None
# Same idea for Namespace indices, resolved through allocated_namespaces.
class Module_Namespace__Module_Namespace_Index_SynthProvider:
    def __init__(self, value, _=None): self.value = value
    def update(self):
        try:
            ip = InternPool_Find(self.value.thread)
            if not ip: return
            self.ptr = ip.GetChildMemberWithName('allocated_namespaces').GetChildAtIndex(self.value.unsigned).address_of.Clone('namespace')
        except: pass
    def has_children(self): return True
    def num_children(self): return 1
    def get_child_index(self, name): return 0 if name == 'namespace' else -1
    def get_child_at_index(self, index): return self.ptr if index == 0 else None
# Provider for the tag-or-pointer representation: a value below page_size in
# `tag_if_small_enough` is an inline tag; otherwise `ptr_otherwise` points to
# a tag-specific payload struct, whose type is recovered from dbHelper.
class TagOrPayloadPtr_SynthProvider:
    def __init__(self, value, _=None): self.value = value
    def update(self):
        try:
            value_type = self.value.type
            for helper in self.value.target.FindFunctions('%s.dbHelper' % value_type.name, lldb.eFunctionNameTypeFull):
                ptr_self_type, ptr_tag_to_payload_map_type = helper.function.type.GetFunctionArgumentTypes()
                self_type = ptr_self_type.GetPointeeType()
                if self_type == value_type: break
            else: return
            # Maps a tag name to the payload struct type that holds its data.
            tag_to_payload_map = {field.name: field.type for field in ptr_tag_to_payload_map_type.GetPointeeType().fields}
            tag = self.value.GetChildMemberWithName('tag_if_small_enough')
            if tag.unsigned < page_size:
                self.tag = tag.Clone('tag')
                self.payload = None
            else:
                ptr_otherwise = self.value.GetChildMemberWithName('ptr_otherwise')
                self.tag = ptr_otherwise.GetChildMemberWithName('tag')
                self.payload = ptr_otherwise.Cast(tag_to_payload_map[self.tag.value]).GetChildMemberWithName('data').Clone('payload')
        except: pass
    def has_children(self): return True
    def num_children(self): return 1 + (self.payload is not None)
    def get_child_index(self, name):
        try: return ('tag', 'payload').index(name)
        except: return -1
    def get_child_at_index(self, index): return (self.tag, self.payload)[index] if index in range(2) else None
# Read a Decl's (null-terminated) name from process memory, capped at 256 bytes.
def Module_Decl_name(decl):
    error = lldb.SBError()
    return decl.process.ReadCStringFromMemory(decl.GetChildMemberWithName('name').deref.load_addr, 256, error)
# Render "pkg.sub.Name" by walking namespace parents; a near-null parent
# marks the root namespace, whose name comes from the source file path.
def Module_Namespace_RenderFullyQualifiedName(namespace):
    parent = namespace.GetChildMemberWithName('parent')
    if parent.unsigned < page_size: return zig_String_decode(namespace.GetChildMemberWithName('file_scope').GetChildMemberWithName('sub_file_path')).removesuffix('.zig').replace('/', '.')
    return '.'.join((Module_Namespace_RenderFullyQualifiedName(parent), Module_Decl_name(namespace.GetChildMemberWithName('ty').GetChildMemberWithName('payload').GetChildMemberWithName('owner_decl').GetChildMemberWithName('decl'))))
def Module_Decl_RenderFullyQualifiedName(decl): return '.'.join((Module_Namespace_RenderFullyQualifiedName(decl.GetChildMemberWithName('src_namespace')), Module_Decl_name(decl)))
def OwnerDecl_RenderFullyQualifiedName(payload): return Module_Decl_RenderFullyQualifiedName(payload.GetChildMemberWithName('owner_decl').GetChildMemberWithName('decl'))
# Walk up the call stack looking for an InternPool, either directly
# ('ip'/'intern_pool') or via a module variable that owns one.
def InternPool_Find(thread):
    for frame in thread:
        ip = frame.FindVariable('ip') or frame.FindVariable('intern_pool')
        if ip: return ip
        mod = frame.FindVariable('zcu') or frame.FindVariable('mod') or frame.FindVariable('module')
        if mod:
            ip = mod.GetChildMemberWithName('intern_pool')
            if ip: return ip
class InternPool_Index_SynthProvider:
def __init__(self, value, _=None): self.value = value
def update(self):
try:
index_type = self.value.type
for helper in self.value.target.FindFunctions('%s.dbHelper' % index_type.name, lldb.eFunctionNameTypeFull):
ptr_self_type, ptr_tag_to_encoding_map_type = helper.function.type.GetFunctionArgumentTypes()
if ptr_self_type.GetPointeeType() == index_type: break
else: return
tag_to_encoding_map = {field.name: field.type for field in ptr_tag_to_encoding_map_type.GetPointeeType().fields}
ip = InternPool_Find(self.value.thread)
if not ip: return
self.item = ip.GetChildMemberWithName('items').GetChildAtIndex(self.value.unsigned)
extra = ip.GetChildMemberWithName('extra').GetChildMemberWithName('items')
self.tag = self.item.GetChildMemberWithName('tag').Clone('tag')
self.data = None
self.trailing = None
data = self.item.GetChildMemberWithName('data')
encoding_type = tag_to_encoding_map[self.tag.value]
dynamic_values = {}
for encoding_field in encoding_type.fields:
if encoding_field.name == 'data':
if encoding_field.type.IsPointerType():
extra_index = data.unsigned
self.data = extra.GetChildAtIndex(extra_index).address_of.Cast(encoding_field.type).deref.Clone('data')
extra_index += encoding_field.type.GetPointeeType().num_fields
else:
self.data = data.Cast(encoding_field.type).Clone('data')
elif encoding_field.name == 'trailing':
trailing_data = lldb.SBData()
for trailing_field in encoding_field.type.fields:
trailing_data.Append(extra.GetChildAtIndex(extra_index).address_of.data)
trailing_len = dynamic_values['trailing.%s.len' % trailing_field.name].unsigned
trailing_data.Append(lldb.SBData.CreateDataFromInt(trailing_len, trailing_data.GetAddressByteSize()))
extra_index += trailing_len
self.trailing = self.data.CreateValueFromData('trailing', trailing_data, encoding_field.type)
else:
for path in encoding_field.type.GetPointeeType().name.removeprefix('%s::' % encoding_type.name).removeprefix('%s.' % encoding_type.name).partition('__')[0].split(' orelse '):
if path.startswith('data.'):
root = self.data
path = path[len('data'):]
else: return
dynamic_value = root.GetValueForExpressionPath(path)
if dynamic_value:
dynamic_values[encoding_field.name] = dynamic_value
break
except: pass
def has_children(self):
    """Synthetic children (tag/data, optionally trailing) always exist."""
    return True
def num_children(self):
    """Two children (tag, data), plus a third when trailing data is present."""
    if self.trailing is None:
        return 2
    return 3
def get_child_index(self, name):
    """Map a synthetic child name to its index, or -1 for unknown names."""
    names = ('tag', 'data', 'trailing')
    if name in names:
        return names.index(name)
    return -1
def get_child_at_index(self, index):
    """Return the synthetic child at index (0=tag, 1=data, 2=trailing), else None."""
    if index not in range(3):
        return None
    return (self.tag, self.data, self.trailing)[index]
def InternPool_NullTerminatedString_SummaryProvider(value, _=None):
    """Render an InternPool.NullTerminatedString as a quoted string.

    The value is an offset into the InternPool's string_bytes buffer; bytes
    are read until a NUL terminator, decoded as UTF-8, and non-printable
    characters are escaped. Returns None (no summary) on any failure.
    """
    try:
        ip = InternPool_Find(value.thread)
        if not ip: return
        items = ip.GetChildMemberWithName('string_bytes').GetChildMemberWithName('items')
        raw = bytearray()
        offset = 0
        while True:
            byte = items.GetChildAtIndex(value.unsigned + offset).GetValueAsUnsigned()
            if byte == 0: break
            raw.append(byte)
            offset += 1
        decoded = raw.decode(encoding='utf8', errors='backslashreplace')
        if not decoded.isprintable():
            decoded = ''.join(c if c.isprintable() else '\\x%02x' % ord(c) for c in decoded)
        return '"%s"' % decoded
    except:
        pass
def type_Type_pointer(payload):
    # Renders a stage2 pointer Type payload as Zig pointer syntax, e.g.
    # "[*:0]allowzero align(4:2:8) addrspace(.gs) const volatile u8".
    pointee_type = payload.GetChildMemberWithName('pointee_type')
    # Sentinel is wrapped in an optional; 'child' is the payload (an invalid,
    # falsy SBValue when absent).
    sentinel = payload.GetChildMemberWithName('sentinel').GetChildMemberWithName('child')
    align = payload.GetChildMemberWithName('align').unsigned
    addrspace = payload.GetChildMemberWithName('addrspace').value
    bit_offset = payload.GetChildMemberWithName('bit_offset').unsigned
    host_size = payload.GetChildMemberWithName('host_size').unsigned
    vector_index = payload.GetChildMemberWithName('vector_index')
    allowzero = payload.GetChildMemberWithName('allowzero').unsigned
    const = not payload.GetChildMemberWithName('mutable').unsigned
    volatile = payload.GetChildMemberWithName('volatile').unsigned
    size = payload.GetChildMemberWithName('size').value
    # Opening token depends on the pointer size class.
    if size == 'One': summary = '*'
    elif size == 'Many': summary = '[*'
    elif size == 'Slice': summary = '['
    elif size == 'C': summary = '[*c'
    # NOTE(review): no else branch -- an unexpected `size` value would raise
    # NameError on `summary` below; presumably the enum has exactly these
    # four members -- confirm against Zig's std.builtin.Type.Pointer.Size.
    if sentinel: summary += ':%s' % value_Value_SummaryProvider(sentinel)
    if size != 'One': summary += ']'
    if allowzero: summary += 'allowzero '
    # align(N), optionally with :bit_offset:host_size for bit pointers and a
    # trailing vector index (':?' when it is only known at runtime).
    if align != 0 or host_size != 0 or vector_index.value != 'none': summary += 'align(%d%s%s) ' % (align, ':%d:%d' % (bit_offset, host_size) if bit_offset != 0 or host_size != 0 else '', ':?' if vector_index.value == 'runtime' else ':%d' % vector_index.unsigned if vector_index.value != 'none' else '')
    if addrspace != 'generic': summary += 'addrspace(.%s) ' % addrspace
    if const: summary += 'const '
    if volatile: summary += 'volatile '
    # Recurse to render the pointee type after all qualifiers.
    summary += type_Type_SummaryProvider(pointee_type)
    return summary
def type_Type_function(payload):
    # Renders a stage2 function Type payload as Zig syntax, e.g.
    # "fn(comptime T, noalias x) align(8) callconv(.C) Ret".
    param_types = payload.GetChildMemberWithName('param_types').children
    # comptime_params points at one bool per parameter; read them as raw bytes.
    comptime_params = payload.GetChildMemberWithName('comptime_params').GetPointeeData(0, len(param_types)).uint8
    return_type = payload.GetChildMemberWithName('return_type')
    alignment = payload.GetChildMemberWithName('alignment').unsigned
    noalias_bits = payload.GetChildMemberWithName('noalias_bits').unsigned
    cc = payload.GetChildMemberWithName('cc').value
    is_var_args = payload.GetChildMemberWithName('is_var_args').unsigned
    # Bit i of noalias_bits marks parameter i as noalias; var-args appends "...".
    # align/callconv are omitted when they hold their default values.
    return 'fn(%s)%s%s %s' % (', '.join(tuple(''.join(('comptime ' if comptime_param else '', 'noalias ' if noalias_bits & 1 << i else '', type_Type_SummaryProvider(param_type))) for i, (comptime_param, param_type) in enumerate(zip(comptime_params, param_types))) + (('...',) if is_var_args else ())), ' align(%d)' % alignment if alignment != 0 else '', ' callconv(.%s)' % cc if cc != 'Unspecified' else '', type_Type_SummaryProvider(return_type))
def type_Type_SummaryProvider(value, _=None):
    """Summarize a stage2 type.Type by dispatching on its tag.

    Tags with no handler in type_tag_handlers render as the tag name itself.
    """
    tag = value.GetChildMemberWithName('tag').value
    handler = type_tag_handlers.get(tag, lambda payload: tag)
    return handler(value.GetChildMemberWithName('payload'))
# Dispatch table: type.Type tag name -> callable rendering its payload as Zig
# source syntax. Tags without an entry fall back to the tag name itself
# (see type_Type_SummaryProvider).
type_tag_handlers = {
    # Tags whose rendering is a fixed string (payload unused).
    'atomic_order': lambda payload: 'std.builtin.AtomicOrder',
    'atomic_rmw_op': lambda payload: 'std.builtin.AtomicRmwOp',
    'calling_convention': lambda payload: 'std.builtin.CallingConvention',
    'address_space': lambda payload: 'std.builtin.AddressSpace',
    'float_mode': lambda payload: 'std.builtin.FloatMode',
    'reduce_op': lambda payload: 'std.builtin.ReduceOp',
    'modifier': lambda payload: 'std.builtin.CallModifier',
    'prefetch_options': lambda payload: 'std.builtin.PrefetchOptions',
    'export_options': lambda payload: 'std.builtin.ExportOptions',
    'extern_options': lambda payload: 'std.builtin.ExternOptions',
    'type_info': lambda payload: 'std.builtin.Type',
    'enum_literal': lambda payload: '@TypeOf(.enum_literal)',
    'null': lambda payload: '@TypeOf(null)',
    'undefined': lambda payload: '@TypeOf(undefined)',
    'empty_struct_literal': lambda payload: '@TypeOf(.{})',
    'anyerror_void_error_union': lambda payload: 'anyerror!void',
    'slice_const_u8': lambda payload: '[]const u8',
    'slice_const_u8_sentinel_0': lambda payload: '[:0]const u8',
    'fn_noreturn_no_args': lambda payload: 'fn() noreturn',
    'fn_void_no_args': lambda payload: 'fn() void',
    'fn_naked_noreturn_no_args': lambda payload: 'fn() callconv(.Naked) noreturn',
    'fn_ccc_void_no_args': lambda payload: 'fn() callconv(.C) void',
    'single_const_pointer_to_comptime_int': lambda payload: '*const comptime_int',
    'manyptr_u8': lambda payload: '[*]u8',
    'manyptr_const_u8': lambda payload: '[*]const u8',
    'manyptr_const_u8_sentinel_0': lambda payload: '[*:0]const u8',
    # Tags whose rendering recurses into the payload.
    'function': type_Type_function,
    'error_union': lambda payload: '%s!%s' % (type_Type_SummaryProvider(payload.GetChildMemberWithName('error_set')), type_Type_SummaryProvider(payload.GetChildMemberWithName('payload'))),
    'array_u8': lambda payload: '[%d]u8' % payload.unsigned,
    'array_u8_sentinel_0': lambda payload: '[%d:0]u8' % payload.unsigned,
    'vector': lambda payload: '@Vector(%d, %s)' % (payload.GetChildMemberWithName('len').unsigned, type_Type_SummaryProvider(payload.GetChildMemberWithName('elem_type'))),
    'array': lambda payload: '[%d]%s' % (payload.GetChildMemberWithName('len').unsigned, type_Type_SummaryProvider(payload.GetChildMemberWithName('elem_type'))),
    'array_sentinel': lambda payload: '[%d:%s]%s' % (payload.GetChildMemberWithName('len').unsigned, value_Value_SummaryProvider(payload.GetChildMemberWithName('sentinel')), type_Type_SummaryProvider(payload.GetChildMemberWithName('elem_type'))),
    # Tuple/struct fields whose value tag is 'unreachable_value' are runtime
    # fields; anything else is a comptime field with that value.
    'tuple': lambda payload: 'tuple{%s}' % ', '.join(('comptime %%s = %s' % value_Value_SummaryProvider(value) if value.GetChildMemberWithName('tag').value != 'unreachable_value' else '%s') % type_Type_SummaryProvider(type) for type, value in zip(payload.GetChildMemberWithName('types').children, payload.GetChildMemberWithName('values').children)),
    'anon_struct': lambda payload: 'struct{%s}' % ', '.join(('comptime %%s: %%s = %s' % value_Value_SummaryProvider(value) if value.GetChildMemberWithName('tag').value != 'unreachable_value' else '%s: %s') % (zig_String_AsIdentifier(name, zig_IsFieldName), type_Type_SummaryProvider(type)) for name, type, value in zip(payload.GetChildMemberWithName('names').children, payload.GetChildMemberWithName('types').children, payload.GetChildMemberWithName('values').children)),
    'pointer': type_Type_pointer,
    'single_const_pointer': lambda payload: '*const %s' % type_Type_SummaryProvider(payload),
    'single_mut_pointer': lambda payload: '*%s' % type_Type_SummaryProvider(payload),
    'many_const_pointer': lambda payload: '[*]const %s' % type_Type_SummaryProvider(payload),
    'many_mut_pointer': lambda payload: '[*]%s' % type_Type_SummaryProvider(payload),
    'c_const_pointer': lambda payload: '[*c]const %s' % type_Type_SummaryProvider(payload),
    'c_mut_pointer': lambda payload: '[*c]%s' % type_Type_SummaryProvider(payload),
    'slice_const': lambda payload: '[]const %s' % type_Type_SummaryProvider(payload),
    'mut_slice': lambda payload: '[]%s' % type_Type_SummaryProvider(payload),
    'int_signed': lambda payload: 'i%d' % payload.unsigned,
    'int_unsigned': lambda payload: 'u%d' % payload.unsigned,
    'optional': lambda payload: '?%s' % type_Type_SummaryProvider(payload),
    'optional_single_mut_pointer': lambda payload: '?*%s' % type_Type_SummaryProvider(payload),
    'optional_single_const_pointer': lambda payload: '?*const %s' % type_Type_SummaryProvider(payload),
    'anyframe_T': lambda payload: 'anyframe->%s' % type_Type_SummaryProvider(payload),
    'error_set': lambda payload: type_tag_handlers['error_set_merged'](payload.GetChildMemberWithName('names')),
    'error_set_single': lambda payload: 'error{%s}' % zig_String_AsIdentifier(payload, zig_IsFieldName),
    'error_set_merged': lambda payload: 'error{%s}' % ','.join(zig_String_AsIdentifier(child.GetChildMemberWithName('key'), zig_IsFieldName) for child in payload.GetChildMemberWithName('entries').children),
    'error_set_inferred': lambda payload: '@typeInfo(@typeInfo(@TypeOf(%s)).@"fn".return_type.?).error_union.error_set' % OwnerDecl_RenderFullyQualifiedName(payload.GetChildMemberWithName('func')),
    # Container types render as their owner Decl's fully qualified name.
    'enum_full': OwnerDecl_RenderFullyQualifiedName,
    'enum_nonexhaustive': OwnerDecl_RenderFullyQualifiedName,
    'enum_numbered': OwnerDecl_RenderFullyQualifiedName,
    'enum_simple': OwnerDecl_RenderFullyQualifiedName,
    'struct': OwnerDecl_RenderFullyQualifiedName,
    'union': OwnerDecl_RenderFullyQualifiedName,
    'union_safety_tagged': OwnerDecl_RenderFullyQualifiedName,
    'union_tagged': OwnerDecl_RenderFullyQualifiedName,
    'opaque': OwnerDecl_RenderFullyQualifiedName,
}
def value_Value_str_lit(payload):
    """Render a str_lit Value by reading the module's string literal bytes.

    Searches the current thread's frames for a compiler Zcu/Module local
    (tried under its historical names); returns None if none is in scope.
    """
    for frame in payload.thread:
        mod = frame.FindVariable('zcu') or frame.FindVariable('mod') or frame.FindVariable('module')
        if mod:
            break
    else:
        return
    literal_bytes = mod.GetChildMemberWithName('string_literal_bytes').GetChildMemberWithName('items')
    start = payload.GetChildMemberWithName('index').unsigned
    length = payload.GetChildMemberWithName('len').unsigned
    return '"%s"' % zig_String_decode(literal_bytes, start, length)
def value_Value_SummaryProvider(value, _=None):
    """Summarize a stage2 value.Value by dispatching on its tag.

    Unhandled tags are assumed to be the *_type tags and render as the tag
    name with the '_type' suffix stripped.
    """
    tag = value.GetChildMemberWithName('tag').value
    handler = value_tag_handlers.get(tag, lambda payload: tag.removesuffix('_type'))
    return handler(value.GetChildMemberWithName('payload'))
# Dispatch table: value.Value tag name -> callable rendering its payload as
# Zig source syntax. Tags without an entry fall back to the tag name minus a
# '_type' suffix (see value_Value_SummaryProvider).
value_tag_handlers = {
    # Fixed renderings (payload unused).
    'undef': lambda payload: 'undefined',
    'zero': lambda payload: '0',
    'one': lambda payload: '1',
    'void_value': lambda payload: '{}',
    'unreachable_value': lambda payload: 'unreachable',
    'null_value': lambda payload: 'null',
    'bool_true': lambda payload: 'true',
    'bool_false': lambda payload: 'false',
    'empty_struct_value': lambda payload: '.{}',
    'empty_array': lambda payload: '.{}',
    # Payload-dependent renderings.
    'ty': type_Type_SummaryProvider,
    'int_type': lambda payload: '%c%d' % (payload.GetChildMemberWithName('bits').unsigned, 's' if payload.GetChildMemberWithName('signed').unsigned == 1 else 'u'),
    'int_u64': lambda payload: '%d' % payload.unsigned,
    'int_i64': lambda payload: '%d' % payload.signed,
    # Big ints are stored as little-endian limb arrays; reassemble the value.
    'int_big_positive': lambda payload: sum(child.unsigned << i * child.type.size * 8 for i, child in enumerate(payload.children)),
    'int_big_negative': lambda payload: '-%s' % value_tag_handlers['int_big_positive'](payload),
    'function': OwnerDecl_RenderFullyQualifiedName,
    'extern_fn': OwnerDecl_RenderFullyQualifiedName,
    # Indirections: follow the referenced Decl's value.
    'variable': lambda payload: value_Value_SummaryProvider(payload.GetChildMemberWithName('decl').GetChildMemberWithName('val')),
    'runtime_value': value_Value_SummaryProvider,
    'decl_ref': lambda payload: value_Value_SummaryProvider(payload.GetChildMemberWithName('decl').GetChildMemberWithName('val')),
    'decl_ref_mut': lambda payload: value_Value_SummaryProvider(payload.GetChildMemberWithName('decl_index').GetChildMemberWithName('decl').GetChildMemberWithName('val')),
    'comptime_field_ptr': lambda payload: '&%s' % value_Value_SummaryProvider(payload.GetChildMemberWithName('field_val')),
    'elem_ptr': lambda payload: '(%s)[%d]' % (value_Value_SummaryProvider(payload.GetChildMemberWithName('array_ptr')), payload.GetChildMemberWithName('index').unsigned),
    'field_ptr': lambda payload: '(%s).field[%d]' % (value_Value_SummaryProvider(payload.GetChildMemberWithName('container_ptr')), payload.GetChildMemberWithName('field_index').unsigned),
    'bytes': lambda payload: '"%s"' % zig_String_decode(payload),
    'str_lit': value_Value_str_lit,
    'repeated': lambda payload: '.{%s} ** _' % value_Value_SummaryProvider(payload),
    'empty_array_sentinel': lambda payload: '.{%s}' % value_Value_SummaryProvider(payload),
    'slice': lambda payload: '(%s)[0..%s]' % tuple(value_Value_SummaryProvider(payload.GetChildMemberWithName(name)) for name in ('ptr', 'len')),
    'float_16': lambda payload: payload.value,
    'float_32': lambda payload: payload.value,
    'float_64': lambda payload: payload.value,
    'float_80': lambda payload: payload.value,
    'float_128': lambda payload: payload.value,
    'enum_literal': lambda payload: '.%s' % zig_String_AsIdentifier(payload, zig_IsFieldName),
    'enum_field_index': lambda payload: 'field[%d]' % payload.unsigned,
    'error': lambda payload: 'error.%s' % zig_String_AsIdentifier(payload.GetChildMemberWithName('name'), zig_IsFieldName),
    'eu_payload': value_Value_SummaryProvider,
    'eu_payload_ptr': lambda payload: '&((%s).* catch unreachable)' % value_Value_SummaryProvider(payload.GetChildMemberWithName('container_ptr')),
    'opt_payload': value_Value_SummaryProvider,
    'opt_payload_ptr': lambda payload: '&(%s).*.?' % value_Value_SummaryProvider(payload.GetChildMemberWithName('container_ptr')),
    'aggregate': lambda payload: '.{%s}' % ', '.join(map(value_Value_SummaryProvider, payload.children)),
    'union': lambda payload: '.{.%s = %s}' % tuple(value_Value_SummaryProvider(payload.GetChildMemberWithName(name)) for name in ('tag', 'val')),
    'lazy_align': lambda payload: '@alignOf(%s)' % type_Type_SummaryProvider(payload),
    'lazy_size': lambda payload: '@sizeOf(%s)' % type_Type_SummaryProvider(payload),
}
# Initialize
def add(debugger, *, category, regex=False, type, identifier=None, synth=False, inline_children=False, expand=False, summary=False):
    """Register lldb formatters for `type` in the given category.

    Args:
        debugger: the lldb.SBDebugger to issue `type summary/synthetic add` to.
        category: lldb type category name (e.g. 'zig.lang').
        regex: treat `type` as a regular expression.
        type: the type name or pattern to match.
        identifier: overrides the provider-name stem derived from `type`.
        synth: also register a `<prefix>_SynthProvider` synthetic provider.
        inline_children / expand: passed through as lldb summary options.
        summary: True to use `<prefix>_SummaryProvider`; a string to use it
            verbatim as a summary-string; falsy to register no summary.
    """
    # Provider names live in this module; '.'/':' are not valid in identifiers.
    prefix = '.'.join((__name__, (identifier or type).replace('.', '_').replace(':', '_')))
    # Fix: use an identity check so that only the boolean flag True selects the
    # python-function form; any other truthy value is a summary-string.
    if summary: debugger.HandleCommand('type summary add --category %s%s%s "%s"' % (category, ' --inline-children' if inline_children else ''.join((' --expand' if expand else '', ' --python-function %s_SummaryProvider' % prefix if summary is True else ' --summary-string "%s"' % summary)), ' --regex' if regex else '', type))
    if synth: debugger.HandleCommand('type synthetic add --category %s%s --python-class %s_SynthProvider "%s"' % (category, ' --regex' if regex else '', prefix, type))
def MultiArrayList_Entry(type):
    """Regex matching the compiler-mangled Entry struct of MultiArrayList(type)."""
    pattern = '^multi_array_list\\.MultiArrayList\\(%s\\)\\.Entry__struct_[1-9][0-9]*$'
    return pattern % type
def __lldb_init_module(debugger, _=None):
    # Entry point invoked by lldb when this script is loaded via
    # `command script import`; registers all Zig formatters.
    # Initialize Zig Categories
    # NOTE(review): 'zig.stage2' is not listed here -- presumably lldb creates
    # the category implicitly on first `type ... add`; confirm.
    debugger.HandleCommand('type category define --language c99 zig.lang zig.std')
    # Initialize Zig Language
    add(debugger, category='zig.lang', regex=True, type='^\\[\\]', identifier='zig_Slice', synth=True, expand=True, summary='len=${svar%#}')
    add(debugger, category='zig.lang', type='[]u8', identifier='zig_String', summary=True)
    add(debugger, category='zig.lang', regex=True, type='^\\?', identifier='zig_Optional', synth=True, summary=True)
    add(debugger, category='zig.lang', regex=True, type='^(error{.*}|anyerror)!', identifier='zig_ErrorUnion', synth=True, inline_children=True, summary=True)
    # Initialize Zig Standard Library
    add(debugger, category='zig.std', type='mem.Allocator', summary='${var.ptr}')
    add(debugger, category='zig.std', regex=True, type='^segmented_list\\.SegmentedList\\(.*\\)$', identifier='std_SegmentedList', synth=True, expand=True, summary='len=${var.len}')
    add(debugger, category='zig.std', regex=True, type='^multi_array_list\\.MultiArrayList\\(.*\\)$', identifier='std_MultiArrayList', synth=True, expand=True, summary='len=${var.len} capacity=${var.capacity}')
    add(debugger, category='zig.std', regex=True, type='^multi_array_list\\.MultiArrayList\\(.*\\)\\.Slice$', identifier='std_MultiArrayList_Slice', synth=True, expand=True, summary='len=${var.len} capacity=${var.capacity}')
    add(debugger, category='zig.std', regex=True, type=MultiArrayList_Entry('.*'), identifier='std_Entry', synth=True, inline_children=True, summary=True)
    add(debugger, category='zig.std', regex=True, type='^hash_map\\.HashMapUnmanaged\\(.*\\)$', identifier='std_HashMapUnmanaged', synth=True, expand=True, summary=True)
    add(debugger, category='zig.std', regex=True, type='^hash_map\\.HashMapUnmanaged\\(.*\\)\\.Entry$', identifier = 'std_Entry', synth=True, inline_children=True, summary=True)
    # Initialize Zig Stage2 Compiler
    # Zir/Air instructions share the generic TagAndPayload providers; Ref and
    # Index get scalar summaries.
    add(debugger, category='zig.stage2', type='Zir.Inst', identifier='TagAndPayload', synth=True, inline_children=True, summary=True)
    add(debugger, category='zig.stage2', regex=True, type=MultiArrayList_Entry('Zir\\.Inst'), identifier='TagAndPayload', synth=True, inline_children=True, summary=True)
    add(debugger, category='zig.stage2', regex=True, type='^Zir\\.Inst\\.Data\\.Data__struct_[1-9][0-9]*$', inline_children=True, summary=True)
    add(debugger, category='zig.stage2', type='Zir.Inst::Zir.Inst.Ref', identifier='InstRef', summary=True)
    add(debugger, category='zig.stage2', type='Zir.Inst::Zir.Inst.Index', identifier='InstIndex', summary=True)
    add(debugger, category='zig.stage2', type='Air.Inst', identifier='TagAndPayload', synth=True, inline_children=True, summary=True)
    add(debugger, category='zig.stage2', type='Air.Inst::Air.Inst.Ref', identifier='InstRef', summary=True)
    add(debugger, category='zig.stage2', type='Air.Inst::Air.Inst.Index', identifier='InstIndex', summary=True)
    add(debugger, category='zig.stage2', regex=True, type=MultiArrayList_Entry('Air\\.Inst'), identifier='TagAndPayload', synth=True, inline_children=True, summary=True)
    add(debugger, category='zig.stage2', regex=True, type='^Air\\.Inst\\.Data\\.Data__struct_[1-9][0-9]*$', inline_children=True, summary=True)
    add(debugger, category='zig.stage2', type='zig.DeclIndex', synth=True)
    add(debugger, category='zig.stage2', type='Module.Namespace::Module.Namespace.Index', synth=True)
    add(debugger, category='zig.stage2', type='Module.LazySrcLoc', identifier='zig_TaggedUnion', synth=True)
    # InternPool keys and nested tagged unions.
    add(debugger, category='zig.stage2', type='InternPool.Index', synth=True)
    add(debugger, category='zig.stage2', type='InternPool.NullTerminatedString', summary=True)
    add(debugger, category='zig.stage2', type='InternPool.Key', identifier='zig_TaggedUnion', synth=True)
    add(debugger, category='zig.stage2', type='InternPool.Key.Int.Storage', identifier='zig_TaggedUnion', synth=True)
    add(debugger, category='zig.stage2', type='InternPool.Key.ErrorUnion.Value', identifier='zig_TaggedUnion', synth=True)
    add(debugger, category='zig.stage2', type='InternPool.Key.Float.Storage', identifier='zig_TaggedUnion', synth=True)
    add(debugger, category='zig.stage2', type='InternPool.Key.Ptr.Addr', identifier='zig_TaggedUnion', synth=True)
    add(debugger, category='zig.stage2', type='InternPool.Key.Aggregate.Storage', identifier='zig_TaggedUnion', synth=True)
    add(debugger, category='zig.stage2', type='arch.x86_64.CodeGen.MCValue', identifier='zig_TaggedUnion', synth=True, inline_children=True, summary=True)

File diff suppressed because it is too large Load Diff

View File

@@ -5,7 +5,10 @@
"license": "MIT",
"devDependencies": {
"@unicode/unicode-13.0.0": "^1.2.1",
"@unicode/unicode-3.0.0": "^1.2.1",
"@unicode/unicode-3.0.0": "^1.6.5",
"semver": "^7.3.7"
},
"dependencies": {
"@unicode/unicode-15.1.0": "^1.6.5"
}
}

View File

@@ -0,0 +1,138 @@
import crypto from "crypto";
// Types to mirror Zig's structures
/** Supplies the per-codepoint element and an equality predicate used to dedupe them. */
interface Context<Elem> {
  /** Return the element for a codepoint; may be asynchronous. */
  get(codepoint: number): Promise<Elem> | Elem;
  /** True when two elements are interchangeable (dedupes stage3 entries). */
  eql(a: Elem, b: Elem): boolean;
}
/** The three stages of the compressed codepoint lookup table produced by Generator. */
interface Tables<Elem> {
  /** One entry per 256-codepoint block: the block's starting offset in stage2. */
  stage1: number[];
  /** Flattened block contents: indices into stage3. */
  stage2: number[];
  /** Deduplicated elements. */
  stage3: Elem[];
}
/**
 * Builds a 3-stage compressed lookup table over all Unicode codepoints:
 * stage1[cp >> 8] gives an offset into stage2, stage2[offset + (cp & 0xff)]
 * gives an index into stage3, which holds the deduplicated elements.
 * Identical 256-entry blocks are shared via a content hash.
 */
class Generator<Elem> {
  /** Number of codepoints covered by one stage-1 block. */
  private static readonly BLOCK_SIZE = 256;
  private readonly ctx: Context<Elem>;
  /** SHA-256 hex of a block's contents -> its starting index in stage2. */
  private readonly blockMap = new Map<string, number>();
  constructor(ctx: Context<Elem>) {
    this.ctx = ctx;
  }
  /** Hash a block of stage3 indices so identical blocks can be shared. */
  private hashBlock(block: number[]): string {
    const hash = crypto.createHash("sha256");
    // Indices are validated against 0xffff in generate(), so u16 packing is lossless.
    hash.update(Buffer.from(new Uint16Array(block).buffer));
    return hash.digest("hex");
  }
  /** Walk every codepoint (0..0x10FFFF) and build the three stages. */
  async generate(): Promise<Tables<Elem>> {
    const stage1: number[] = [];
    const stage2: number[] = [];
    const stage3: Elem[] = [];
    let block = new Array(Generator.BLOCK_SIZE).fill(0);
    let blockLen = 0;
    // Maximum Unicode codepoint is 0x10FFFF
    for (let cp = 0; cp <= 0x10ffff; cp++) {
      // Get the mapping for this codepoint
      const elem = await this.ctx.get(cp);
      // Find or add the element in stage3
      let blockIdx = stage3.findIndex(item => this.ctx.eql(item, elem));
      if (blockIdx === -1) {
        blockIdx = stage3.length;
        stage3.push(elem);
      }
      if (blockIdx > 0xffff) {
        throw new Error("Block index too large");
      }
      // Add to current block
      block[blockLen] = blockIdx;
      blockLen++;
      // Check if we need to finalize this block
      if (blockLen < Generator.BLOCK_SIZE && cp !== 0x10ffff) {
        continue;
      }
      // Fill remaining block space with zeros if needed
      if (blockLen < Generator.BLOCK_SIZE) {
        block.fill(0, blockLen);
      }
      // Get or create stage2 index for this block
      const blockHash = this.hashBlock(block);
      let stage2Idx = this.blockMap.get(blockHash);
      if (stage2Idx === undefined) {
        stage2Idx = stage2.length;
        this.blockMap.set(blockHash, stage2Idx);
        // NOTE(review): only blockLen entries are appended even though the
        // hash covers the zero-padded full block. Only the final block can be
        // partial, so nothing reads past it -- confirm the Zig consumer never
        // indexes beyond the real data for the last block.
        stage2.push(...block.slice(0, blockLen));
      }
      if (stage2Idx > 0xffff) {
        throw new Error("Stage2 index too large");
      }
      // Add mapping to stage1
      stage1.push(stage2Idx);
      // Reset block
      block = new Array(Generator.BLOCK_SIZE).fill(0);
      blockLen = 0;
    }
    return { stage1, stage2, stage3 };
  }
  // Generates Zig code for the lookup tables
  // NOTE(review): stage2 is emitted as u8, but generate() only validates its
  // entries against 0xffff -- confirm u8 is wide enough (i.e. real tables
  // never exceed 255 distinct elements), or widen to u16.
  static writeZig<Elem>(tableName: string, tables: Tables<Elem>, elemToString: (elem: Elem) => string): string {
    let output = `/// Auto-generated. Do not edit.\n`;
    output += `fn ${tableName}(comptime Elem: type) type {\n`;
    output += " return struct {\n";
    // Stage 1
    output += `pub const stage1: [${tables.stage1.length}]u16 = .{`;
    output += tables.stage1.join(",");
    output += "};\n\n";
    // Stage 2
    output += `pub const stage2: [${tables.stage2.length}]u8 = .{`;
    output += tables.stage2.join(",");
    output += "};\n\n";
    // Stage 3
    output += `pub const stage3: [${tables.stage3.length}]Elem = .{`;
    output += tables.stage3.map(elemToString).join(",");
    output += "};\n";
    output += " };\n}\n";
    return output;
  }
}
// Example usage:
/** Demonstrates building tables with a trivial context and emitting Zig code. */
async function example() {
  // Example context that maps codepoints to their category
  const ctx: Context<string> = {
    get: async (cp: number) => {
      // This would normally look up the actual Unicode category
      return "Lu";
    },
    eql: (a: string, b: string) => a === b,
  };
  const generator = new Generator(ctx);
  const tables = await generator.generate();
  // Generate Zig code
  // Fix: writeZig takes (tableName, tables, elemToString); the table name
  // argument was missing, which is a compile error in TypeScript.
  const zigCode = Generator.writeZig("exampleTable", tables, (elem: string) => `"${elem}"`);
  console.log(zigCode);
}
export { Generator, type Context, type Tables };

View File

@@ -1,7 +1,7 @@
{
"private": true,
"name": "bun",
"version": "1.1.39",
"version": "1.1.43",
"workspaces": [
"./packages/bun-types"
],

View File

@@ -1,55 +1,25 @@
use bun_native_plugin::{define_bun_plugin, BunLoader, OnBeforeParse};
use bun_native_plugin::{anyhow, bun, define_bun_plugin, BunLoader, Result};
use mdxjs::{compile, Options as CompileOptions};
use napi_derive::napi;
#[macro_use]
extern crate napi;
define_bun_plugin!("bun-mdx-rs");
#[no_mangle]
pub extern "C" fn bun_mdx_rs(
args: *const bun_native_plugin::sys::OnBeforeParseArguments,
result: *mut bun_native_plugin::sys::OnBeforeParseResult,
) {
let args = unsafe { &*args };
let mut handle = match OnBeforeParse::from_raw(args, result) {
Ok(handle) => handle,
Err(_) => {
return;
}
};
let source_str = match handle.input_source_code() {
Ok(source_str) => source_str,
Err(_) => {
handle.log_error("Failed to fetch source code");
return;
}
};
#[bun]
pub fn bun_mdx_rs(handle: &mut OnBeforeParse) -> Result<()> {
let source_str = handle.input_source_code()?;
let mut options = CompileOptions::gfm();
// Leave it as JSX for Bun to handle
options.jsx = true;
let path = match handle.path() {
Ok(path) => path,
Err(e) => {
handle.log_error(&format!("Failed to get path: {:?}", e));
return;
}
};
let path = handle.path()?;
options.filepath = Some(path.to_string());
match compile(&source_str, &options) {
Ok(compiled) => {
handle.set_output_source_code(compiled, BunLoader::BUN_LOADER_JSX);
}
Err(_) => {
handle.log_error("Failed to compile MDX");
return;
}
}
let jsx = compile(&source_str, &options)
.map_err(|e| anyhow::anyhow!("Failed to compile MDX: {:?}", e))?;
handle.set_output_source_code(jsx, BunLoader::BUN_LOADER_JSX);
Ok(())
}

View File

@@ -116,6 +116,7 @@ try {
entrypoints: [join(import.meta.dir, "out/manifest.js")],
outdir: "out",
minify: true,
throw: true,
});
const jsFilename = "manifest-" + jsBundle.outputs[0].hash + ".js";
// const cssBundle = await build({

View File

@@ -11,6 +11,12 @@ dependencies = [
"memchr",
]
[[package]]
name = "anyhow"
version = "1.0.94"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c1fd03a028ef38ba2276dce7e33fcd6369c158a1bca17946c4b1b701891c1ff7"
[[package]]
name = "bindgen"
version = "0.70.1"
@@ -37,11 +43,24 @@ version = "2.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de"
[[package]]
name = "bun-macro"
version = "0.1.0"
dependencies = [
"anyhow",
"napi",
"quote",
"syn",
]
[[package]]
name = "bun-native-plugin"
version = "0.1.0"
dependencies = [
"anyhow",
"bindgen",
"bun-macro",
"napi",
]
[[package]]
@@ -70,6 +89,25 @@ dependencies = [
"libloading",
]
[[package]]
name = "convert_case"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca"
dependencies = [
"unicode-segmentation",
]
[[package]]
name = "ctor"
version = "0.2.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32a2785755761f3ddc1492979ce1e48d2c00d09311c39e4466429188f3dd6501"
dependencies = [
"quote",
"syn",
]
[[package]]
name = "either"
version = "1.13.0"
@@ -125,6 +163,55 @@ version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
[[package]]
name = "napi"
version = "2.16.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "214f07a80874bb96a8433b3cdfc84980d56c7b02e1a0d7ba4ba0db5cef785e2b"
dependencies = [
"bitflags",
"ctor",
"napi-derive",
"napi-sys",
"once_cell",
]
[[package]]
name = "napi-derive"
version = "2.16.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7cbe2585d8ac223f7d34f13701434b9d5f4eb9c332cccce8dee57ea18ab8ab0c"
dependencies = [
"cfg-if",
"convert_case",
"napi-derive-backend",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "napi-derive-backend"
version = "1.0.75"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1639aaa9eeb76e91c6ae66da8ce3e89e921cd3885e99ec85f4abacae72fc91bf"
dependencies = [
"convert_case",
"once_cell",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "napi-sys"
version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "427802e8ec3a734331fec1035594a210ce1ff4dc5bc1950530920ab717964ea3"
dependencies = [
"libloading",
]
[[package]]
name = "nom"
version = "7.1.3"
@@ -135,6 +222,12 @@ dependencies = [
"minimal-lexical",
]
[[package]]
name = "once_cell"
version = "1.20.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775"
[[package]]
name = "prettyplease"
version = "0.2.25"
@@ -221,6 +314,12 @@ version = "1.0.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83"
[[package]]
name = "unicode-segmentation"
version = "1.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493"
[[package]]
name = "windows-targets"
version = "0.52.6"

View File

@@ -5,3 +5,13 @@ edition = "2021"
[build-dependencies]
bindgen = "0.70.1"
[dependencies]
anyhow = "1.0.94"
bun-macro = { path = "./bun-macro" }
napi = { version = "2.14.1", default-features = false, features = ["napi4"] }
[features]
default = ["napi"]
napi = []

View File

@@ -1,10 +1,10 @@
> ⚠️ Note: This is an advanced and experimental API recommended only for plugin developers who are familiar with systems proramming and the C ABI. Use with caution.
> ⚠️ Note: This is an advanced and experimental API recommended only for plugin developers who are familiar with systems programming and the C ABI. Use with caution.
# Bun Native Plugins
This crate provides a Rustified wrapper over the Bun's native bundler plugin C API.
Some advantages to _native_ bundler plugins as opposed to regular ones implemented in JS:
Some advantages to _native_ bundler plugins as opposed to regular ones implemented in JS are:
- Native plugins take full advantage of Bun's parallelized bundler pipeline and run on multiple threads at the same time
- Unlike JS, native plugins don't need to do the UTF-8 <-> UTF-16 source code string conversions
@@ -30,61 +30,84 @@ Then install this crate:
cargo add bun-native-plugin
```
Now, inside the `lib.rs` file, expose a C ABI function which has the same function signature as the plugin lifecycle hook that you want to implement.
Now, inside the `lib.rs` file, we'll use the `bun_native_plugin::bun` proc macro to define a function which
will implement our native plugin.
For example, implementing `onBeforeParse`:
Here's an example implementing the `onBeforeParse` hook:
```rs
use bun_native_plugin::{define_bun_plugin, OnBeforeParse};
use bun_native_plugin::{define_bun_plugin, OnBeforeParse, bun, Result, anyhow, BunLoader};
use napi_derive::napi;
/// Define with the name of the plugin
/// Define the plugin and its name
define_bun_plugin!("replace-foo-with-bar");
/// This is necessary for napi-rs to compile this into a proper NAPI module
#[napi]
pub fn register_bun_plugin() {}
/// Use `no_mangle` so that we can reference this symbol by name later
/// when registering this native plugin in JS.
/// Here we'll implement `onBeforeParse` with code that replaces all occurrences of
/// `foo` with `bar`.
///
/// Here we'll create a dummy plugin which replaces all occurrences of
/// `foo` with `bar`
#[no_mangle]
pub extern "C" fn on_before_parse_plugin_impl(
args: *const bun_native_plugin::sys::OnBeforeParseArguments,
result: *mut bun_native_plugin::sys::OnBeforeParseResult,
) {
let args = unsafe { &*args };
// This returns a handle which is a safe wrapper over the raw
// C API.
let mut handle = OnBeforeParse::from_raw(args, result) {
Ok(handle) => handle,
Err(_) => {
// `OnBeforeParse::from_raw` handles error logging
// so it fine to return here.
return;
}
};
let input_source_code = match handle.input_source_code() {
Ok(source_str) => source_str,
Err(_) => {
// If we encounter an error, we must log it so that
// Bun knows this plugin failed.
handle.log_error("Failed to fetch source code!");
return;
}
};
/// We use the #[bun] macro to generate some of the boilerplate code.
///
/// The argument of the function (`handle: &mut OnBeforeParse`) tells
/// the macro that this function implements the `onBeforeParse` hook.
#[bun]
pub fn replace_foo_with_bar(handle: &mut OnBeforeParse) -> Result<()> {
// Fetch the input source code.
let input_source_code = handle.input_source_code()?;
// Get the Loader for the file
let loader = handle.output_loader();
let output_source_code = source_str.replace("foo", "bar");
handle.set_output_source_code(output_source_code, loader);
let output_source_code = input_source_code.replace("foo", "bar");
handle.set_output_source_code(output_source_code, BunLoader::BUN_LOADER_JSX);
Ok(())
}
```
Then compile this NAPI module. If you using napi-rs, the `package.json` should have a `build` script you can run:
Internally, the `#[bun]` macro wraps your code and declares a C ABI function which implements
the function signature of `onBeforeParse` plugins in Bun's C API for bundler plugins.
Then it calls your code. The wrapper looks _roughly_ like this:
```rs
pub extern "C" fn replace_foo_with_bar(
args: *const bun_native_plugin::sys::OnBeforeParseArguments,
result: *mut bun_native_plugin::sys::OnBeforeParseResult,
) {
// The actual code you wrote is inlined here
fn __replace_foo_with_bar(handle: &mut OnBeforeParse) -> Result<()> {
// Fetch the input source code.
let input_source_code = handle.input_source_code()?;
// Get the Loader for the file
let loader = handle.output_loader();
let output_source_code = input_source_code.replace("foo", "bar");
handle.set_output_source_code(output_source_code, BunLoader::BUN_LOADER_JSX);
Ok(())
}
let args = unsafe { &*args };
let mut handle = match OnBeforeParse::from_raw(args, result) {
Ok(handle) => handle,
Err(_) => {
return;
}
};
if let Err(e) = __replace_foo_with_bar(&mut handle) {
handle.log_error(&e.to_string());
}
}
```
Now, let's compile this NAPI module. If you're using napi-rs, the `package.json` should have a `build` script you can run:
```bash
bun run build
@@ -107,7 +130,7 @@ const result = await Bun.build({
// We tell it to use function we implemented inside of our `lib.rs` code.
build.onBeforeParse(
{ filter: /\.ts/ },
{ napiModule, symbol: "on_before_parse_plugin_impl" },
{ napiModule, symbol: "replace_foo_with_bar" },
);
},
},
@@ -119,19 +142,14 @@ const result = await Bun.build({
### Error handling and panics
It is highly recommended to avoid panicking as this will crash the runtime. Instead, you must handle errors and log them:
In the case that the value of the `Result` your plugin function returns is an `Err(...)`, the error will be logged to Bun's bundler.
```rs
let input_source_code = match handle.input_source_code() {
Ok(source_str) => source_str,
Err(_) => {
// If we encounter an error, we must log it so that
// Bun knows this plugin failed.
handle.log_error("Failed to fetch source code!");
return;
}
};
```
It is highly advised that you return all errors and avoid calling `.unwrap()` or `.expect()` on results.
The `#[bun]` wrapper macro actually runs your code inside of a [`panic::catch_unwind`](https://doc.rust-lang.org/std/panic/fn.catch_unwind.html),
which may catch _some_ panics but **not guaranteed to catch all panics**.
Therefore, it is recommended to **avoid panics at all costs**.
### Passing state to and from JS: `External`
@@ -199,41 +217,16 @@ console.log("Total `foo`s encountered: ", pluginState.getFooCount());
Finally, from the native implementation of your plugin, you can extract the external:
```rs
pub extern "C" fn on_before_parse_plugin_impl(
args: *const bun_native_plugin::sys::OnBeforeParseArguments,
result: *mut bun_native_plugin::sys::OnBeforeParseResult,
) {
let args = unsafe { &*args };
let mut handle = match OnBeforeParse::from_raw(args, result) {
Ok(handle) => handle,
Err(_) => {
// `OnBeforeParse::from_raw` handles error logging
// so it is fine to return here.
return;
}
};
let plugin_state: &PluginState =
#[bun]
pub fn on_before_parse_plugin_impl(handle: &mut OnBeforeParse) {
// This operation is only safe if you pass in an external when registering the plugin.
// If you don't, this could lead to a segfault or access of undefined memory.
match unsafe { handle.external().and_then(|state| state.ok_or(Error::Unknown)) } {
Ok(state) => state,
Err(_) => {
handle.log_error("Failed to get external!");
return;
}
};
let plugin_state: &PluginState =
unsafe { handle.external().and_then(|state| state.ok_or(Error::Unknown))? };
// Fetch our source code again
let input_source_code = match handle.input_source_code() {
Ok(source_str) => source_str,
Err(_) => {
handle.log_error("Failed to fetch source code!");
return;
}
};
let input_source_code = handle.input_source_code()?;
// Count the number of `foo`s and add it to our state
let foo_count = input_source_code.matches("foo").count() as u32;
@@ -243,6 +236,6 @@ pub extern "C" fn on_before_parse_plugin_impl(
### Concurrency
Your `extern "C"` plugin function can be called _on any thread_ at _any time_ and _multiple times at once_.
Your plugin function can be called _on any thread_ at _any time_ and possibly _multiple times at once_.
Therefore, you must design any state management to be threadsafe
Therefore, you must design any state management to be threadsafe.

View File

@@ -0,0 +1,14 @@
[package]
name = "bun-macro"
version = "0.1.0"
edition = "2021"
[lib]
proc-macro = true
[dependencies]
syn = { version = "2.0", features = ["full"] }
quote = "1.0"
napi = "2.16.13"
anyhow = "1.0.94"

View File

@@ -0,0 +1,54 @@
use proc_macro::TokenStream;
use quote::quote;
use syn::{parse_macro_input, Ident, ItemFn};
/// Attribute macro that turns an ordinary Rust function of the shape
/// `fn name(handle: &mut OnBeforeParse) -> Result<()>` into the
/// `extern "C"` entry point Bun's bundler expects for `onBeforeParse`
/// native plugins.
///
/// The generated wrapper:
/// 1. re-emits the user's body as an inner function named `__<name>`,
/// 2. constructs a safe `OnBeforeParse` handle from the raw pointers,
/// 3. runs the inner function inside `std::panic::catch_unwind`, and
/// 4. reports both `Err(..)` results and caught panics back to Bun
///    through the C log callback instead of unwinding across the FFI
///    boundary.
#[proc_macro_attribute]
pub fn bun(_attr: TokenStream, item: TokenStream) -> TokenStream {
    // Parse the input function
    let input_fn = parse_macro_input!(item as ItemFn);
    let fn_name = &input_fn.sig.ident;
    // The user's code is kept under a `__`-prefixed name so the original
    // identifier can be used for the exported `#[no_mangle]` C symbol.
    let inner_fn_name = Ident::new(&format!("__{}", fn_name), fn_name.span());
    let fn_block = &input_fn.block;
    // Generate the wrapped function
    let output = quote! {
        #[no_mangle]
        pub unsafe extern "C" fn #fn_name(
            args_raw: *mut bun_native_plugin::sys::OnBeforeParseArguments,
            result: *mut bun_native_plugin::sys::OnBeforeParseResult,
        ) {
            // NOTE(review): `Result<()>` here resolves in the caller's
            // scope (unhygienic); the caller is expected to have
            // `bun_native_plugin::Result` in scope — confirm.
            fn #inner_fn_name(handle: &mut bun_native_plugin::OnBeforeParse) -> Result<()> {
                #fn_block
            }

            // Copy the file path out of the raw args up front so it is
            // still available for panic logging even after the safe
            // handle (and closure state) is gone.
            let args_path = unsafe { (*args_raw).path_ptr };
            let args_path_len = unsafe { (*args_raw).path_len };
            let result_pointer = result;

            // Catch panics so they cannot unwind across the FFI
            // boundary; per std docs this is not guaranteed to catch
            // every panic (e.g. `panic = "abort"` builds).
            let result = std::panic::catch_unwind(|| {
                let mut handle = match bun_native_plugin::OnBeforeParse::from_raw(args_raw, result) {
                    Ok(handle) => handle,
                    // `from_raw` handles its own error logging.
                    Err(_) => return,
                };
                // Surface plugin errors to Bun's bundler log.
                if let Err(e) = #inner_fn_name(&mut handle) {
                    handle.log_error(&format!("{:?}", e));
                }
            });

            // A caught panic is reported through the raw log callback,
            // since the safe handle is no longer available here.
            if let Err(e) = result {
                let msg_string = format!("Plugin crashed: {:?}", e);
                let mut log_options = bun_native_plugin::log_from_message_and_level(
                    &msg_string,
                    bun_native_plugin::sys::BunLogLevel::BUN_LOG_LEVEL_ERROR,
                    args_path,
                    args_path_len,
                );
                unsafe {
                    ((*result_pointer).log.unwrap())(args_raw, &mut log_options);
                }
            }
        }
    };

    output.into()
}

View File

@@ -1,4 +1,4 @@
//! > ⚠️ Note: This is an advanced and experimental API recommended only for plugin developers who are familiar with systems proramming and the C ABI. Use with caution.
//! > ⚠️ Note: This is an advanced and experimental API recommended only for plugin developers who are familiar with systems programming and the C ABI. Use with caution.
//!
//! # Bun Native Plugins
//!
@@ -44,7 +44,7 @@
//! /// Use `no_mangle` so that we can reference this symbol by name later
//! /// when registering this native plugin in JS.
//! ///
//! /// Here we'll create a dummy plugin which replaces all occurences of
//! /// Here we'll create a dummy plugin which replaces all occurrences of
//! /// `foo` with `bar`
//! #[no_mangle]
//! pub extern "C" fn on_before_parse_plugin_impl(
@@ -244,10 +244,11 @@
//! Your `extern "C"` plugin function can be called _on any thread_ at _any time_ and _multiple times at once_.
//!
//! Therefore, you must design any state management to be threadsafe.
#![allow(non_upper_case_globals)]
#![allow(non_camel_case_types)]
#![allow(non_snake_case)]
pub use anyhow;
pub use bun_macro::bun;
#[repr(transparent)]
pub struct BunPluginName(*const c_char);
@@ -261,7 +262,7 @@ impl BunPluginName {
#[macro_export]
macro_rules! define_bun_plugin {
($name:expr) => {
pub static BUN_PLUGIN_NAME_STRING: &str = $name;
pub static BUN_PLUGIN_NAME_STRING: &str = concat!($name, "\0");
#[no_mangle]
pub static BUN_PLUGIN_NAME: bun_native_plugin::BunPluginName =
@@ -279,7 +280,9 @@ use std::{
borrow::Cow,
cell::UnsafeCell,
ffi::{c_char, c_void},
marker::PhantomData,
str::Utf8Error,
sync::PoisonError,
};
pub mod sys {
@@ -323,7 +326,7 @@ impl Drop for SourceCodeContext {
pub type BunLogLevel = sys::BunLogLevel;
pub type BunLoader = sys::BunLoader;
fn get_from_raw_str<'a>(ptr: *const u8, len: usize) -> Result<Cow<'a, str>> {
fn get_from_raw_str<'a>(ptr: *const u8, len: usize) -> PluginResult<Cow<'a, str>> {
let slice: &'a [u8] = unsafe { std::slice::from_raw_parts(ptr, len) };
// Windows allows invalid UTF-16 strings in the filesystem. These get converted to WTF-8 in Zig.
@@ -351,9 +354,31 @@ pub enum Error {
IncompatiblePluginVersion,
ExternalTypeMismatch,
Unknown,
LockPoisoned,
}
pub type Result<T> = std::result::Result<T, Error>;
pub type PluginResult<T> = std::result::Result<T, Error>;
pub type Result<T> = anyhow::Result<T>;
impl std::fmt::Display for Error {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{:?}", self)
}
}
impl std::error::Error for Error {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
None
}
fn description(&self) -> &str {
"description() is deprecated; use Display"
}
fn cause(&self) -> Option<&dyn std::error::Error> {
self.source()
}
}
impl From<Utf8Error> for Error {
fn from(value: Utf8Error) -> Self {
@@ -361,6 +386,12 @@ impl From<Utf8Error> for Error {
}
}
impl<Guard> From<PoisonError<Guard>> for Error {
fn from(_: PoisonError<Guard>) -> Self {
Self::LockPoisoned
}
}
/// A safe handle for the arguments + result struct for the
/// `OnBeforeParse` bundler lifecycle hook.
///
@@ -370,9 +401,10 @@ impl From<Utf8Error> for Error {
///
/// To initialize this struct, see the `from_raw` method.
pub struct OnBeforeParse<'a> {
args_raw: &'a sys::OnBeforeParseArguments,
pub args_raw: *mut sys::OnBeforeParseArguments,
result_raw: *mut sys::OnBeforeParseResult,
compilation_context: *mut SourceCodeContext,
__phantom: PhantomData<&'a ()>,
}
impl<'a> OnBeforeParse<'a> {
@@ -394,10 +426,10 @@ impl<'a> OnBeforeParse<'a> {
/// }
/// ```
pub fn from_raw(
args: &'a sys::OnBeforeParseArguments,
args: *mut sys::OnBeforeParseArguments,
result: *mut sys::OnBeforeParseResult,
) -> Result<Self> {
if args.__struct_size < std::mem::size_of::<sys::OnBeforeParseArguments>()
) -> PluginResult<Self> {
if unsafe { (*args).__struct_size } < std::mem::size_of::<sys::OnBeforeParseArguments>()
|| unsafe { (*result).__struct_size } < std::mem::size_of::<sys::OnBeforeParseResult>()
{
let message = "This plugin is not compatible with the current version of Bun.";
@@ -405,8 +437,8 @@ impl<'a> OnBeforeParse<'a> {
__struct_size: std::mem::size_of::<sys::BunLogOptions>(),
message_ptr: message.as_ptr(),
message_len: message.len(),
path_ptr: args.path_ptr,
path_len: args.path_len,
path_ptr: unsafe { (*args).path_ptr },
path_len: unsafe { (*args).path_len },
source_line_text_ptr: std::ptr::null(),
source_line_text_len: 0,
level: BunLogLevel::BUN_LOG_LEVEL_ERROR as i8,
@@ -426,15 +458,21 @@ impl<'a> OnBeforeParse<'a> {
args_raw: args,
result_raw: result,
compilation_context: std::ptr::null_mut() as *mut _,
__phantom: Default::default(),
})
}
pub fn path(&self) -> Result<Cow<'_, str>> {
get_from_raw_str(self.args_raw.path_ptr, self.args_raw.path_len)
pub fn path(&self) -> PluginResult<Cow<'_, str>> {
unsafe { get_from_raw_str((*self.args_raw).path_ptr, (*self.args_raw).path_len) }
}
pub fn namespace(&self) -> Result<Cow<'_, str>> {
get_from_raw_str(self.args_raw.namespace_ptr, self.args_raw.namespace_len)
pub fn namespace(&self) -> PluginResult<Cow<'_, str>> {
unsafe {
get_from_raw_str(
(*self.args_raw).namespace_ptr,
(*self.args_raw).namespace_len,
)
}
}
/// Get the external object from the `OnBeforeParse` arguments.
@@ -485,12 +523,13 @@ impl<'a> OnBeforeParse<'a> {
/// },
/// };
/// ```
pub unsafe fn external<T: 'static + Sync>(&self) -> Result<Option<&'static T>> {
if self.args_raw.external.is_null() {
pub unsafe fn external<T: 'static + Sync>(&self) -> PluginResult<Option<&'static T>> {
if unsafe { (*self.args_raw).external.is_null() } {
return Ok(None);
}
let external: *mut TaggedObject<T> = self.args_raw.external as *mut TaggedObject<T>;
let external: *mut TaggedObject<T> =
unsafe { (*self.args_raw).external as *mut TaggedObject<T> };
unsafe {
if (*external).type_id != TypeId::of::<T>() {
@@ -505,12 +544,13 @@ impl<'a> OnBeforeParse<'a> {
///
/// This is unsafe as you must ensure that no other invocation of the plugin
/// simultaneously holds a mutable reference to the external.
pub unsafe fn external_mut<T: 'static + Sync>(&mut self) -> Result<Option<&mut T>> {
if self.args_raw.external.is_null() {
pub unsafe fn external_mut<T: 'static + Sync>(&mut self) -> PluginResult<Option<&mut T>> {
if unsafe { (*self.args_raw).external.is_null() } {
return Ok(None);
}
let external: *mut TaggedObject<T> = self.args_raw.external as *mut TaggedObject<T>;
let external: *mut TaggedObject<T> =
unsafe { (*self.args_raw).external as *mut TaggedObject<T> };
unsafe {
if (*external).type_id != TypeId::of::<T>() {
@@ -525,9 +565,12 @@ impl<'a> OnBeforeParse<'a> {
///
/// On Windows, this function may return an `Err(Error::Utf8(...))` if the
/// source code contains invalid UTF-8.
pub fn input_source_code(&self) -> Result<Cow<'_, str>> {
pub fn input_source_code(&self) -> PluginResult<Cow<'_, str>> {
let fetch_result = unsafe {
((*self.result_raw).fetchSourceCode.unwrap())(self.args_raw, self.result_raw)
((*self.result_raw).fetchSourceCode.unwrap())(
self.args_raw as *const _,
self.result_raw,
)
};
if fetch_result != 0 {
@@ -587,7 +630,7 @@ impl<'a> OnBeforeParse<'a> {
}
/// Set the output loader for the current file.
pub fn set_output_loader(&self, loader: BunLogLevel) {
pub fn set_output_loader(&self, loader: BunLoader) {
// SAFETY: We don't hand out mutable references to `result_raw` so dereferencing it is safe.
unsafe {
(*self.result_raw).loader = loader as u8;
@@ -606,22 +649,36 @@ impl<'a> OnBeforeParse<'a> {
/// Log a message with the given level.
pub fn log(&self, message: &str, level: BunLogLevel) {
let mut log_options = sys::BunLogOptions {
__struct_size: std::mem::size_of::<sys::BunLogOptions>(),
message_ptr: message.as_ptr(),
message_len: message.len(),
path_ptr: self.args_raw.path_ptr,
path_len: self.args_raw.path_len,
source_line_text_ptr: std::ptr::null(),
source_line_text_len: 0,
level: level as i8,
line: 0,
lineEnd: 0,
column: 0,
columnEnd: 0,
};
let mut log_options = log_from_message_and_level(
message,
level,
unsafe { (*self.args_raw).path_ptr },
unsafe { (*self.args_raw).path_len },
);
unsafe {
((*self.result_raw).log.unwrap())(self.args_raw, &mut log_options);
}
}
}
pub fn log_from_message_and_level(
message: &str,
level: BunLogLevel,
path: *const u8,
path_len: usize,
) -> sys::BunLogOptions {
sys::BunLogOptions {
__struct_size: std::mem::size_of::<sys::BunLogOptions>(),
message_ptr: message.as_ptr(),
message_len: message.len(),
path_ptr: path as *const _,
path_len,
source_line_text_ptr: std::ptr::null(),
source_line_text_len: 0,
level: level as i8,
line: 0,
lineEnd: 0,
column: 0,
columnEnd: 0,
}
}

View File

@@ -15,7 +15,7 @@ This plugin can be used to support `.yaml` loaders in Bun's bundler by passing i
```ts
import yamlPlugin from "bun-plugin-yaml";
Bun.build({
await Bun.build({
entrypoints: ["./index.tsx"],
// other config

View File

@@ -529,7 +529,7 @@ declare module "bun" {
*/
// tslint:disable-next-line:unified-signatures
function write(
destination: BunFile | Bun.PathLike,
destination: BunFile | S3File | Bun.PathLike,
input: Blob | NodeJS.TypedArray | ArrayBufferLike | string | Bun.BlobPart[],
options?: {
/** If writing to a PathLike, set the permissions of the file. */
@@ -1210,7 +1210,813 @@ declare module "bun" {
* For empty Blob, this always returns true.
*/
exists(): Promise<boolean>;
/**
* Write data to the file. This is equivalent to using {@link Bun.write} with a {@link BunFile}.
* @param data - The data to write.
* @param options - The options to use for the write.
*/
write(
data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer | Request | Response | BunFile,
options?: { highWaterMark?: number },
): Promise<number>;
/**
* Deletes the file.
*/
unlink(): Promise<void>;
}
/**
 * A writable sink that streams data over the network (returned by
 * {@link S3File.writer}). Extends {@link FileSink} with
 * network-oriented flush/end semantics.
 */
interface NetworkSink extends FileSink {
  /**
   * Write a chunk of data to the network.
   *
   * If the network is not writable yet, the data is buffered.
   *
   * @param chunk - The data to write.
   * @returns The number of bytes written or buffered.
   */
  write(chunk: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer): number;
  /**
   * Flush the internal buffer, committing the data to the network.
   *
   * @returns The number of bytes flushed; may be a Promise when the
   *   flush completes asynchronously.
   */
  flush(): number | Promise<number>;
  /**
   * Finish the upload. This also flushes the internal buffer.
   *
   * @param error - Optional error; presumably ends the upload in a
   *   failed state rather than completing it — TODO confirm.
   * @returns The number of bytes written; may be a Promise when
   *   completion is asynchronous.
   */
  end(error?: Error): number | Promise<number>;
}
type S3 = {
/**
* Create a new instance of an S3 bucket so that credentials can be managed
* from a single instance instead of being passed to every method.
*
* @param options The default options to use for the S3 client. Can be
* overridden by passing options to the methods.
*
* ## Keep S3 credentials in a single instance
*
* @example
* const bucket = new Bun.S3({
* accessKeyId: "your-access-key",
* secretAccessKey: "your-secret-key",
* bucket: "my-bucket",
* endpoint: "https://s3.us-east-1.amazonaws.com",
* sessionToken: "your-session-token",
* });
*
* // S3Bucket is callable, so you can do this:
* const file = bucket("my-file.txt");
*
* // or this:
* await file.write("Hello Bun!");
* await file.text();
*
* // To delete the file:
* await bucket.delete("my-file.txt");
*
* // To write a file without returning the instance:
* await bucket.write("my-file.txt", "Hello Bun!");
*
*/
new (options?: S3Options): S3Bucket;
/**
* Delete a file from an S3-compatible object storage service.
*
* @param path The path to the file.
* @param options The options to use for the S3 client.
*
* For an instance method version, see {@link S3File.unlink}. You can also use {@link S3Bucket.unlink}.
*
* @example
* import { S3 } from "bun";
* await S3.unlink("s3://my-bucket/my-file.txt", {
* accessKeyId: "your-access-key",
* secretAccessKey: "your-secret-key",
* });
*
* @example
* await S3.unlink("key", {
* bucket: "my-bucket",
* accessKeyId: "your-access-key",
* secretAccessKey: "your-secret-key",
* });
*/
delete(path: string, options?: S3Options): Promise<void>;
/**
* unlink is an alias for {@link S3.delete}
*/
unlink: S3["delete"];
/**
* Writes data to an S3-compatible storage service.
* Supports various input types and handles large files with multipart uploads.
*
* @param path The path or key where the file will be written
* @param data The data to write
* @param options S3 configuration and upload options
* @returns promise that resolves with the number of bytes written
*
* @example
* // Writing a string
* await S3.write("hello.txt", "Hello World!", {
* bucket: "my-bucket",
* type: "text/plain"
* });
*
* @example
* // Writing JSON
* await S3.write(
* "data.json",
* JSON.stringify({ hello: "world" }),
* { type: "application/json" }
* );
*
* @example
* // Writing a large file with multipart upload
* await S3.write("large-file.dat", largeBuffer, {
* partSize: 10 * 1024 * 1024, // 10MB parts
* queueSize: 4, // Upload 4 parts in parallel
* retry: 3 // Retry failed parts up to 3 times
* });
*/
write(
path: string,
data: string | ArrayBufferView | ArrayBufferLike | Response | Request | ReadableStream | Blob | File,
options?: S3Options,
): Promise<number>;
};
var S3: S3;
/**
* Creates a new S3File instance for working with a single file.
*
* @param path The path or key of the file
* @param options S3 configuration options
* @returns `S3File` instance for the specified path
*
* @example
* import { s3 } from "bun";
* const file = s3("my-file.txt", {
* bucket: "my-bucket",
* accessKeyId: "your-access-key",
* secretAccessKey: "your-secret-key"
* });
*
* // Read the file
* const content = await file.text();
*
* @example
* // Using s3:// protocol
* const file = s3("s3://my-bucket/my-file.txt", {
* accessKeyId: "your-access-key",
* secretAccessKey: "your-secret-key"
* });
*/
function s3(path: string | URL, options?: S3Options): S3File;
/**
* Configuration options for S3 operations
*/
interface S3Options extends BlobPropertyBag {
/**
* The Access Control List (ACL) policy for the file.
* Controls who can access the file and what permissions they have.
*
* @example
* // Setting public read access
* const file = s3("public-file.txt", {
* acl: "public-read",
* bucket: "my-bucket"
* });
*
* @example
* // Using with presigned URLs
* const url = file.presign({
* acl: "public-read",
* expiresIn: 3600
* });
*/
acl?:
| "private"
| "public-read"
| "public-read-write"
| "aws-exec-read"
| "authenticated-read"
| "bucket-owner-read"
| "bucket-owner-full-control"
| "log-delivery-write";
/**
* The S3 bucket name. Can be set via `S3_BUCKET` or `AWS_BUCKET` environment variables.
*
* @example
* // Using explicit bucket
* const file = s3("my-file.txt", { bucket: "my-bucket" });
*
* @example
* // Using environment variables
* // With S3_BUCKET=my-bucket in .env
* const file = s3("my-file.txt");
*/
bucket?: string;
/**
* The AWS region. Can be set via `S3_REGION` or `AWS_REGION` environment variables.
*
* @example
* const file = s3("my-file.txt", {
* bucket: "my-bucket",
* region: "us-west-2"
* });
*/
region?: string;
/**
* The access key ID for authentication.
* Can be set via `S3_ACCESS_KEY_ID` or `AWS_ACCESS_KEY_ID` environment variables.
*/
accessKeyId?: string;
/**
* The secret access key for authentication.
* Can be set via `S3_SECRET_ACCESS_KEY` or `AWS_SECRET_ACCESS_KEY` environment variables.
*/
secretAccessKey?: string;
/**
* Optional session token for temporary credentials.
* Can be set via `S3_SESSION_TOKEN` or `AWS_SESSION_TOKEN` environment variables.
*
* @example
* // Using temporary credentials
* const file = s3("my-file.txt", {
* accessKeyId: tempAccessKey,
* secretAccessKey: tempSecretKey,
* sessionToken: tempSessionToken
* });
*/
sessionToken?: string;
/**
* The S3-compatible service endpoint URL.
* Can be set via `S3_ENDPOINT` or `AWS_ENDPOINT` environment variables.
*
* @example
* // AWS S3
* const file = s3("my-file.txt", {
* endpoint: "https://s3.us-east-1.amazonaws.com"
* });
*
* @example
* // Cloudflare R2
* const file = s3("my-file.txt", {
* endpoint: "https://<account-id>.r2.cloudflarestorage.com"
* });
*
* @example
* // DigitalOcean Spaces
* const file = s3("my-file.txt", {
* endpoint: "https://<region>.digitaloceanspaces.com"
* });
*
* @example
* // MinIO (local development)
* const file = s3("my-file.txt", {
* endpoint: "http://localhost:9000"
* });
*/
endpoint?: string;
/**
* The size of each part in multipart uploads (in MiB).
* - Minimum: 5 MiB
* - Maximum: 5120 MiB
* - Default: 5 MiB
*
* @example
* // Configuring multipart uploads
* const file = s3("large-file.dat", {
* partSize: 10, // 10 MiB parts
* queueSize: 4 // Upload 4 parts in parallel
* });
*
* const writer = file.writer();
* // ... write large file in chunks
*/
partSize?: number;
/**
* Number of parts to upload in parallel for multipart uploads.
* - Default: 5
* - Maximum: 255
*
* Increasing this value can improve upload speeds for large files
* but will use more memory.
*/
queueSize?: number;
/**
* Number of retry attempts for failed uploads.
* - Default: 3
* - Maximum: 255
*
* @example
* // Setting retry attempts
* const file = s3("my-file.txt", {
* retry: 5 // Retry failed uploads up to 5 times
* });
*/
retry?: number;
/**
* The Content-Type of the file.
* Automatically set based on file extension when possible.
*
* @example
* // Setting explicit content type
* const file = s3("data.bin", {
* type: "application/octet-stream"
* });
*/
type?: string;
/**
* @deprecated The size of the internal buffer in bytes. Defaults to 5 MiB. use `partSize` and `queueSize` instead.
*/
highWaterMark?: number;
}
/**
* Options for generating presigned URLs
*/
interface S3FilePresignOptions extends S3Options {
/**
* Number of seconds until the presigned URL expires.
* - Default: 86400 (1 day)
*
* @example
* // Short-lived URL
* const url = file.presign({
* expiresIn: 3600 // 1 hour
* });
*
* @example
* // Long-lived public URL
* const url = file.presign({
* expiresIn: 7 * 24 * 60 * 60, // 7 days
* acl: "public-read"
* });
*/
expiresIn?: number;
/**
* The HTTP method allowed for the presigned URL.
*
* @example
* // GET URL for downloads
* const downloadUrl = file.presign({
* method: "GET",
* expiresIn: 3600
* });
*
* @example
* // PUT URL for uploads
* const uploadUrl = file.presign({
* method: "PUT",
* expiresIn: 3600,
* type: "application/json"
* });
*/
method?: "GET" | "POST" | "PUT" | "DELETE" | "HEAD";
}
/**
* Represents a file in an S3-compatible storage service.
* Extends the Blob interface for compatibility with web APIs.
*/
interface S3File extends Blob {
/**
* The size of the file in bytes.
* This is a Promise because it requires a network request to determine the size.
*
* @example
* // Getting file size
* const size = await file.size;
* console.log(`File size: ${size} bytes`);
*
* @example
* // Check if file is larger than 1MB
* if (await file.size > 1024 * 1024) {
* console.log("Large file detected");
* }
*/
/**
* TODO: figure out how to get the typescript types to not error for this property.
*/
// size: Promise<number>;
/**
* Creates a new S3File representing a slice of the original file.
* Uses HTTP Range headers for efficient partial downloads.
*
* @param begin - Starting byte offset
* @param end - Ending byte offset (exclusive)
* @param contentType - Optional MIME type for the slice
* @returns A new S3File representing the specified range
*
* @example
* // Reading file header
* const header = file.slice(0, 1024);
* const headerText = await header.text();
*
* @example
* // Reading with content type
* const jsonSlice = file.slice(1024, 2048, "application/json");
* const data = await jsonSlice.json();
*
* @example
* // Reading from offset to end
* const remainder = file.slice(1024);
* const content = await remainder.text();
*/
slice(begin?: number, end?: number, contentType?: string): S3File;
slice(begin?: number, contentType?: string): S3File;
slice(contentType?: string): S3File;
/**
* Creates a writable stream for uploading data.
* Suitable for large files as it uses multipart upload.
*
* @param options - Configuration for the upload
* @returns A NetworkSink for writing data
*
* @example
* // Basic streaming write
* const writer = file.writer({
* type: "application/json"
* });
* writer.write('{"hello": ');
* writer.write('"world"}');
* await writer.end();
*
* @example
* // Optimized large file upload
* const writer = file.writer({
* partSize: 10 * 1024 * 1024, // 10MB parts
* queueSize: 4, // Upload 4 parts in parallel
* retry: 3 // Retry failed parts
* });
*
* // Write large chunks of data efficiently
* for (const chunk of largeDataChunks) {
* await writer.write(chunk);
* }
* await writer.end();
*
* @example
* // Error handling
* const writer = file.writer();
* try {
* await writer.write(data);
* await writer.end();
* } catch (err) {
* console.error('Upload failed:', err);
* // Writer will automatically abort multipart upload on error
* }
*/
writer(options?: S3Options): NetworkSink;
/**
* Gets a readable stream of the file's content.
* Useful for processing large files without loading them entirely into memory.
*
* @returns A ReadableStream for the file content
*
* @example
* // Basic streaming read
* const stream = file.stream();
* for await (const chunk of stream) {
* console.log('Received chunk:', chunk);
* }
*
* @example
* // Piping to response
* const stream = file.stream();
* return new Response(stream, {
* headers: { 'Content-Type': file.type }
* });
*
* @example
* // Processing large files
* const stream = file.stream();
* const textDecoder = new TextDecoder();
* for await (const chunk of stream) {
* const text = textDecoder.decode(chunk);
* // Process text chunk by chunk
* }
*/
readonly readable: ReadableStream;
stream(): ReadableStream;
/**
* The name or path of the file in the bucket.
*
* @example
* const file = s3("folder/image.jpg");
* console.log(file.name); // "folder/image.jpg"
*/
readonly name?: string;
/**
* The bucket name containing the file.
*
* @example
* const file = s3("s3://my-bucket/file.txt");
* console.log(file.bucket); // "my-bucket"
*/
readonly bucket?: string;
/**
* Checks if the file exists in S3.
* Uses HTTP HEAD request to efficiently check existence without downloading.
*
* @returns Promise resolving to true if file exists, false otherwise
*
* @example
* // Basic existence check
* if (await file.exists()) {
* console.log("File exists in S3");
* }
*
* @example
* // With error handling
* try {
* const exists = await file.exists();
* if (!exists) {
* console.log("File not found");
* }
* } catch (err) {
* console.error("Error checking file:", err);
* }
*/
exists(): Promise<boolean>;
/**
* Uploads data to S3.
* Supports various input types and automatically handles large files.
*
* @param data - The data to upload
* @param options - Upload configuration options
* @returns Promise resolving to number of bytes written
*
* @example
* // Writing string data
* await file.write("Hello World", {
* type: "text/plain"
* });
*
* @example
* // Writing JSON
* const data = { hello: "world" };
* await file.write(JSON.stringify(data), {
* type: "application/json"
* });
*
* @example
* // Writing from Response
* const response = await fetch("https://example.com/data");
* await file.write(response);
*
* @example
* // Writing with ACL
* await file.write(data, {
* acl: "public-read",
* type: "application/octet-stream"
* });
*/
write(
data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer | Request | Response | BunFile | S3File | Blob,
options?: S3Options,
): Promise<number>;
/**
* Generates a presigned URL for the file.
* Allows temporary access to the file without exposing credentials.
*
* @param options - Configuration for the presigned URL
* @returns Presigned URL string
*
* @example
* // Basic download URL
* const url = file.presign({
* expiresIn: 3600 // 1 hour
* });
*
* @example
* // Upload URL with specific content type
* const uploadUrl = file.presign({
* method: "PUT",
* expiresIn: 3600,
* type: "image/jpeg",
* acl: "public-read"
* });
*
* @example
* // URL with custom permissions
* const url = file.presign({
* method: "GET",
* expiresIn: 7 * 24 * 60 * 60, // 7 days
* acl: "public-read"
* });
*/
presign(options?: S3FilePresignOptions): string;
/**
* Deletes the file from S3.
*
* @returns Promise that resolves when deletion is complete
*
* @example
* // Basic deletion
* await file.delete();
*
* @example
* // With error handling
* try {
* await file.delete();
* console.log("File deleted successfully");
* } catch (err) {
* console.error("Failed to delete file:", err);
* }
*/
delete(): Promise<void>;
/**
* Alias for delete() method.
* Provided for compatibility with Node.js fs API naming.
*
* @example
* await file.unlink();
*/
unlink: S3File["delete"];
}
/**
* A configured S3 bucket instance for managing files.
* The instance is callable to create S3File instances and provides methods
* for common operations.
*
* @example
* // Basic bucket setup
* const bucket = new S3({
* bucket: "my-bucket",
* accessKeyId: "key",
* secretAccessKey: "secret"
* });
*
* // Get file instance
* const file = bucket("image.jpg");
*
* // Common operations
* await bucket.write("data.json", JSON.stringify({hello: "world"}));
* const url = bucket.presign("file.pdf");
* await bucket.unlink("old.txt");
*/
type S3Bucket = {
/**
* Creates an S3File instance for the given path.
*
* @example
* const file = bucket("image.jpg");
* await file.write(imageData);
* const configFile = bucket("config.json", {
* type: "application/json",
* acl: "private"
* });
*/
(path: string, options?: S3Options): S3File;
/**
* Writes data directly to a path in the bucket.
* Supports strings, buffers, streams, and web API types.
*
* @example
* // Write string
* await bucket.write("hello.txt", "Hello World");
*
* // Write JSON with type
* await bucket.write(
* "data.json",
* JSON.stringify({hello: "world"}),
* {type: "application/json"}
* );
*
* // Write from fetch
* const res = await fetch("https://example.com/data");
* await bucket.write("data.bin", res);
*
* // Write with ACL
* await bucket.write("public.html", html, {
* acl: "public-read",
* type: "text/html"
* });
*/
write(
path: string,
data:
| string
| ArrayBufferView
| ArrayBuffer
| SharedArrayBuffer
| Request
| Response
| BunFile
| S3File
| Blob
| File,
options?: S3Options,
): Promise<number>;
/**
* Generate a presigned URL for temporary access to a file.
* Useful for generating upload/download URLs without exposing credentials.
*
* @example
* // Download URL
* const downloadUrl = bucket.presign("file.pdf", {
* expiresIn: 3600 // 1 hour
* });
*
* // Upload URL
* const uploadUrl = bucket.presign("uploads/image.jpg", {
* method: "PUT",
* expiresIn: 3600,
* type: "image/jpeg",
* acl: "public-read"
* });
*
* // Long-lived public URL
* const publicUrl = bucket.presign("public/doc.pdf", {
* expiresIn: 7 * 24 * 60 * 60, // 7 days
* acl: "public-read"
* });
*/
presign(path: string, options?: S3FilePresignOptions): string;
/**
* Delete a file from the bucket.
*
* @example
* // Simple delete
* await bucket.unlink("old-file.txt");
*
* // With error handling
* try {
* await bucket.unlink("file.dat");
* console.log("File deleted");
* } catch (err) {
* console.error("Delete failed:", err);
* }
*/
unlink(path: string, options?: S3Options): Promise<void>;
/**
* Get the size of a file in bytes.
* Uses HEAD request to efficiently get size.
*
* @example
* // Get size
* const bytes = await bucket.size("video.mp4");
* console.log(`Size: ${bytes} bytes`);
*
* // Check if file is large
* if (await bucket.size("data.zip") > 100 * 1024 * 1024) {
* console.log("File is larger than 100MB");
* }
*/
size(path: string, options?: S3Options): Promise<number>;
/**
* Check if a file exists in the bucket.
* Uses HEAD request to check existence.
*
* @example
* // Check existence
* if (await bucket.exists("config.json")) {
* const file = bucket("config.json");
* const config = await file.json();
* }
*
* // With error handling
* try {
* if (!await bucket.exists("required.txt")) {
* throw new Error("Required file missing");
* }
* } catch (err) {
* console.error("Check failed:", err);
* }
*/
exists(path: string, options?: S3Options): Promise<boolean>;
};
/**
* This lets you use macros as regular imports
@@ -1544,7 +2350,7 @@ declare module "bun" {
define?: Record<string, string>;
// origin?: string; // e.g. http://mydomain.com
loader?: { [k in string]: Loader };
sourcemap?: "none" | "linked" | "inline" | "external" | "linked"; // default: "none", true -> "inline"
sourcemap?: "none" | "linked" | "inline" | "external" | "linked" | boolean; // default: "none", true -> "inline"
/**
* package.json `exports` conditions used when resolving imports
*
@@ -1630,14 +2436,41 @@ declare module "bun" {
/**
* **Experimental**
*
* Enable CSS support.
* Bundle CSS files.
*
* This will be enabled by default in Bun v1.2.
*
* @default false (until Bunv 1.2)
*/
experimentalCss?: boolean;
/**
* **Experimental**
*
* Bundle JavaScript & CSS files from HTML files. JavaScript & CSS files
* from non-external <script> or <link> tags will be bundled.
*
* Underneath, this works similarly to HTMLRewriter.
*
* This will be enabled by default in Bun v1.2.
*
* @default false (until Bun v1.2)
*/
html?: boolean;
/**
* Drop function calls to matching property accesses.
*/
drop?: string[];
/**
* When set to `true`, the returned promise rejects with an AggregateError when a build failure happens.
* When set to `false`, the `success` property of the returned object will be `false` when a build failure happens.
*
* This defaults to `false` in Bun 1.1 and will change to `true` in Bun 1.2
* as most usage of `Bun.build` forgets to check for errors.
*/
throw?: boolean;
}
namespace Password {
@@ -2151,6 +2984,8 @@ declare module "bun" {
* });
*/
data: T;
getBufferedAmount(): number;
}
/**
@@ -2985,7 +3820,7 @@ declare module "bun" {
* "Hello, world!"
* );
* ```
* @param path The path to the file (lazily loaded)
* @param path The path to the file (lazily loaded) if the path starts with `s3://` it will behave like {@link S3File}
*/
// tslint:disable-next-line:unified-signatures
function file(path: string | URL, options?: BlobPropertyBag): BunFile;
@@ -3011,7 +3846,7 @@ declare module "bun" {
* console.log(file.type); // "application/json"
* ```
*
* @param path The path to the file as a byte buffer (the buffer is copied)
* @param path The path to the file as a byte buffer (the buffer is copied) if the path starts with `s3://` it will behave like {@link S3File}
*/
// tslint:disable-next-line:unified-signatures
function file(path: ArrayBufferLike | Uint8Array, options?: BlobPropertyBag): BunFile;
@@ -3339,9 +4174,24 @@ declare module "bun" {
function nanoseconds(): number;
/**
* Generate a heap snapshot for seeing where the heap is being used
* Show precise statistics about memory usage of your application
*
* Generate a heap snapshot in JavaScriptCore's format that can be viewed with `bun --inspect` or Safari's Web Inspector
*/
function generateHeapSnapshot(): HeapSnapshot;
function generateHeapSnapshot(format?: "jsc"): HeapSnapshot;
/**
* Show precise statistics about memory usage of your application
*
* Generate a V8 Heap Snapshot that can be used with Chrome DevTools & Visual Studio Code
*
* This is a JSON string that can be saved to a file.
* ```ts
* const snapshot = Bun.generateHeapSnapshot("v8");
* await Bun.write("heap.heapsnapshot", snapshot);
* ```
*/
function generateHeapSnapshot(format: "v8"): string;
/**
* The next time JavaScriptCore is idle, clear unused memory and attempt to reduce the heap size.
@@ -3806,7 +4656,7 @@ declare module "bun" {
| "browser";
/** https://bun.sh/docs/bundler/loaders */
type Loader = "js" | "jsx" | "ts" | "tsx" | "json" | "toml" | "file" | "napi" | "wasm" | "text" | "css";
type Loader = "js" | "jsx" | "ts" | "tsx" | "json" | "toml" | "file" | "napi" | "wasm" | "text" | "css" | "html";
interface PluginConstraints {
/**

View File

@@ -135,6 +135,7 @@ type _Body = typeof globalThis extends { onerror: any }
readonly text: () => Promise<string>;
};
import { S3FileOptions } from "bun";
import type { TextDecoder as NodeTextDecoder, TextEncoder as NodeTextEncoder } from "util";
import type { MessagePort } from "worker_threads";
import type { WebSocket as _WebSocket } from "ws";
@@ -815,6 +816,11 @@ declare global {
rejectUnauthorized?: boolean | undefined; // Defaults to true
checkServerIdentity?: any; // TODO: change `any` to `checkServerIdentity`
};
/**
* Override the default S3 options
*/
s3?: S3FileOptions;
}
/**

View File

@@ -26,6 +26,8 @@ declare namespace HTMLRewriterTypes {
readonly name: string | null;
readonly publicId: string | null;
readonly systemId: string | null;
readonly removed: boolean;
remove(): Doctype;
}
interface DocumentEnd {

View File

@@ -214,4 +214,16 @@ declare module "bun:jsc" {
* Run JavaScriptCore's sampling profiler
*/
function startSamplingProfiler(optionalDirectory?: string): void;
/**
* Non-recursively estimate the memory usage of an object, excluding the memory usage of
* properties or other objects it references. For more accurate per-object
* memory usage, use {@link Bun.generateHeapSnapshot}.
*
* This is a best-effort estimate. It may not be 100% accurate. When it's
* wrong, it may mean the memory is non-contiguous (such as a large array).
*
* Passing a primitive type that isn't heap allocated returns 0.
*/
function estimateShallowMemoryUsageOf(value: object | CallableFunction | bigint | symbol | string): number;
}

View File

@@ -304,13 +304,21 @@ static LIBUS_SOCKET_DESCRIPTOR win32_set_nonblocking(LIBUS_SOCKET_DESCRIPTOR fd)
}
LIBUS_SOCKET_DESCRIPTOR bsd_set_nonblocking(LIBUS_SOCKET_DESCRIPTOR fd) {
#ifdef _WIN32
/* Libuv will set windows sockets as non-blocking */
#elif defined(__APPLE__)
fcntl(fd, F_SETFL, fcntl(fd, F_GETFL, 0) | O_NONBLOCK | O_CLOEXEC);
#else
fcntl(fd, F_SETFL, fcntl(fd, F_GETFL, 0) | O_NONBLOCK);
/* Libuv will set windows sockets as non-blocking */
#ifndef _WIN32
if (LIKELY(fd != LIBUS_SOCKET_ERROR)) {
int flags = fcntl(fd, F_GETFL, 0);
// F_GETFL supports O_NONBLCOK
fcntl(fd, F_SETFL, flags | O_NONBLOCK);
flags = fcntl(fd, F_GETFD, 0);
// F_GETFD supports FD_CLOEXEC
fcntl(fd, F_SETFD, flags | FD_CLOEXEC);
}
#endif
return fd;
}
@@ -395,12 +403,27 @@ void bsd_socket_flush(LIBUS_SOCKET_DESCRIPTOR fd) {
}
LIBUS_SOCKET_DESCRIPTOR bsd_create_socket(int domain, int type, int protocol) {
LIBUS_SOCKET_DESCRIPTOR created_fd;
#if defined(SOCK_CLOEXEC) && defined(SOCK_NONBLOCK)
int flags = SOCK_CLOEXEC | SOCK_NONBLOCK;
LIBUS_SOCKET_DESCRIPTOR created_fd = socket(domain, type | flags, protocol);
const int flags = SOCK_CLOEXEC | SOCK_NONBLOCK;
do {
created_fd = socket(domain, type | flags, protocol);
} while (IS_EINTR(created_fd));
if (UNLIKELY(created_fd == -1)) {
return LIBUS_SOCKET_ERROR;
}
return apple_no_sigpipe(created_fd);
#else
LIBUS_SOCKET_DESCRIPTOR created_fd = socket(domain, type, protocol);
do {
created_fd = socket(domain, type, protocol);
} while (IS_EINTR(created_fd));
if (UNLIKELY(created_fd == -1)) {
return LIBUS_SOCKET_ERROR;
}
return bsd_set_nonblocking(apple_no_sigpipe(created_fd));
#endif
}

View File

@@ -35,6 +35,9 @@ us_ssl_ctx_get_X509_without_callback_from(struct us_cert_string_t content) {
OPENSSL_PUT_ERROR(SSL, ERR_R_PEM_LIB);
goto end;
}
// NOTE: PEM_read_bio_X509 allocates, so input BIO must be freed.
BIO_free(in);
return x;
end:
X509_free(x);
@@ -140,4 +143,4 @@ extern "C" X509_STORE *us_get_default_ca_store() {
}
return store;
}
}

View File

@@ -438,6 +438,10 @@ void us_internal_dispatch_ready_poll(struct us_poll_t *p, int error, int eof, in
}
if(eof && s) {
if (UNLIKELY(us_socket_is_closed(0, s))) {
// Do not call on_end after the socket has been closed
return;
}
if (us_socket_is_shut_down(0, s)) {
/* We got FIN back after sending it */
s = us_socket_close(0, s, LIBUS_SOCKET_CLOSE_CODE_CLEAN_SHUTDOWN, NULL);

View File

@@ -73,6 +73,10 @@ public:
DROPPED
};
size_t memoryCost() {
return getBufferedAmount() + sizeof(WebSocket);
}
/* Sending fragmented messages puts a bit of effort on the user; you must not interleave regular sends
* with fragmented sends and you must sendFirstFragment, [sendFragment], then finally sendLastFragment. */
SendStatus sendFirstFragment(std::string_view message, OpCode opCode = OpCode::BINARY, bool compress = false) {

View File

@@ -31,7 +31,7 @@ At its core is the _Bun runtime_, a fast JavaScript runtime designed as a drop-i
When running programs with Bun from a Visual Studio Code terminal, Bun will connect to the extension and report errors as they happen, at the exact location they happened. We recommend using this feature with `bun --watch` so you can see errors on every save.
![Error messages example](./error-messages.gif)
![Error messages example](https://raw.githubusercontent.com/oven-sh/bun/refs/heads/main/packages/bun-vscode/error-messages.gif)
<div align="center">
<sup>In the example above VSCode is saving on every keypress. Under normal configuration you'd only see errors on every save.</sup>

View File

@@ -0,0 +1,6 @@
{
"$schema": "http://json-schema.org/draft-04/schema#",
"title": "JSON schema for bun.lock files.",
"allowTrailingCommas": true,
"type": "object"
}

View File

@@ -0,0 +1,341 @@
{
"lockfileVersion": 0,
"workspaces": {
"": {
"dependencies": {
"axios": "^1.7.7",
"elysia": "^0.6.3",
"express": "^4.18.2",
"mime": "^3.0.0",
"mime-db": "^1.52.0",
"react": "^0.0.0-experimental-380f5d67-20241113",
"react-dom": "^0.0.0-experimental-380f5d67-20241113",
"react-refresh": "^0.0.0-experimental-380f5d67-20241113",
"react-server-dom-bun": "^0.0.0-experimental-603e6108-20241029",
"react-server-dom-webpack": "^0.0.0-experimental-380f5d67-20241113",
},
"devDependencies": {
"@types/bun": "latest",
},
"peerDependencies": {
"typescript": "^5.0.0",
},
},
},
"trustedDependencies": [
"mime",
],
"packages": {
"@jridgewell/gen-mapping": ["@jridgewell/gen-mapping@0.3.5", "", { "dependencies": { "@jridgewell/set-array": "^1.2.1", "@jridgewell/sourcemap-codec": "^1.4.10", "@jridgewell/trace-mapping": "^0.3.24" } }, "sha512-IzL8ZoEDIBRWEzlCcRhOaCupYyN5gdIK+Q6fbFdPDg6HqX6jpkItn7DFIpW9LQzXG6Df9sA7+OKnq0qlz/GaQg=="],
"@jridgewell/resolve-uri": ["@jridgewell/resolve-uri@3.1.2", "", {}, "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw=="],
"@jridgewell/set-array": ["@jridgewell/set-array@1.2.1", "", {}, "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A=="],
"@jridgewell/source-map": ["@jridgewell/source-map@0.3.6", "", { "dependencies": { "@jridgewell/gen-mapping": "^0.3.5", "@jridgewell/trace-mapping": "^0.3.25" } }, "sha512-1ZJTZebgqllO79ue2bm3rIGud/bOe0pP5BjSRCRxxYkEZS8STV7zN84UBbiYu7jy+eCKSnVIUgoWWE/tt+shMQ=="],
"@jridgewell/sourcemap-codec": ["@jridgewell/sourcemap-codec@1.5.0", "", {}, "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ=="],
"@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.25", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ=="],
"@sinclair/typebox": ["@sinclair/typebox@0.30.4", "", {}, "sha512-wFuuDR+O1OAE2GL0q68h1Ty00RE6Ihcixr55A6TU5RCvOUHnwJw9LGuDVg9NxDiAp7m/YJpa+UaOuLAz0ziyOQ=="],
"@types/bun": ["@types/bun@1.1.13", "", { "dependencies": { "bun-types": "1.1.34" } }, "sha512-KmQxSBgVWCl6RSuerlLGZlIWfdxkKqat0nxN61+qu4y1KDn0Ll3j7v1Pl8GnaL3a/U6GGWVTJh75ap62kR1E8Q=="],
"@types/eslint": ["@types/eslint@9.6.1", "", { "dependencies": { "@types/estree": "*", "@types/json-schema": "*" } }, "sha512-FXx2pKgId/WyYo2jXw63kk7/+TY7u7AziEJxJAnSFzHlqTAS3Ync6SvgYAN/k4/PQpnnVuzoMuVnByKK2qp0ag=="],
"@types/eslint-scope": ["@types/eslint-scope@3.7.7", "", { "dependencies": { "@types/eslint": "*", "@types/estree": "*" } }, "sha512-MzMFlSLBqNF2gcHWO0G1vP/YQyfvrxZ0bF+u7mzUdZ1/xK4A4sru+nraZz5i3iEIk1l1uyicaDVTB4QbbEkAYg=="],
"@types/estree": ["@types/estree@1.0.6", "", {}, "sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw=="],
"@types/json-schema": ["@types/json-schema@7.0.15", "", {}, "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA=="],
"@types/node": ["@types/node@20.12.14", "", { "dependencies": { "undici-types": "~5.26.4" } }, "sha512-scnD59RpYD91xngrQQLGkE+6UrHUPzeKZWhhjBSa3HSkwjbQc38+q3RoIVEwxQGRw3M+j5hpNAM+lgV3cVormg=="],
"@types/ws": ["@types/ws@8.5.13", "", { "dependencies": { "@types/node": "*" } }, "sha512-osM/gWBTPKgHV8XkTunnegTRIsvF6owmf5w+JtAfOw472dptdm0dlGv4xCt6GwQRcC2XVOvvRE/0bAoQcL2QkA=="],
"@webassemblyjs/ast": ["@webassemblyjs/ast@1.14.1", "", { "dependencies": { "@webassemblyjs/helper-numbers": "1.13.2", "@webassemblyjs/helper-wasm-bytecode": "1.13.2" } }, "sha512-nuBEDgQfm1ccRp/8bCQrx1frohyufl4JlbMMZ4P1wpeOfDhF6FQkxZJ1b/e+PLwr6X1Nhw6OLme5usuBWYBvuQ=="],
"@webassemblyjs/floating-point-hex-parser": ["@webassemblyjs/floating-point-hex-parser@1.13.2", "", {}, "sha512-6oXyTOzbKxGH4steLbLNOu71Oj+C8Lg34n6CqRvqfS2O71BxY6ByfMDRhBytzknj9yGUPVJ1qIKhRlAwO1AovA=="],
"@webassemblyjs/helper-api-error": ["@webassemblyjs/helper-api-error@1.13.2", "", {}, "sha512-U56GMYxy4ZQCbDZd6JuvvNV/WFildOjsaWD3Tzzvmw/mas3cXzRJPMjP83JqEsgSbyrmaGjBfDtV7KDXV9UzFQ=="],
"@webassemblyjs/helper-buffer": ["@webassemblyjs/helper-buffer@1.14.1", "", {}, "sha512-jyH7wtcHiKssDtFPRB+iQdxlDf96m0E39yb0k5uJVhFGleZFoNw1c4aeIcVUPPbXUVJ94wwnMOAqUHyzoEPVMA=="],
"@webassemblyjs/helper-numbers": ["@webassemblyjs/helper-numbers@1.13.2", "", { "dependencies": { "@webassemblyjs/floating-point-hex-parser": "1.13.2", "@webassemblyjs/helper-api-error": "1.13.2", "@xtuc/long": "4.2.2" } }, "sha512-FE8aCmS5Q6eQYcV3gI35O4J789wlQA+7JrqTTpJqn5emA4U2hvwJmvFRC0HODS+3Ye6WioDklgd6scJ3+PLnEA=="],
"@webassemblyjs/helper-wasm-bytecode": ["@webassemblyjs/helper-wasm-bytecode@1.13.2", "", {}, "sha512-3QbLKy93F0EAIXLh0ogEVR6rOubA9AoZ+WRYhNbFyuB70j3dRdwH9g+qXhLAO0kiYGlg3TxDV+I4rQTr/YNXkA=="],
"@webassemblyjs/helper-wasm-section": ["@webassemblyjs/helper-wasm-section@1.14.1", "", { "dependencies": { "@webassemblyjs/ast": "1.14.1", "@webassemblyjs/helper-buffer": "1.14.1", "@webassemblyjs/helper-wasm-bytecode": "1.13.2", "@webassemblyjs/wasm-gen": "1.14.1" } }, "sha512-ds5mXEqTJ6oxRoqjhWDU83OgzAYjwsCV8Lo/N+oRsNDmx/ZDpqalmrtgOMkHwxsG0iI//3BwWAErYRHtgn0dZw=="],
"@webassemblyjs/ieee754": ["@webassemblyjs/ieee754@1.13.2", "", { "dependencies": { "@xtuc/ieee754": "^1.2.0" } }, "sha512-4LtOzh58S/5lX4ITKxnAK2USuNEvpdVV9AlgGQb8rJDHaLeHciwG4zlGr0j/SNWlr7x3vO1lDEsuePvtcDNCkw=="],
"@webassemblyjs/leb128": ["@webassemblyjs/leb128@1.13.2", "", { "dependencies": { "@xtuc/long": "4.2.2" } }, "sha512-Lde1oNoIdzVzdkNEAWZ1dZ5orIbff80YPdHx20mrHwHrVNNTjNr8E3xz9BdpcGqRQbAEa+fkrCb+fRFTl/6sQw=="],
"@webassemblyjs/utf8": ["@webassemblyjs/utf8@1.13.2", "", {}, "sha512-3NQWGjKTASY1xV5m7Hr0iPeXD9+RDobLll3T9d2AO+g3my8xy5peVyjSag4I50mR1bBSN/Ct12lo+R9tJk0NZQ=="],
"@webassemblyjs/wasm-edit": ["@webassemblyjs/wasm-edit@1.14.1", "", { "dependencies": { "@webassemblyjs/ast": "1.14.1", "@webassemblyjs/helper-buffer": "1.14.1", "@webassemblyjs/helper-wasm-bytecode": "1.13.2", "@webassemblyjs/helper-wasm-section": "1.14.1", "@webassemblyjs/wasm-gen": "1.14.1", "@webassemblyjs/wasm-opt": "1.14.1", "@webassemblyjs/wasm-parser": "1.14.1", "@webassemblyjs/wast-printer": "1.14.1" } }, "sha512-RNJUIQH/J8iA/1NzlE4N7KtyZNHi3w7at7hDjvRNm5rcUXa00z1vRz3glZoULfJ5mpvYhLybmVcwcjGrC1pRrQ=="],
"@webassemblyjs/wasm-gen": ["@webassemblyjs/wasm-gen@1.14.1", "", { "dependencies": { "@webassemblyjs/ast": "1.14.1", "@webassemblyjs/helper-wasm-bytecode": "1.13.2", "@webassemblyjs/ieee754": "1.13.2", "@webassemblyjs/leb128": "1.13.2", "@webassemblyjs/utf8": "1.13.2" } }, "sha512-AmomSIjP8ZbfGQhumkNvgC33AY7qtMCXnN6bL2u2Js4gVCg8fp735aEiMSBbDR7UQIj90n4wKAFUSEd0QN2Ukg=="],
"@webassemblyjs/wasm-opt": ["@webassemblyjs/wasm-opt@1.14.1", "", { "dependencies": { "@webassemblyjs/ast": "1.14.1", "@webassemblyjs/helper-buffer": "1.14.1", "@webassemblyjs/wasm-gen": "1.14.1", "@webassemblyjs/wasm-parser": "1.14.1" } }, "sha512-PTcKLUNvBqnY2U6E5bdOQcSM+oVP/PmrDY9NzowJjislEjwP/C4an2303MCVS2Mg9d3AJpIGdUFIQQWbPds0Sw=="],
"@webassemblyjs/wasm-parser": ["@webassemblyjs/wasm-parser@1.14.1", "", { "dependencies": { "@webassemblyjs/ast": "1.14.1", "@webassemblyjs/helper-api-error": "1.13.2", "@webassemblyjs/helper-wasm-bytecode": "1.13.2", "@webassemblyjs/ieee754": "1.13.2", "@webassemblyjs/leb128": "1.13.2", "@webassemblyjs/utf8": "1.13.2" } }, "sha512-JLBl+KZ0R5qB7mCnud/yyX08jWFw5MsoalJ1pQ4EdFlgj9VdXKGuENGsiCIjegI1W7p91rUlcB/LB5yRJKNTcQ=="],
"@webassemblyjs/wast-printer": ["@webassemblyjs/wast-printer@1.14.1", "", { "dependencies": { "@webassemblyjs/ast": "1.14.1", "@xtuc/long": "4.2.2" } }, "sha512-kPSSXE6De1XOR820C90RIo2ogvZG+c3KiHzqUoO/F34Y2shGzesfqv7o57xrxovZJH/MetF5UjroJ/R/3isoiw=="],
"@xtuc/ieee754": ["@xtuc/ieee754@1.2.0", "", {}, "sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA=="],
"@xtuc/long": ["@xtuc/long@4.2.2", "", {}, "sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ=="],
"accepts": ["accepts@1.3.8", "", { "dependencies": { "mime-types": "~2.1.34", "negotiator": "0.6.3" } }, "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw=="],
"acorn": ["acorn@8.14.0", "", { "bin": { "acorn": "bin/acorn" } }, "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA=="],
"acorn-loose": ["acorn-loose@8.4.0", "", { "dependencies": { "acorn": "^8.11.0" } }, "sha512-M0EUka6rb+QC4l9Z3T0nJEzNOO7JcoJlYMrBlyBCiFSXRyxjLKayd4TbQs2FDRWQU1h9FR7QVNHt+PEaoNL5rQ=="],
"ajv": ["ajv@6.12.6", "", { "dependencies": { "fast-deep-equal": "^3.1.1", "fast-json-stable-stringify": "^2.0.0", "json-schema-traverse": "^0.4.1", "uri-js": "^4.2.2" } }, "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g=="],
"ajv-keywords": ["ajv-keywords@3.5.2", "", { "peerDependencies": { "ajv": "^6.9.1" } }, "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ=="],
"array-flatten": ["array-flatten@1.1.1", "", {}, "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg=="],
"asynckit": ["asynckit@0.4.0", "", {}, "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="],
"axios": ["axios@1.7.7", "", { "dependencies": { "follow-redirects": "^1.15.6", "form-data": "^4.0.0", "proxy-from-env": "^1.1.0" } }, "sha512-S4kL7XrjgBmvdGut0sN3yJxqYzrDOnivkBiN0OFs6hLiUam3UPvswUo0kqGyhqUZGEOytHyumEdXsAkgCOUf3Q=="],
"body-parser": ["body-parser@1.20.1", "", { "dependencies": { "bytes": "3.1.2", "content-type": "~1.0.4", "debug": "2.6.9", "depd": "2.0.0", "destroy": "1.2.0", "http-errors": "2.0.0", "iconv-lite": "0.4.24", "on-finished": "2.4.1", "qs": "6.11.0", "raw-body": "2.5.1", "type-is": "~1.6.18", "unpipe": "1.0.0" } }, "sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw=="],
"browserslist": ["browserslist@4.24.2", "", { "dependencies": { "caniuse-lite": "^1.0.30001669", "electron-to-chromium": "^1.5.41", "node-releases": "^2.0.18", "update-browserslist-db": "^1.1.1" }, "bin": { "browserslist": "cli.js" } }, "sha512-ZIc+Q62revdMcqC6aChtW4jz3My3klmCO1fEmINZY/8J3EpBg5/A/D0AKmBveUh6pgoeycoMkVMko84tuYS+Gg=="],
"buffer-from": ["buffer-from@1.1.2", "", {}, "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ=="],
"bun-types": ["bun-types@1.1.34", "", { "dependencies": { "@types/node": "~20.12.8", "@types/ws": "~8.5.10" } }, "sha512-br5QygTEL/TwB4uQOb96Ky22j4Gq2WxWH/8Oqv20fk5HagwKXo/akB+LiYgSfzexCt6kkcUaVm+bKiPl71xPvw=="],
"bytes": ["bytes@3.1.2", "", {}, "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg=="],
"call-bind": ["call-bind@1.0.2", "", { "dependencies": { "function-bind": "^1.1.1", "get-intrinsic": "^1.0.2" } }, "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA=="],
"caniuse-lite": ["caniuse-lite@1.0.30001680", "", {}, "sha512-rPQy70G6AGUMnbwS1z6Xg+RkHYPAi18ihs47GH0jcxIG7wArmPgY3XbS2sRdBbxJljp3thdT8BIqv9ccCypiPA=="],
"chrome-trace-event": ["chrome-trace-event@1.0.4", "", {}, "sha512-rNjApaLzuwaOTjCiT8lSDdGN1APCiqkChLMJxJPWLunPAt5fy8xgU9/jNOchV84wfIxrA0lRQB7oCT8jrn/wrQ=="],
"combined-stream": ["combined-stream@1.0.8", "", { "dependencies": { "delayed-stream": "~1.0.0" } }, "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg=="],
"commander": ["commander@2.20.3", "", {}, "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ=="],
"content-disposition": ["content-disposition@0.5.4", "", { "dependencies": { "safe-buffer": "5.2.1" } }, "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ=="],
"content-type": ["content-type@1.0.5", "", {}, "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA=="],
"cookie": ["cookie@0.5.0", "", {}, "sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw=="],
"cookie-signature": ["cookie-signature@1.0.6", "", {}, "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ=="],
"debug": ["debug@2.6.9", "", { "dependencies": { "ms": "2.0.0" } }, "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA=="],
"delayed-stream": ["delayed-stream@1.0.0", "", {}, "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ=="],
"depd": ["depd@2.0.0", "", {}, "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw=="],
"destroy": ["destroy@1.2.0", "", {}, "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg=="],
"ee-first": ["ee-first@1.1.1", "", {}, "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow=="],
"electron-to-chromium": ["electron-to-chromium@1.5.58", "", {}, "sha512-al2l4r+24ZFL7WzyPTlyD0fC33LLzvxqLCwurtBibVPghRGO9hSTl+tis8t1kD7biPiH/en4U0I7o/nQbYeoVA=="],
"elysia": ["elysia@0.6.3", "", { "dependencies": { "@sinclair/typebox": "^0.30.4", "fast-querystring": "^1.1.2", "memoirist": "0.1.4", "openapi-types": "^12.1.3" }, "peerDependencies": { "typescript": ">= 5.0.0" }, "optionalPeers": ["typescript"] }, "sha512-LhdH476fotAQuEUpnLdn8fAzwo3ZmwHVrYzQhujo+x+OpmMXGMJXT7L7/Ct+b5wwR2txP5xCxI1A0suxhRxgIQ=="],
"encodeurl": ["encodeurl@1.0.2", "", {}, "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w=="],
"enhanced-resolve": ["enhanced-resolve@5.17.1", "", { "dependencies": { "graceful-fs": "^4.2.4", "tapable": "^2.2.0" } }, "sha512-LMHl3dXhTcfv8gM4kEzIUeTQ+7fpdA0l2tUf34BddXPkz2A5xJ5L/Pchd5BL6rdccM9QGvu0sWZzK1Z1t4wwyg=="],
"es-module-lexer": ["es-module-lexer@1.5.4", "", {}, "sha512-MVNK56NiMrOwitFB7cqDwq0CQutbw+0BvLshJSse0MUNU+y1FC3bUS/AQg7oUng+/wKrrki7JfmwtVHkVfPLlw=="],
"escalade": ["escalade@3.2.0", "", {}, "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA=="],
"escape-html": ["escape-html@1.0.3", "", {}, "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow=="],
"eslint-scope": ["eslint-scope@5.1.1", "", { "dependencies": { "esrecurse": "^4.3.0", "estraverse": "^4.1.1" } }, "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw=="],
"esrecurse": ["esrecurse@4.3.0", "", { "dependencies": { "estraverse": "^5.2.0" } }, "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag=="],
"estraverse": ["estraverse@4.3.0", "", {}, "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw=="],
"etag": ["etag@1.8.1", "", {}, "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg=="],
"events": ["events@3.3.0", "", {}, "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q=="],
"express": ["express@4.18.2", "", { "dependencies": { "accepts": "~1.3.8", "array-flatten": "1.1.1", "body-parser": "1.20.1", "content-disposition": "0.5.4", "content-type": "~1.0.4", "cookie": "0.5.0", "cookie-signature": "1.0.6", "debug": "2.6.9", "depd": "2.0.0", "encodeurl": "~1.0.2", "escape-html": "~1.0.3", "etag": "~1.8.1", "finalhandler": "1.2.0", "fresh": "0.5.2", "http-errors": "2.0.0", "merge-descriptors": "1.0.1", "methods": "~1.1.2", "on-finished": "2.4.1", "parseurl": "~1.3.3", "path-to-regexp": "0.1.7", "proxy-addr": "~2.0.7", "qs": "6.11.0", "range-parser": "~1.2.1", "safe-buffer": "5.2.1", "send": "0.18.0", "serve-static": "1.15.0", "setprototypeof": "1.2.0", "statuses": "2.0.1", "type-is": "~1.6.18", "utils-merge": "1.0.1", "vary": "~1.1.2" } }, "sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ=="],
"fast-decode-uri-component": ["fast-decode-uri-component@1.0.1", "", {}, "sha512-WKgKWg5eUxvRZGwW8FvfbaH7AXSh2cL+3j5fMGzUMCxWBJ3dV3a7Wz8y2f/uQ0e3B6WmodD3oS54jTQ9HVTIIg=="],
"fast-deep-equal": ["fast-deep-equal@3.1.3", "", {}, "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="],
"fast-json-stable-stringify": ["fast-json-stable-stringify@2.1.0", "", {}, "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw=="],
"fast-querystring": ["fast-querystring@1.1.2", "", { "dependencies": { "fast-decode-uri-component": "^1.0.1" } }, "sha512-g6KuKWmFXc0fID8WWH0jit4g0AGBoJhCkJMb1RmbsSEUNvQ+ZC8D6CUZ+GtF8nMzSPXnhiePyyqqipzNNEnHjg=="],
"finalhandler": ["finalhandler@1.2.0", "", { "dependencies": { "debug": "2.6.9", "encodeurl": "~1.0.2", "escape-html": "~1.0.3", "on-finished": "2.4.1", "parseurl": "~1.3.3", "statuses": "2.0.1", "unpipe": "~1.0.0" } }, "sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg=="],
"follow-redirects": ["follow-redirects@1.15.9", "", {}, "sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ=="],
"form-data": ["form-data@4.0.1", "", { "dependencies": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", "mime-types": "^2.1.12" } }, "sha512-tzN8e4TX8+kkxGPK8D5u0FNmjPUjw3lwC9lSLxxoB/+GtsJG91CO8bSWy73APlgAZzZbXEYZJuxjkHH2w+Ezhw=="],
"forwarded": ["forwarded@0.2.0", "", {}, "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow=="],
"fresh": ["fresh@0.5.2", "", {}, "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q=="],
"function-bind": ["function-bind@1.1.1", "", {}, "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A=="],
"get-intrinsic": ["get-intrinsic@1.2.1", "", { "dependencies": { "function-bind": "^1.1.1", "has": "^1.0.3", "has-proto": "^1.0.1", "has-symbols": "^1.0.3" } }, "sha512-2DcsyfABl+gVHEfCOaTrWgyt+tb6MSEGmKq+kI5HwLbIYgjgmMcV8KQ41uaKz1xxUcn9tJtgFbQUEVcEbd0FYw=="],
"glob-to-regexp": ["glob-to-regexp@0.4.1", "", {}, "sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw=="],
"graceful-fs": ["graceful-fs@4.2.11", "", {}, "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ=="],
"has": ["has@1.0.3", "", { "dependencies": { "function-bind": "^1.1.1" } }, "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw=="],
"has-flag": ["has-flag@4.0.0", "", {}, "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ=="],
"has-proto": ["has-proto@1.0.1", "", {}, "sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg=="],
"has-symbols": ["has-symbols@1.0.3", "", {}, "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A=="],
"http-errors": ["http-errors@2.0.0", "", { "dependencies": { "depd": "2.0.0", "inherits": "2.0.4", "setprototypeof": "1.2.0", "statuses": "2.0.1", "toidentifier": "1.0.1" } }, "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ=="],
"iconv-lite": ["iconv-lite@0.4.24", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3" } }, "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA=="],
"inherits": ["inherits@2.0.4", "", {}, "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="],
"ipaddr.js": ["ipaddr.js@1.9.1", "", {}, "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g=="],
"jest-worker": ["jest-worker@27.5.1", "", { "dependencies": { "@types/node": "*", "merge-stream": "^2.0.0", "supports-color": "^8.0.0" } }, "sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg=="],
"json-parse-even-better-errors": ["json-parse-even-better-errors@2.3.1", "", {}, "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w=="],
"json-schema-traverse": ["json-schema-traverse@0.4.1", "", {}, "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg=="],
"loader-runner": ["loader-runner@4.3.0", "", {}, "sha512-3R/1M+yS3j5ou80Me59j7F9IMs4PXs3VqRrm0TU3AbKPxlmpoY1TNscJV/oGJXo8qCatFGTfDbY6W6ipGOYXfg=="],
"media-typer": ["media-typer@0.3.0", "", {}, "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ=="],
"memoirist": ["memoirist@0.1.4", "", {}, "sha512-D6GbPSqO2nUVOmm7VZjJc5tC60pkOVUPzLwkKl1vCiYP+2b1cG8N9q1O3P0JmNM68u8vsgefPbxRUCSGxSXD+g=="],
"merge-descriptors": ["merge-descriptors@1.0.1", "", {}, "sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w=="],
"merge-stream": ["merge-stream@2.0.0", "", {}, "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w=="],
"methods": ["methods@1.1.2", "", {}, "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w=="],
"mime": ["mime@3.0.0", "", { "bin": { "mime": "cli.js" } }, "sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A=="],
"mime-db": ["mime-db@1.52.0", "", {}, "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg=="],
"mime-types": ["mime-types@2.1.35", "", { "dependencies": { "mime-db": "1.52.0" } }, "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw=="],
"ms": ["ms@2.0.0", "", {}, "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="],
"negotiator": ["negotiator@0.6.3", "", {}, "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg=="],
"neo-async": ["neo-async@2.6.2", "", {}, "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw=="],
"node-releases": ["node-releases@2.0.18", "", {}, "sha512-d9VeXT4SJ7ZeOqGX6R5EM022wpL+eWPooLI+5UpWn2jCT1aosUQEhQP214x33Wkwx3JQMvIm+tIoVOdodFS40g=="],
"object-inspect": ["object-inspect@1.12.3", "", {}, "sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g=="],
"on-finished": ["on-finished@2.4.1", "", { "dependencies": { "ee-first": "1.1.1" } }, "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg=="],
"openapi-types": ["openapi-types@12.1.3", "", {}, "sha512-N4YtSYJqghVu4iek2ZUvcN/0aqH1kRDuNqzcycDxhOUpg7GdvLa2F3DgS6yBNhInhv2r/6I0Flkn7CqL8+nIcw=="],
"parseurl": ["parseurl@1.3.3", "", {}, "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ=="],
"path-to-regexp": ["path-to-regexp@0.1.7", "", {}, "sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ=="],
"picocolors": ["picocolors@1.1.1", "", {}, "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA=="],
"proxy-addr": ["proxy-addr@2.0.7", "", { "dependencies": { "forwarded": "0.2.0", "ipaddr.js": "1.9.1" } }, "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg=="],
"proxy-from-env": ["proxy-from-env@1.1.0", "", {}, "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="],
"punycode": ["punycode@2.3.1", "", {}, "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg=="],
"qs": ["qs@6.11.0", "", { "dependencies": { "side-channel": "^1.0.4" } }, "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q=="],
"randombytes": ["randombytes@2.1.0", "", { "dependencies": { "safe-buffer": "^5.1.0" } }, "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ=="],
"range-parser": ["range-parser@1.2.1", "", {}, "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg=="],
"raw-body": ["raw-body@2.5.1", "", { "dependencies": { "bytes": "3.1.2", "http-errors": "2.0.0", "iconv-lite": "0.4.24", "unpipe": "1.0.0" } }, "sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig=="],
"react": ["react@0.0.0-experimental-380f5d67-20241113", "", {}, "sha512-QquU1j1TmZR+KgGSFvWTlOuwLvGrA8ldUJean+gT0nYIhSJ1ZkdXJQFnFRWqxoc74C7SY1o4NMz0yJxpUBoQ2w=="],
"react-dom": ["react-dom@0.0.0-experimental-380f5d67-20241113", "", { "dependencies": { "scheduler": "0.0.0-experimental-380f5d67-20241113" }, "peerDependencies": { "react": "0.0.0-experimental-380f5d67-20241113" } }, "sha512-1ok9k5rAF7YuTveNefkPOvZHHuh5RLnCc5DU7sT7IL3i2K+LZmlsbSdlylMevjt9OzovxWQdsk04Fd4GKVCBWg=="],
"react-refresh": ["react-refresh@0.0.0-experimental-380f5d67-20241113", "", {}, "sha512-PwTxoYh02oTSdM2DLV8r3ZzHwObVDIsS05fxNcajIZe+/kIFTWThmXYJpGMljzjIs0wwScVkMONU6URTRPQvHA=="],
"react-server-dom-bun": ["react-server-dom-bun@0.0.0-experimental-603e6108-20241029", "", { "dependencies": { "neo-async": "^2.6.1" } }, "sha512-FfteCHlOgJSnDJRatgIkIU74jQQ9M1+fH2e6kfY9Sibu8FAWEUjgApKQPDfiXgjrkY7w0ITQu0b2FezC0eGzCw=="],
"react-server-dom-webpack": ["react-server-dom-webpack@0.0.0-experimental-380f5d67-20241113", "", { "dependencies": { "acorn-loose": "^8.3.0", "neo-async": "^2.6.1", "webpack-sources": "^3.2.0" }, "peerDependencies": { "react": "0.0.0-experimental-380f5d67-20241113", "react-dom": "0.0.0-experimental-380f5d67-20241113", "webpack": "^5.59.0" } }, "sha512-hUluisy+9Srvrju5yS+qBOIAX82E+MRYOmoTNbV0kUsTi964ZZFLBzuruASAyUbbP1OhtFl0DwBxYN+UT0yUFQ=="],
"safe-buffer": ["safe-buffer@5.2.1", "", {}, "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="],
"safer-buffer": ["safer-buffer@2.1.2", "", {}, "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="],
"scheduler": ["scheduler@0.0.0-experimental-380f5d67-20241113", "", {}, "sha512-UtSmlBSHar7hQvCXiozfIryfUFCL58+mqjrZONnLD06xdTlfgLrTcI5gS3Xo/RnNhUziLPV0DsinpI3a+q7Yzg=="],
"schema-utils": ["schema-utils@3.3.0", "", { "dependencies": { "@types/json-schema": "^7.0.8", "ajv": "^6.12.5", "ajv-keywords": "^3.5.2" } }, "sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg=="],
"send": ["send@0.18.0", "", { "dependencies": { "debug": "2.6.9", "depd": "2.0.0", "destroy": "1.2.0", "encodeurl": "~1.0.2", "escape-html": "~1.0.3", "etag": "~1.8.1", "fresh": "0.5.2", "http-errors": "2.0.0", "mime": "1.6.0", "ms": "2.1.3", "on-finished": "2.4.1", "range-parser": "~1.2.1", "statuses": "2.0.1" } }, "sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg=="],
"serialize-javascript": ["serialize-javascript@6.0.2", "", { "dependencies": { "randombytes": "^2.1.0" } }, "sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g=="],
"serve-static": ["serve-static@1.15.0", "", { "dependencies": { "encodeurl": "~1.0.2", "escape-html": "~1.0.3", "parseurl": "~1.3.3", "send": "0.18.0" } }, "sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g=="],
"setprototypeof": ["setprototypeof@1.2.0", "", {}, "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw=="],
"side-channel": ["side-channel@1.0.4", "", { "dependencies": { "call-bind": "^1.0.0", "get-intrinsic": "^1.0.2", "object-inspect": "^1.9.0" } }, "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw=="],
"source-map": ["source-map@0.6.1", "", {}, "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g=="],
"source-map-support": ["source-map-support@0.5.21", "", { "dependencies": { "buffer-from": "^1.0.0", "source-map": "^0.6.0" } }, "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w=="],
"statuses": ["statuses@2.0.1", "", {}, "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ=="],
"supports-color": ["supports-color@8.1.1", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q=="],
"tapable": ["tapable@2.2.1", "", {}, "sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ=="],
"terser": ["terser@5.36.0", "", { "dependencies": { "@jridgewell/source-map": "^0.3.3", "acorn": "^8.8.2", "commander": "^2.20.0", "source-map-support": "~0.5.20" }, "bin": { "terser": "bin/terser" } }, "sha512-IYV9eNMuFAV4THUspIRXkLakHnV6XO7FEdtKjf/mDyrnqUg9LnlOn6/RwRvM9SZjR4GUq8Nk8zj67FzVARr74w=="],
"terser-webpack-plugin": ["terser-webpack-plugin@5.3.10", "", { "dependencies": { "@jridgewell/trace-mapping": "^0.3.20", "jest-worker": "^27.4.5", "schema-utils": "^3.1.1", "serialize-javascript": "^6.0.1", "terser": "^5.26.0" }, "peerDependencies": { "webpack": "^5.1.0" } }, "sha512-BKFPWlPDndPs+NGGCr1U59t0XScL5317Y0UReNrHaw9/FwhPENlq6bfgs+4yPfyP51vqC1bQ4rp1EfXW5ZSH9w=="],
"toidentifier": ["toidentifier@1.0.1", "", {}, "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA=="],
"type-is": ["type-is@1.6.18", "", { "dependencies": { "media-typer": "0.3.0", "mime-types": "~2.1.24" } }, "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g=="],
"undici-types": ["undici-types@5.26.5", "", {}, "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="],
"unpipe": ["unpipe@1.0.0", "", {}, "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ=="],
"update-browserslist-db": ["update-browserslist-db@1.1.1", "", { "dependencies": { "escalade": "^3.2.0", "picocolors": "^1.1.0" }, "peerDependencies": { "browserslist": ">= 4.21.0" }, "bin": { "update-browserslist-db": "cli.js" } }, "sha512-R8UzCaa9Az+38REPiJ1tXlImTJXlVfgHZsglwBD/k6nj76ctsH1E3q4doGrukiLQd3sGQYu56r5+lo5r94l29A=="],
"uri-js": ["uri-js@4.4.1", "", { "dependencies": { "punycode": "^2.1.0" } }, "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg=="],
"utils-merge": ["utils-merge@1.0.1", "", {}, "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA=="],
"vary": ["vary@1.1.2", "", {}, "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg=="],
"watchpack": ["watchpack@2.4.2", "", { "dependencies": { "glob-to-regexp": "^0.4.1", "graceful-fs": "^4.1.2" } }, "sha512-TnbFSbcOCcDgjZ4piURLCbJ3nJhznVh9kw6F6iokjiFPl8ONxe9A6nMDVXDiNbrSfLILs6vB07F7wLBrwPYzJw=="],
"webpack": ["webpack@5.96.1", "", { "dependencies": { "@types/eslint-scope": "^3.7.7", "@types/estree": "^1.0.6", "@webassemblyjs/ast": "^1.12.1", "@webassemblyjs/wasm-edit": "^1.12.1", "@webassemblyjs/wasm-parser": "^1.12.1", "acorn": "^8.14.0", "browserslist": "^4.24.0", "chrome-trace-event": "^1.0.2", "enhanced-resolve": "^5.17.1", "es-module-lexer": "^1.2.1", "eslint-scope": "5.1.1", "events": "^3.2.0", "glob-to-regexp": "^0.4.1", "graceful-fs": "^4.2.11", "json-parse-even-better-errors": "^2.3.1", "loader-runner": "^4.2.0", "mime-types": "^2.1.27", "neo-async": "^2.6.2", "schema-utils": "^3.2.0", "tapable": "^2.1.1", "terser-webpack-plugin": "^5.3.10", "watchpack": "^2.4.1", "webpack-sources": "^3.2.3" }, "bin": { "webpack": "bin/webpack.js" } }, "sha512-l2LlBSvVZGhL4ZrPwyr8+37AunkcYj5qh8o6u2/2rzoPc8gxFJkLj1WxNgooi9pnoc06jh0BjuXnamM4qlujZA=="],
"webpack-sources": ["webpack-sources@3.2.3", "", {}, "sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w=="],
"esrecurse/estraverse": ["estraverse@5.3.0", "", {}, "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA=="],
"send/mime": ["mime@1.6.0", "", { "bin": { "mime": "cli.js" } }, "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg=="],
"send/ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="],
}
}

View File

@@ -1,6 +1,6 @@
{
"name": "bun-vscode",
"version": "0.0.22",
"version": "0.0.25",
"author": "oven",
"repository": {
"type": "git",
@@ -289,19 +289,29 @@
"aliases": [
"bun.lockb"
],
"extensions": [
".lockb"
"filenames": [
"bun.lockb"
],
"icon": {
"dark": "assets/icon-small.png",
"light": "assets/icon-small.png"
}
},
{
"id": "jsonc",
"filenames": [
"bun.lock"
]
}
],
"jsonValidation": [
{
"fileMatch": "package.json",
"url": "./assets/package.json"
},
{
"fileMatch": "bun.lock",
"url": "./assets/bun.lock.json"
}
],
"customEditors": [
@@ -310,7 +320,7 @@
"displayName": "bun.lockb",
"selector": [
{
"filenamePattern": "*.lockb"
"filenamePattern": "*bun.lockb"
}
],
"priority": "default"
@@ -332,7 +342,7 @@
]
},
"description": "The Visual Studio Code extension for Bun.",
"displayName": "Bun",
"displayName": "Bun for Visual Studio Code",
"engines": {
"vscode": "^1.60.0"
},

View File

@@ -64,8 +64,11 @@ export function registerDebugger(context: vscode.ExtensionContext, factory?: vsc
vscode.DebugConfigurationProviderTriggerKind.Dynamic,
),
vscode.debug.registerDebugAdapterDescriptorFactory("bun", factory ?? new InlineDebugAdapterFactory()),
vscode.window.onDidOpenTerminal(injectDebugTerminal),
);
if (getConfig("debugTerminal.enabled")) {
injectDebugTerminal2().then(context.subscriptions.push)
}
}
function runFileCommand(resource?: vscode.Uri): void {
@@ -94,8 +97,6 @@ function debugFileCommand(resource?: vscode.Uri) {
}
async function injectDebugTerminal(terminal: vscode.Terminal): Promise<void> {
if (!getConfig("debugTerminal.enabled")) return;
const { name, creationOptions } = terminal;
if (name !== "JavaScript Debug Terminal") {
return;
@@ -134,6 +135,41 @@ async function injectDebugTerminal(terminal: vscode.Terminal): Promise<void> {
setTimeout(() => terminal.dispose(), 100);
}
async function injectDebugTerminal2() {
const jsDebugExt = vscode.extensions.getExtension('ms-vscode.js-debug-nightly') || vscode.extensions.getExtension('ms-vscode.js-debug');
if (!jsDebugExt) {
return vscode.window.onDidOpenTerminal(injectDebugTerminal)
}
await jsDebugExt.activate()
const jsDebug: import('@vscode/js-debug').IExports = jsDebugExt.exports;
if (!jsDebug) {
return vscode.window.onDidOpenTerminal(injectDebugTerminal)
}
return jsDebug.registerDebugTerminalOptionsProvider({
async provideTerminalOptions(options) {
const session = new TerminalDebugSession();
await session.initialize();
const { adapter, signal } = session;
const stopOnEntry = getConfig("debugTerminal.stopOnEntry") === true;
const query = stopOnEntry ? "break=1" : "wait=1";
return {
...options,
env: {
...options.env,
"BUN_INSPECT": `${adapter.url}?${query}`,
"BUN_INSPECT_NOTIFY": signal.url,
BUN_INSPECT_CONNECT_TO: " ",
},
};
},
});
}
class DebugConfigurationProvider implements vscode.DebugConfigurationProvider {
provideDebugConfigurations(folder?: vscode.WorkspaceFolder): vscode.ProviderResult<vscode.DebugConfiguration[]> {
return [DEBUG_CONFIGURATION, RUN_CONFIGURATION, ATTACH_CONFIGURATION];
@@ -295,7 +331,7 @@ class FileDebugSession extends DebugSession {
}
this.adapter.on("Adapter.reverseRequest", ({ command, arguments: args }) =>
this.sendRequest(command, args, 5000, () => {}),
this.sendRequest(command, args, 5000, () => { }),
);
adapters.set(url, this);

View File

@@ -0,0 +1,39 @@
/*---------------------------------------------------------
* Copyright (C) Microsoft Corporation. All rights reserved.
*--------------------------------------------------------*/
declare module '@vscode/js-debug' {
import type * as vscode from 'vscode';
/** @see {IExports.registerDebugTerminalOptionsProvider} */
export interface IDebugTerminalOptionsProvider {
/**
* Called when the user creates a JavaScript Debug Terminal. It's called
* with the options js-debug wants to use to create the terminal. It should
* modify and return the options to use in the terminal.
*
* In order to avoid conflicting with existing logic, participants should
* try to modify options in a additive way. For example prefer appending
* to rather than reading and overwriting `options.env.PATH`.
*/
provideTerminalOptions(options: vscode.TerminalOptions): vscode.ProviderResult<vscode.TerminalOptions>;
}
/**
* Defines the exports of the `js-debug` extension. Once you have this typings
* file, these can be acquired in your extension using the following code:
*
* ```
* const jsDebugExt = vscode.extensions.getExtension('ms-vscode.js-debug-nightly')
* || vscode.extensions.getExtension('ms-vscode.js-debug');
* await jsDebugExt.activate()
* const jsDebug: import('@vscode/js-debug').IExports = jsDebug.exports;
* ```
*/
export interface IExports {
/**
* Registers a participant used when the user creates a JavaScript Debug Terminal.
*/
registerDebugTerminalOptionsProvider(provider: IDebugTerminalOptionsProvider): vscode.Disposable;
}
}

View File

@@ -20,7 +20,6 @@ import {
getEnv,
writeFile,
spawnSafe,
spawn,
mkdir,
} from "./utils.mjs";
import { parseArgs } from "node:util";
@@ -76,10 +75,10 @@ async function doBuildkiteAgent(action) {
command_user=${escape(username)}
pidfile=${escape(pidPath)}
start_stop_daemon_args=" \
--background \
--make-pidfile \
--stdout ${escape(agentLogPath)} \
start_stop_daemon_args=" \\
--background \\
--make-pidfile \\
--stdout ${escape(agentLogPath)} \\
--stderr ${escape(agentLogPath)}"
depend() {
@@ -88,7 +87,6 @@ async function doBuildkiteAgent(action) {
}
`;
writeFile(servicePath, service, { mode: 0o755 });
writeFile(`/etc/conf.d/buildkite-agent`, `rc_ulimit="-n 262144"`);
await spawnSafe(["rc-update", "add", "buildkite-agent", "default"], { stdio: "inherit", privileged: true });
}
@@ -143,7 +141,7 @@ async function doBuildkiteAgent(action) {
shell = `"${cmd}" /S /C`;
} else {
const sh = which("sh", { required: true });
shell = `${sh} -e -c`;
shell = `${sh} -elc`;
}
const flags = ["enable-job-log-tmpfile", "no-feature-reporting"];

View File

@@ -1,5 +1,5 @@
#!/bin/sh
# Version: 7
# Version: 9
# A script that installs the dependencies needed to build and test Bun.
# This should work on macOS and Linux with a POSIX shell.
@@ -11,15 +11,17 @@
# increment the version comment to indicate that a new image should be built.
# Otherwise, the existing image will be retroactively updated.
pid=$$
pid="$$"
print() {
echo "$@"
}
error() {
echo "error: $@" >&2
kill -s TERM "$pid"
print "error: $@" >&2
if ! [ "$$" = "$pid" ]; then
kill -s TERM "$pid"
fi
exit 1
}
@@ -39,24 +41,32 @@ execute_sudo() {
}
execute_as_user() {
sh="$(require sh)"
if [ "$sudo" = "1" ] || [ "$can_sudo" = "1" ]; then
if [ -f "$(which sudo)" ]; then
execute sudo -n -u "$user" /bin/sh -c "$*"
execute sudo -n -u "$user" "$sh" -lc "$*"
elif [ -f "$(which doas)" ]; then
execute doas -u "$user" /bin/sh -c "$*"
execute doas -u "$user" "$sh" -lc "$*"
elif [ -f "$(which su)" ]; then
execute su -s /bin/sh "$user" -c "$*"
execute su -s "$sh" "$user" -lc "$*"
else
execute /bin/sh -c "$*"
execute "$sh" -lc "$*"
fi
else
execute /bin/sh -c "$*"
execute "$sh" -lc "$*"
fi
}
grant_to_user() {
path="$1"
execute_sudo chown -R "$user:$group" "$path"
if ! [ -f "$path" ] && ! [ -d "$path" ]; then
error "Could not find file or directory: \"$path\""
fi
chown="$(require chown)"
execute_sudo "$chown" -R "$user:$group" "$path"
execute_sudo chmod -R 777 "$path"
}
which() {
@@ -68,15 +78,15 @@ require() {
if ! [ -f "$path" ]; then
error "Command \"$1\" is required, but is not installed."
fi
echo "$path"
print "$path"
}
fetch() {
curl=$(which curl)
curl="$(which curl)"
if [ -f "$curl" ]; then
execute "$curl" -fsSL "$1"
else
wget=$(which wget)
wget="$(which wget)"
if [ -f "$wget" ]; then
execute "$wget" -qO- "$1"
else
@@ -85,78 +95,115 @@ fetch() {
fi
}
download_file() {
url="$1"
filename="${2:-$(basename "$url")}"
tmp="$(execute mktemp -d)"
execute chmod 755 "$tmp"
compare_version() {
if [ "$1" = "$2" ]; then
print "0"
elif [ "$1" = "$(echo -e "$1\n$2" | sort -V | head -n1)" ]; then
print "-1"
else
print "1"
fi
}
path="$tmp/$filename"
fetch "$url" >"$path"
execute chmod 644 "$path"
create_directory() {
path="$1"
path_dir="$path"
while ! [ -d "$path_dir" ]; do
path_dir="$(dirname "$path_dir")"
done
path_needs_sudo="0"
if ! [ -r "$path_dir" ] || ! [ -w "$path_dir" ]; then
path_needs_sudo="1"
fi
mkdir="$(require mkdir)"
if [ "$path_needs_sudo" = "1" ]; then
execute_sudo "$mkdir" -p "$path"
else
execute "$mkdir" -p "$path"
fi
grant_to_user "$path"
}
create_tmp_directory() {
mktemp="$(require mktemp)"
path="$(execute "$mktemp" -d)"
grant_to_user "$path"
print "$path"
}
compare_version() {
if [ "$1" = "$2" ]; then
echo "0"
elif [ "$1" = "$(echo -e "$1\n$2" | sort -V | head -n1)" ]; then
echo "-1"
else
echo "1"
create_file() {
path="$1"
path_dir="$(dirname "$path")"
if ! [ -d "$path_dir" ]; then
create_directory "$path_dir"
fi
path_needs_sudo="0"
if ! [ -r "$path" ] || ! [ -w "$path" ]; then
path_needs_sudo="1"
fi
if [ "$path_needs_sudo" = "1" ]; then
execute_sudo touch "$path"
else
execute touch "$path"
fi
content="$2"
if [ -n "$content" ]; then
append_file "$path" "$content"
fi
grant_to_user "$path"
}
append_to_file() {
file="$1"
content="$2"
file_needs_sudo="0"
if [ -f "$file" ]; then
if ! [ -r "$file" ] || ! [ -w "$file" ]; then
file_needs_sudo="1"
fi
else
execute_as_user mkdir -p "$(dirname "$file")"
execute_as_user touch "$file"
append_file() {
path="$1"
if ! [ -f "$path" ]; then
create_file "$path"
fi
echo "$content" | while read -r line; do
if ! grep -q "$line" "$file"; then
if [ "$file_needs_sudo" = "1" ]; then
execute_sudo sh -c "echo '$line' >> '$file'"
path_needs_sudo="0"
if ! [ -r "$path" ] || ! [ -w "$path" ]; then
path_needs_sudo="1"
fi
content="$2"
print "$content" | while read -r line; do
if ! grep -q "$line" "$path"; then
sh="$(require sh)"
if [ "$path_needs_sudo" = "1" ]; then
execute_sudo "$sh" -c "echo '$line' >> '$path'"
else
echo "$line" >>"$file"
execute "$sh" -c "echo '$line' >> '$path'"
fi
fi
done
}
append_to_file_sudo() {
file="$1"
content="$2"
download_file() {
file_url="$1"
file_tmp_dir="$(create_tmp_directory)"
file_tmp_path="$file_tmp_dir/$(basename "$file_url")"
if ! [ -f "$file" ]; then
execute_sudo mkdir -p "$(dirname "$file")"
execute_sudo touch "$file"
fi
echo "$content" | while read -r line; do
if ! grep -q "$line" "$file"; then
echo "$line" | execute_sudo tee "$file" >/dev/null
fi
done
fetch "$file_url" >"$file_tmp_path"
grant_to_user "$file_tmp_path"
print "$file_tmp_path"
}
append_to_profile() {
content="$1"
profiles=".profile .zprofile .bash_profile .bashrc .zshrc"
for profile in $profiles; do
file="$home/$profile"
if [ "$ci" = "1" ] || [ -f "$file" ]; then
append_to_file "$file" "$content"
fi
for profile_path in "$current_home/$profile" "$home/$profile"; do
if [ "$ci" = "1" ] || [ -f "$profile_path" ]; then
append_file "$profile_path" "$content"
fi
done
done
}
@@ -190,19 +237,22 @@ move_to_bin() {
check_features() {
print "Checking features..."
case "$CI" in
true | 1)
ci=1
print "CI: enabled"
;;
esac
case "$@" in
*--ci*)
ci=1
print "CI: enabled"
;;
esac
for arg in "$@"; do
case "$arg" in
*--ci*)
ci=1
print "CI: enabled"
;;
*--osxcross*)
osxcross=1
print "Cross-compiling to macOS: enabled"
;;
*--gcc-13*)
gcc_version="13"
print "GCC 13: enabled"
;;
esac
done
}
check_operating_system() {
@@ -211,17 +261,29 @@ check_operating_system() {
os="$("$uname" -s)"
case "$os" in
Linux*) os="linux" ;;
Darwin*) os="darwin" ;;
*) error "Unsupported operating system: $os" ;;
Linux*)
os="linux"
;;
Darwin*)
os="darwin"
;;
*)
error "Unsupported operating system: $os"
;;
esac
print "Operating System: $os"
arch="$("$uname" -m)"
case "$arch" in
x86_64 | x64 | amd64) arch="x64" ;;
aarch64 | arm64) arch="aarch64" ;;
*) error "Unsupported architecture: $arch" ;;
x86_64 | x64 | amd64)
arch="x64"
;;
aarch64 | arm64)
arch="aarch64"
;;
*)
error "Unsupported architecture: $arch"
;;
esac
print "Architecture: $arch"
@@ -235,7 +297,7 @@ check_operating_system() {
abi="musl"
alpine="$(cat /etc/alpine-release)"
if [ "$alpine" ~ "_" ]; then
release="$(echo "$alpine" | cut -d_ -f1)-edge"
release="$(print "$alpine" | cut -d_ -f1)-edge"
else
release="$alpine"
fi
@@ -255,6 +317,7 @@ check_operating_system() {
distro="$("$sw_vers" -productName)"
release="$("$sw_vers" -productVersion)"
fi
case "$arch" in
x64)
sysctl="$(which sysctl)"
@@ -277,7 +340,7 @@ check_operating_system() {
ldd="$(which ldd)"
if [ -f "$ldd" ]; then
ldd_version="$($ldd --version 2>&1)"
abi_version="$(echo "$ldd_version" | grep -o -E '[0-9]+\.[0-9]+(\.[0-9]+)?' | head -n 1)"
abi_version="$(print "$ldd_version" | grep -o -E '[0-9]+\.[0-9]+(\.[0-9]+)?' | head -n 1)"
case "$ldd_version" in
*musl*)
abi="musl"
@@ -394,6 +457,10 @@ check_user() {
can_sudo=1
print "Sudo: can be used"
fi
current_user="$user"
current_group="$group"
current_home="$home"
}
check_ulimit() {
@@ -405,15 +472,12 @@ check_ulimit() {
systemd_conf="/etc/systemd/system.conf"
if [ -f "$systemd_conf" ]; then
limits_conf="/etc/security/limits.d/99-unlimited.conf"
if ! [ -f "$limits_conf" ]; then
execute_sudo mkdir -p "$(dirname "$limits_conf")"
execute_sudo touch "$limits_conf"
fi
create_file "$limits_conf"
fi
limits="core data fsize memlock nofile rss stack cpu nproc as locks sigpending msgqueue"
for limit in $limits; do
limit_upper="$(echo "$limit" | tr '[:lower:]' '[:upper:]')"
limit_upper="$(print "$limit" | tr '[:lower:]' '[:upper:]')"
limit_value="unlimited"
case "$limit" in
@@ -425,13 +489,13 @@ check_ulimit() {
if [ -f "$limits_conf" ]; then
limit_users="root *"
for limit_user in $limit_users; do
append_to_file "$limits_conf" "$limit_user soft $limit $limit_value"
append_to_file "$limits_conf" "$limit_user hard $limit $limit_value"
append_file "$limits_conf" "$limit_user soft $limit $limit_value"
append_file "$limits_conf" "$limit_user hard $limit $limit_value"
done
fi
if [ -f "$systemd_conf" ]; then
append_to_file "$systemd_conf" "DefaultLimit$limit_upper=$limit_value"
append_file "$systemd_conf" "DefaultLimit$limit_upper=$limit_value"
fi
done
@@ -448,13 +512,13 @@ check_ulimit() {
esac
rc_ulimit="$rc_ulimit -$limit_flag $limit_value"
done
append_to_file "$rc_conf" "rc_ulimit=\"$rc_ulimit\""
append_file "$rc_conf" "rc_ulimit=\"$rc_ulimit\""
fi
pam_confs="/etc/pam.d/common-session /etc/pam.d/common-session-noninteractive"
for pam_conf in $pam_confs; do
if [ -f "$pam_conf" ]; then
append_to_file "$pam_conf" "session optional pam_limits.so"
append_file "$pam_conf" "session optional pam_limits.so"
fi
done
@@ -462,6 +526,24 @@ check_ulimit() {
if [ -f "$systemctl" ]; then
execute_sudo "$systemctl" daemon-reload
fi
# Configure dpkg and apt for faster operation in CI environments
if [ "$ci" = "1" ] && [ "$pm" = "apt" ]; then
dpkg_conf="/etc/dpkg/dpkg.cfg.d/01-ci-options"
execute_sudo create_directory "$(dirname "$dpkg_conf")"
append_file "$dpkg_conf" "force-unsafe-io"
append_file "$dpkg_conf" "no-debsig"
apt_conf="/etc/apt/apt.conf.d/99-ci-options"
execute_sudo create_directory "$(dirname "$apt_conf")"
append_file "$apt_conf" 'Acquire::Languages "none";'
append_file "$apt_conf" 'Acquire::GzipIndexes "true";'
append_file "$apt_conf" 'Acquire::CompressionTypes::Order:: "gz";'
append_file "$apt_conf" 'APT::Get::Install-Recommends "false";'
append_file "$apt_conf" 'APT::Get::Install-Suggests "false";'
append_file "$apt_conf" 'Dpkg::Options { "--force-confdef"; "--force-confold"; }'
fi
}
package_manager() {
@@ -557,7 +639,7 @@ install_brew() {
bash="$(require bash)"
script=$(download_file "https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh")
NONINTERACTIVE=1 execute_as_user "$bash" "$script"
execute_as_user "$bash" -lc "NONINTERACTIVE=1 $script"
case "$arch" in
x64)
@@ -638,7 +720,7 @@ nodejs_version_exact() {
}
nodejs_version() {
echo "$(nodejs_version_exact)" | cut -d. -f1
print "$(nodejs_version_exact)" | cut -d. -f1
}
install_nodejs() {
@@ -674,14 +756,21 @@ install_nodejs() {
}
install_nodejs_headers() {
headers_tar="$(download_file "https://nodejs.org/download/release/v$(nodejs_version_exact)/node-v$(nodejs_version_exact)-headers.tar.gz")"
headers_dir="$(dirname "$headers_tar")"
execute tar -xzf "$headers_tar" -C "$headers_dir"
headers_include="$headers_dir/node-v$(nodejs_version_exact)/include"
execute_sudo cp -R "$headers_include/" "/usr"
nodejs_headers_tar="$(download_file "https://nodejs.org/download/release/v$(nodejs_version_exact)/node-v$(nodejs_version_exact)-headers.tar.gz")"
nodejs_headers_dir="$(dirname "$nodejs_headers_tar")"
execute tar -xzf "$nodejs_headers_tar" -C "$nodejs_headers_dir"
nodejs_headers_include="$nodejs_headers_dir/node-v$(nodejs_version_exact)/include"
execute_sudo cp -R "$nodejs_headers_include/" "/usr"
}
bun_version_exact() {
print "1.1.38"
}
install_bun() {
install_packages unzip
case "$pm" in
apk)
install_packages \
@@ -690,23 +779,24 @@ install_bun() {
;;
esac
bash="$(require bash)"
script=$(download_file "https://bun.sh/install")
version="${1:-"latest"}"
case "$version" in
latest)
execute_as_user "$bash" "$script"
case "$abi" in
musl)
bun_triplet="bun-$os-$arch-$abi"
;;
*)
execute_as_user "$bash" "$script" -s "$version"
bun_triplet="bun-$os-$arch"
;;
esac
move_to_bin "$home/.bun/bin/bun"
bun_path="$(which bun)"
bunx_path="$(dirname "$bun_path")/bunx"
execute_sudo ln -sf "$bun_path" "$bunx_path"
unzip="$(require unzip)"
bun_download_url="https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/bun-v$(bun_version_exact)/$bun_triplet.zip"
bun_zip="$(download_file "$bun_download_url")"
bun_tmpdir="$(dirname "$bun_zip")"
execute "$unzip" -o "$bun_zip" -d "$bun_tmpdir"
move_to_bin "$bun_tmpdir/$bun_triplet/bun"
bun_path="$(require bun)"
execute_sudo ln -sf "$bun_path" "$(dirname "$bun_path")/bunx"
}
install_cmake() {
@@ -799,24 +889,19 @@ install_build_essentials() {
install_cmake
install_llvm
install_osxcross
install_gcc
install_ccache
install_rust
install_docker
}
llvm_version_exact() {
case "$os-$abi" in
darwin-* | windows-* | linux-musl)
print "18.1.8"
;;
linux-*)
print "16.0.6"
;;
esac
print "18.1.8"
}
llvm_version() {
echo "$(llvm_version_exact)" | cut -d. -f1
print "$(llvm_version_exact)" | cut -d. -f1
}
install_llvm() {
@@ -824,14 +909,7 @@ install_llvm() {
apt)
bash="$(require bash)"
llvm_script="$(download_file "https://apt.llvm.org/llvm.sh")"
case "$distro-$release" in
ubuntu-24*)
execute_sudo "$bash" "$llvm_script" "$(llvm_version)" all -njammy
;;
*)
execute_sudo "$bash" "$llvm_script" "$(llvm_version)" all
;;
esac
execute_sudo "$bash" "$llvm_script" "$(llvm_version)" all
;;
brew)
install_packages "llvm@$(llvm_version)"
@@ -849,6 +927,77 @@ install_llvm() {
esac
}
install_gcc() {
if ! [ "$os" = "linux" ] || ! [ "$distro" = "ubuntu" ] || [ -z "$gcc_version" ]; then
return
fi
# Taken from WebKit's Dockerfile.
# https://github.com/oven-sh/WebKit/blob/816a3c02e0f8b53f8eec06b5ed911192589b51e2/Dockerfile
execute_sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y
execute_sudo apt update -y
execute_sudo apt install -y \
"gcc-$gcc_version" \
"g++-$gcc_version" \
"libgcc-$gcc_version-dev" \
"libstdc++-$gcc_version-dev" \
libasan6 \
libubsan1 \
libatomic1 \
libtsan0 \
liblsan0 \
libgfortran5 \
libc6-dev
execute_sudo update-alternatives \
--install /usr/bin/gcc gcc "/usr/bin/gcc-$gcc_version" 130 \
--slave /usr/bin/g++ g++ "/usr/bin/g++-$gcc_version" \
--slave /usr/bin/gcc-ar gcc-ar "/usr/bin/gcc-ar-$gcc_version" \
--slave /usr/bin/gcc-nm gcc-nm "/usr/bin/gcc-nm-$gcc_version" \
--slave /usr/bin/gcc-ranlib gcc-ranlib "/usr/bin/gcc-ranlib-$gcc_version"
case "$arch" in
x64)
arch_path="x86_64-linux-gnu"
;;
aarch64)
arch_path="aarch64-linux-gnu"
;;
esac
llvm_v="18"
append_to_profile "export CC=clang-${llvm_v}"
append_to_profile "export CXX=clang++-${llvm_v}"
append_to_profile "export AR=llvm-ar-${llvm_v}"
append_to_profile "export RANLIB=llvm-ranlib-${llvm_v}"
append_to_profile "export LD=lld-${llvm_v}"
append_to_profile "export LD_LIBRARY_PATH=/usr/lib/gcc/${arch_path}/${gcc_version}:/usr/lib/${arch_path}"
append_to_profile "export LIBRARY_PATH=/usr/lib/gcc/${arch_path}/${gcc_version}:/usr/lib/${arch_path}"
append_to_profile "export CPLUS_INCLUDE_PATH=/usr/include/c++/${gcc_version}:/usr/include/${arch_path}/c++/${gcc_version}"
append_to_profile "export C_INCLUDE_PATH=/usr/lib/gcc/${arch_path}/${gcc_version}/include"
gcc_path="/usr/lib/gcc/$arch_path/$gcc_version"
create_directory "$gcc_path"
execute_sudo ln -sf /usr/lib/$arch_path/libstdc++.so.6 "$gcc_path/libstdc++.so.6"
ld_conf_path="/etc/ld.so.conf.d/gcc-$gcc_version.conf"
append_file "$ld_conf_path" "$gcc_path"
append_file "$ld_conf_path" "/usr/lib/$arch_path"
execute_sudo ldconfig
execute_sudo ln -sf $(which clang-$llvm_v) /usr/bin/clang
execute_sudo ln -sf $(which clang++-$llvm_v) /usr/bin/clang++
execute_sudo ln -sf $(which lld-$llvm_v) /usr/bin/lld
execute_sudo ln -sf $(which lldb-$llvm_v) /usr/bin/lldb
execute_sudo ln -sf $(which clangd-$llvm_v) /usr/bin/clangd
execute_sudo ln -sf $(which llvm-ar-$llvm_v) /usr/bin/llvm-ar
execute_sudo ln -sf $(which ld.lld-$llvm_v) /usr/bin/ld
execute_sudo ln -sf $(which clang) /usr/bin/cc
execute_sudo ln -sf $(which clang++) /usr/bin/c++
}
install_ccache() {
case "$pm" in
apt | apk | brew)
@@ -865,9 +1014,23 @@ install_rust() {
cargo
;;
*)
rust_home="/opt/rust"
create_directory "$rust_home"
append_to_profile "export RUSTUP_HOME=$rust_home"
append_to_profile "export CARGO_HOME=$rust_home"
sh="$(require sh)"
script=$(download_file "https://sh.rustup.rs")
execute_as_user "$sh" "$script" -y
rustup_script=$(download_file "https://sh.rustup.rs")
execute "$sh" -lc "$rustup_script -y --no-modify-path"
append_to_path "$rust_home/bin"
;;
esac
case "$osxcross" in
1)
rustup="$(require rustup)"
execute_as_user "$rustup" target add aarch64-apple-darwin
execute_as_user "$rustup" target add x86_64-apple-darwin
;;
esac
}
@@ -910,6 +1073,46 @@ install_docker() {
fi
}
macos_sdk_version() {
# https://github.com/alexey-lysiuk/macos-sdk/releases
print "13.3"
}
install_osxcross() {
if ! [ "$os" = "linux" ] || ! [ "$osxcross" = "1" ]; then
return
fi
install_packages \
libssl-dev \
lzma-dev \
libxml2-dev \
zlib1g-dev \
bzip2 \
cpio
osxcross_path="/opt/osxcross"
create_directory "$osxcross_path"
osxcross_commit="29fe6dd35522073c9df5800f8cd1feb4b9a993a8"
osxcross_tar="$(download_file "https://github.com/tpoechtrager/osxcross/archive/$osxcross_commit.tar.gz")"
execute tar -xzf "$osxcross_tar" -C "$osxcross_path"
osxcross_build_path="$osxcross_path/build"
execute mv "$osxcross_path/osxcross-$osxcross_commit" "$osxcross_build_path"
osxcross_sdk_tar="$(download_file "https://github.com/alexey-lysiuk/macos-sdk/releases/download/$(macos_sdk_version)/MacOSX$(macos_sdk_version).tar.xz")"
execute mv "$osxcross_sdk_tar" "$osxcross_build_path/tarballs/MacOSX$(macos_sdk_version).sdk.tar.xz"
bash="$(require bash)"
execute_sudo ln -sf "$(which clang-$(llvm_version))" /usr/bin/clang
execute_sudo ln -sf "$(which clang++-$(llvm_version))" /usr/bin/clang++
execute_sudo "$bash" -lc "UNATTENDED=1 TARGET_DIR='$osxcross_path' $osxcross_build_path/build.sh"
execute_sudo rm -rf "$osxcross_build_path"
grant_to_user "$osxcross_path"
}
install_tailscale() {
if [ "$docker" = "1" ]; then
return
@@ -975,14 +1178,12 @@ create_buildkite_user() {
buildkite_paths="$home /var/cache/buildkite-agent /var/log/buildkite-agent /var/run/buildkite-agent /var/run/buildkite-agent/buildkite-agent.sock"
for path in $buildkite_paths; do
execute_sudo mkdir -p "$path"
execute_sudo chown -R "$user:$group" "$path"
create_directory "$path"
done
buildkite_files="/var/run/buildkite-agent/buildkite-agent.pid"
for file in $buildkite_files; do
execute_sudo touch "$file"
execute_sudo chown "$user:$group" "$file"
create_file "$file"
done
}
@@ -992,27 +1193,22 @@ install_buildkite() {
fi
buildkite_version="3.87.0"
case "$os-$arch" in
linux-aarch64)
buildkite_filename="buildkite-agent-linux-arm64-$buildkite_version.tar.gz"
case "$arch" in
aarch64)
buildkite_arch="arm64"
;;
linux-x64)
buildkite_filename="buildkite-agent-linux-amd64-$buildkite_version.tar.gz"
;;
darwin-aarch64)
buildkite_filename="buildkite-agent-darwin-arm64-$buildkite_version.tar.gz"
;;
darwin-x64)
buildkite_filename="buildkite-agent-darwin-amd64-$buildkite_version.tar.gz"
x64)
buildkite_arch="amd64"
;;
esac
buildkite_url="https://github.com/buildkite/agent/releases/download/v$buildkite_version/$buildkite_filename"
buildkite_filepath="$(download_file "$buildkite_url" "$buildkite_filename")"
buildkite_tmpdir="$(dirname "$buildkite_filepath")"
execute tar -xzf "$buildkite_filepath" -C "$buildkite_tmpdir"
buildkite_filename="buildkite-agent-$os-$buildkite_arch-$buildkite_version.tar.gz"
buildkite_url="https://github.com/buildkite/agent/releases/download/v$buildkite_version/$buildkite_filename"
buildkite_tar="$(download_file "$buildkite_url")"
buildkite_tmpdir="$(dirname "$buildkite_tar")"
execute tar -xzf "$buildkite_tar" -C "$buildkite_tmpdir"
move_to_bin "$buildkite_tmpdir/buildkite-agent"
execute rm -rf "$buildkite_tmpdir"
}
install_chromium() {
@@ -1103,6 +1299,19 @@ install_chromium() {
esac
}
clean_system() {
# Wipes temporary files; only meaningful on CI machines, so bail out early
# everywhere else.
[ "$ci" = "1" ] || return 0
print "Cleaning system..."
for tmp_dir in /tmp /var/tmp; do
execute_sudo rm -rf "$tmp_dir"/*
done
}
main() {
check_features "$@"
check_operating_system
@@ -1114,6 +1323,7 @@ main() {
install_common_software
install_build_essentials
install_chromium
clean_system
}
main "$@"

41
scripts/check-node-all.sh Executable file
View File

@@ -0,0 +1,41 @@
#!/bin/sh
# How to use this script:
# 1. Pick a module from node's standard library (e.g. 'assert', 'fs')
# 2. Copy over relevant tests from node's parallel test suite into test/js/node/test/parallel
# 3. Run this script, e.g. `./scripts/check-node-all.sh fs`
# 4. Tests that passed get staged for commit

# i = number of tests run, j = number of tests that passed
i=0
j=0

if [ -z "$1" ]
then
  echo "Usage: $0 <module-name>"
  exit 1
fi

case $1 in
-h|--help)
  echo "Usage: $0 <module-name>"
  echo "Run all parallel tests for a single module in node's standard library"
  exit 0
  ;;
esac

# Silence bun-debug's debug logging so test output stays readable.
export BUN_DEBUG_QUIET_LOGS=1

for x in $(find test/js/node/test/parallel -type f -name "test-$1*.js")
do
  i=$((i+1))
  echo "./$x"
  # Each test gets 2 seconds; hung tests count as failures.
  if timeout 2 "$PWD/build/debug/bun-debug" "./$x"
  then
    j=$((j+1))
    git add "./$x"
  fi
  echo
done

echo "$i tests tested"
echo "$j tests passed"

View File

@@ -1,8 +1,30 @@
#!/bin/bash
#!/bin/sh
# How to use this script:
# 1. Pick a module from node's standard library (e.g. 'assert', 'fs')
# 2. Copy over relevant tests from node's parallel test suite into test/js/node/test/parallel
# 3. Run this script, e.g. `./scripts/check-node.sh fs`
# 4. Tests that passed get staged for commit
i=0
j=0
if [ -z "$1" ]
then
echo "Usage: $0 <module-name>"
exit 1
fi
case $1 in
-h|--help)
echo "Usage: $0 <module-name>"
echo "Run all unstaged parallel tests for a single module in node's standard library"
exit 0
;;
esac
export BUN_DEBUG_QUIET_LOGS=1
for x in $(git ls-files test/js/node/test/parallel --exclude-standard --others | grep test-$1)
do
i=$((i+1))
@@ -13,7 +35,6 @@ do
git add ./$x
fi
echo
echo
done
echo $i tests tested

300
scripts/docker.mjs Normal file
View File

@@ -0,0 +1,300 @@
import { inspect } from "node:util";
import { $, isCI, spawn, spawnSafe, which } from "./utils.mjs";
export const docker = {
get name() {
return "docker";
},
/**
* @typedef {"linux" | "darwin" | "windows"} DockerOs
* @typedef {"amd64" | "arm64"} DockerArch
* @typedef {`${DockerOs}/${DockerArch}`} DockerPlatform
*/
/**
* @param {Platform} platform
* @returns {DockerPlatform}
*/
getPlatform(platform) {
const { os, arch } = platform;
if (arch === "aarch64") {
return `${os}/arm64`;
} else if (arch === "x64") {
return `${os}/amd64`;
}
throw new Error(`Unsupported platform: ${inspect(platform)}`);
},
/**
* @typedef DockerSpawnOptions
* @property {DockerPlatform} [platform]
* @property {boolean} [json]
*/
/**
* @param {string[]} args
* @param {DockerSpawnOptions & import("./utils.mjs").SpawnOptions} [options]
* @returns {Promise<unknown>}
*/
async spawn(args, options = {}) {
const docker = which("docker", { required: true });
let env = { ...process.env };
if (isCI) {
env["BUILDKIT_PROGRESS"] = "plain";
}
const { json, platform } = options;
if (json) {
args.push("--format=json");
}
if (platform) {
args.push(`--platform=${platform}`);
}
const { error, stdout } = await spawnSafe($`${docker} ${args}`, { env, ...options });
if (error) {
return;
}
if (!json) {
return stdout;
}
try {
return JSON.parse(stdout);
} catch {
return;
}
},
/**
* @typedef {Object} DockerImage
* @property {string} Id
* @property {string[]} RepoTags
* @property {string[]} RepoDigests
* @property {string} Created
* @property {DockerOs} Os
* @property {DockerArch} Architecture
* @property {number} Size
*/
/**
* @param {string} url
* @param {DockerPlatform} [platform]
* @returns {Promise<boolean>}
*/
async pullImage(url, platform) {
const done = await this.spawn($`pull ${url}`, {
platform,
throwOnError: error => !/No such image|manifest unknown/i.test(inspect(error)),
});
return !!done;
},
/**
* @param {string} url
* @param {DockerPlatform} [platform]
* @returns {Promise<DockerImage | undefined>}
*/
async inspectImage(url, platform) {
/** @type {DockerImage[]} */
const images = await this.spawn($`image inspect ${url}`, {
json: true,
throwOnError: error => !/No such image/i.test(inspect(error)),
});
if (!images) {
const pulled = await this.pullImage(url, platform);
if (pulled) {
return this.inspectImage(url, platform);
}
}
const { os, arch } = platform || {};
return images
?.filter(({ Os, Architecture }) => !os || !arch || (Os === os && Architecture === arch))
?.find((a, b) => (a.Created < b.Created ? 1 : -1));
},
/**
* @typedef {Object} DockerContainer
* @property {string} Id
* @property {string} Name
* @property {string} Image
* @property {string} Created
* @property {DockerContainerState} State
* @property {DockerContainerNetworkSettings} NetworkSettings
*/
/**
* @typedef {Object} DockerContainerState
* @property {"exited" | "running"} Status
* @property {number} [Pid]
* @property {number} ExitCode
* @property {string} [Error]
* @property {string} StartedAt
* @property {string} FinishedAt
*/
/**
* @typedef {Object} DockerContainerNetworkSettings
* @property {string} [IPAddress]
*/
/**
* @param {string} containerId
* @returns {Promise<DockerContainer | undefined>}
*/
async inspectContainer(containerId) {
const containers = await this.spawn($`container inspect ${containerId}`, { json: true });
return containers?.find(a => a.Id === containerId);
},
/**
* @returns {Promise<DockerContainer[]>}
*/
async listContainers() {
const containers = await this.spawn($`container ls --all`, { json: true });
return containers || [];
},
/**
* @typedef {Object} DockerRunOptions
* @property {string[]} [command]
* @property {DockerPlatform} [platform]
* @property {string} [name]
* @property {boolean} [detach]
* @property {"always" | "never"} [pull]
* @property {boolean} [rm]
* @property {"no" | "on-failure" | "always"} [restart]
*/
/**
* @param {string} url
* @param {DockerRunOptions} [options]
* @returns {Promise<DockerContainer>}
*/
async runContainer(url, options = {}) {
const { detach, command = [], ...containerOptions } = options;
const args = Object.entries(containerOptions)
.filter(([_, value]) => typeof value !== "undefined")
.map(([key, value]) => (typeof value === "boolean" ? `--${key}` : `--${key}=${value}`));
if (detach) {
args.push("--detach");
} else {
args.push("--tty", "--interactive");
}
const stdio = detach ? "pipe" : "inherit";
const result = await this.spawn($`run ${args} ${url} ${command}`, { stdio });
if (!detach) {
return;
}
const containerId = result.trim();
const container = await this.inspectContainer(containerId);
if (!container) {
throw new Error(`Failed to run container: ${inspect(result)}`);
}
return container;
},
/**
* @param {Platform} platform
* @returns {Promise<DockerImage>}
*/
async getBaseImage(platform) {
const { os, distro, release } = platform;
const dockerPlatform = this.getPlatform(platform);
let url;
if (os === "linux") {
if (distro === "debian" || distro === "ubuntu" || distro === "alpine") {
url = `docker.io/library/${distro}:${release}`;
} else if (distro === "amazonlinux") {
url = `public.ecr.aws/amazonlinux/amazonlinux:${release}`;
}
}
if (url) {
const image = await this.inspectImage(url, dockerPlatform);
if (image) {
return image;
}
}
throw new Error(`Unsupported platform: ${inspect(platform)}`);
},
/**
* @param {DockerContainer} container
* @param {MachineOptions} [options]
* @returns {Machine}
*/
toMachine(container, options = {}) {
const { Id: containerId } = container;
const exec = (command, options) => {
return spawn(["docker", "exec", containerId, ...command], options);
};
const execSafe = (command, options) => {
return spawnSafe(["docker", "exec", containerId, ...command], options);
};
const upload = async (source, destination) => {
await spawn(["docker", "cp", source, `${containerId}:${destination}`]);
};
const attach = async () => {
const { exitCode, error } = await spawn(["docker", "exec", "-it", containerId, "sh"], {
stdio: "inherit",
});
if (exitCode === 0 || exitCode === 130) {
return;
}
throw error;
};
const snapshot = async name => {
await spawn(["docker", "commit", containerId]);
};
const kill = async () => {
await spawn(["docker", "kill", containerId]);
};
return {
cloud: "docker",
id: containerId,
spawn: exec,
spawnSafe: execSafe,
upload,
attach,
snapshot,
close: kill,
[Symbol.asyncDispose]: kill,
};
},
/**
* @param {MachineOptions} options
* @returns {Promise<Machine>}
*/
async createMachine(options) {
const { Id: imageId, Os, Architecture } = await docker.getBaseImage(options);
const container = await docker.runContainer(imageId, {
platform: `${Os}/${Architecture}`,
command: ["sleep", "1d"],
detach: true,
rm: true,
restart: "no",
});
return this.toMachine(container, options);
},
};

510
scripts/google.mjs Normal file
View File

@@ -0,0 +1,510 @@
import { $, spawnSafe, which, getUsernameForDistro } from "./utils.mjs";
export const google = {
get cloud() {
return "google";
},
/**
* @param {string[]} args
* @param {import("./utils.mjs").SpawnOptions} [options]
* @returns {Promise<unknown>}
*/
async spawn(args, options = {}) {
const gcloud = which("gcloud", { required: true });
let env = { ...process.env };
// if (isCI) {
// env; // TODO: Add Google Cloud credentials
// } else {
// env["TERM"] = "dumb";
// }
const { stdout } = await spawnSafe($`${gcloud} ${args} --format json`, {
env,
...options,
});
try {
return JSON.parse(stdout);
} catch {
return;
}
},
/**
* @param {Record<string, string | undefined>} [options]
* @returns {string[]}
*/
getFilters(options = {}) {
const filter = Object.entries(options)
.filter(([, value]) => value !== undefined)
.map(([key, value]) => [value.includes("*") ? `${key}~${value}` : `${key}=${value}`])
.join(" AND ");
return filter ? ["--filter", filter] : [];
},
/**
* @param {Record<string, string | boolean | undefined>} options
* @returns {string[]}
*/
getFlags(options) {
return Object.entries(options)
.filter(([, value]) => value !== undefined)
.flatMap(([key, value]) => {
if (typeof value === "boolean") {
return value ? [`--${key}`] : [];
}
return [`--${key}=${value}`];
});
},
/**
* @param {Record<string, string | boolean | undefined>} options
* @returns {string}
* @link https://cloud.google.com/sdk/gcloud/reference/topic/escaping
*/
getMetadata(options) {
const delimiter = Math.random().toString(36).substring(2, 15);
const entries = Object.entries(options)
.map(([key, value]) => `${key}=${value}`)
.join(delimiter);
return `^${delimiter}^${entries}`;
},
/**
* @param {string} name
* @returns {string}
*/
getLabel(name) {
return name.replace(/[^a-z0-9_-]/g, "-").toLowerCase();
},
/**
* @typedef {Object} GoogleImage
* @property {string} id
* @property {string} name
* @property {string} family
* @property {"X86_64" | "ARM64"} architecture
* @property {string} diskSizeGb
* @property {string} selfLink
* @property {"READY"} status
* @property {string} creationTimestamp
*/
/**
* @param {Partial<GoogleImage>} [options]
* @returns {Promise<GoogleImage[]>}
* @link https://cloud.google.com/sdk/gcloud/reference/compute/images/list
*/
async listImages(options) {
const filters = google.getFilters(options);
const images = await google.spawn($`compute images list ${filters} --preview-images --show-deprecated`);
return images.sort((a, b) => (a.creationTimestamp < b.creationTimestamp ? 1 : -1));
},
/**
* @param {Record<string, string | boolean | undefined>} options
* @returns {Promise<GoogleImage>}
* @link https://cloud.google.com/sdk/gcloud/reference/compute/images/create
*/
async createImage(options) {
const { name, ...otherOptions } = options;
const flags = this.getFlags(otherOptions);
const imageId = name || "i-" + Math.random().toString(36).substring(2, 15);
return this.spawn($`compute images create ${imageId} ${flags}`);
},
/**
* @typedef {Object} GoogleInstance
* @property {string} id
* @property {string} name
* @property {"RUNNING"} status
* @property {string} machineType
* @property {string} zone
* @property {GoogleDisk[]} disks
* @property {GoogleNetworkInterface[]} networkInterfaces
* @property {object} [scheduling]
* @property {"STANDARD" | "SPOT"} [scheduling.provisioningModel]
* @property {boolean} [scheduling.preemptible]
* @property {Record<string, string | undefined>} [labels]
* @property {string} selfLink
* @property {string} creationTimestamp
*/
/**
* @typedef {Object} GoogleDisk
* @property {string} deviceName
* @property {boolean} boot
* @property {"X86_64" | "ARM64"} architecture
* @property {string[]} [licenses]
* @property {number} diskSizeGb
*/
/**
* @typedef {Object} GoogleNetworkInterface
* @property {"IPV4_ONLY" | "IPV4_IPV6" | "IPV6_ONLY"} stackType
* @property {string} name
* @property {string} network
* @property {string} networkIP
* @property {string} subnetwork
* @property {GoogleAccessConfig[]} accessConfigs
*/
/**
* @typedef {Object} GoogleAccessConfig
* @property {string} name
* @property {"ONE_TO_ONE_NAT" | "INTERNAL_NAT"} type
* @property {string} [natIP]
*/
/**
* @param {Record<string, string | boolean | undefined>} options
* @returns {Promise<GoogleInstance>}
* @link https://cloud.google.com/sdk/gcloud/reference/compute/instances/create
*/
async createInstance(options) {
const { name, ...otherOptions } = options || {};
const flags = this.getFlags(otherOptions);
const instanceId = name || "i-" + Math.random().toString(36).substring(2, 15);
const [instance] = await this.spawn($`compute instances create ${instanceId} ${flags}`);
return instance;
},
/**
* @param {string} instanceId
* @param {string} zoneId
* @returns {Promise<void>}
* @link https://cloud.google.com/sdk/gcloud/reference/compute/instances/stop
*/
async stopInstance(instanceId, zoneId) {
await this.spawn($`compute instances stop ${instanceId} --zone=${zoneId}`);
},
/**
* @param {string} instanceId
* @param {string} zoneId
* @returns {Promise<void>}
* @link https://cloud.google.com/sdk/gcloud/reference/compute/instances/delete
*/
async deleteInstance(instanceId, zoneId) {
await this.spawn($`compute instances delete ${instanceId} --delete-disks=all --zone=${zoneId}`, {
throwOnError: error => !/not found/i.test(inspect(error)),
});
},
/**
* @param {string} instanceId
* @param {string} username
* @param {string} zoneId
* @param {object} [options]
* @param {boolean} [options.wait]
* @returns {Promise<string | undefined>}
* @link https://cloud.google.com/sdk/gcloud/reference/compute/reset-windows-password
*/
async resetWindowsPassword(instanceId, username, zoneId, options = {}) {
const attempts = options.wait ? 15 : 1;
for (let i = 0; i < attempts; i++) {
const result = await this.spawn(
$`compute reset-windows-password ${instanceId} --user=${username} --zone=${zoneId}`,
{
throwOnError: error => !/instance may not be ready for use/i.test(inspect(error)),
},
);
if (result) {
const { password } = result;
if (password) {
return password;
}
}
await new Promise(resolve => setTimeout(resolve, 60000 * i));
}
},
/**
* @param {Partial<GoogleInstance>} options
* @returns {Promise<GoogleInstance[]>}
*/
async listInstances(options) {
const filters = this.getFilters(options);
const instances = await this.spawn($`compute instances list ${filters}`);
return instances.sort((a, b) => (a.creationTimestamp < b.creationTimestamp ? 1 : -1));
},
/**
* @param {MachineOptions} options
* @returns {Promise<GoogleImage>}
*/
async getMachineImage(options) {
const { os, arch, distro, release } = options;
const architecture = arch === "aarch64" ? "ARM64" : "X86_64";
/** @type {string | undefined} */
let family;
if (os === "linux") {
if (!distro || distro === "debian") {
family = `debian-${release || "*"}`;
} else if (distro === "ubuntu") {
family = `ubuntu-${release?.replace(/\./g, "") || "*"}`;
} else if (distro === "fedora") {
family = `fedora-coreos-${release || "*"}`;
} else if (distro === "rhel") {
family = `rhel-${release || "*"}`;
}
} else if (os === "windows" && arch === "x64") {
if (!distro || distro === "server") {
family = `windows-${release || "*"}`;
}
}
if (family) {
const images = await this.listImages({ family, architecture });
if (images.length) {
const [image] = images;
return image;
}
}
throw new Error(`Unsupported platform: ${inspect(options)}`);
},
/**
* @param {MachineOptions} options
* @returns {Promise<Machine>}
*/
async createMachine(options) {
const { name, os, arch, distro, instanceType, tags, preemptible, detached } = options;
const image = await google.getMachineImage(options);
const { selfLink: imageUrl } = image;
const username = getUsername(distro || os);
const userData = getUserData({ ...options, username });
/** @type {Record<string, string>} */
let metadata;
if (os === "windows") {
metadata = {
"enable-windows-ssh": "TRUE",
"sysprep-specialize-script-ps1": userData,
};
} else {
metadata = {
"user-data": userData,
};
}
const instance = await google.createInstance({
"name": name,
"zone": "us-central1-a",
"image": imageUrl,
"machine-type": instanceType || (arch === "aarch64" ? "t2a-standard-2" : "t2d-standard-2"),
"boot-disk-auto-delete": true,
"boot-disk-size": `${getDiskSize(options)}GB`,
"metadata": this.getMetadata(metadata),
"labels": Object.entries(tags || {})
.filter(([, value]) => value !== undefined)
.map(([key, value]) => `${this.getLabel(key)}=${value}`)
.join(","),
"provisioning-model": preemptible ? "SPOT" : "STANDARD",
"instance-termination-action": preemptible || !detached ? "DELETE" : undefined,
"no-restart-on-failure": true,
"threads-per-core": 1,
"max-run-duration": detached ? undefined : "6h",
});
return this.toMachine(instance, options);
},
/**
* @param {GoogleInstance} instance
* @param {MachineOptions} [options]
* @returns {Machine}
*/
toMachine(instance, options = {}) {
const { id: instanceId, name, zone: zoneUrl, machineType: machineTypeUrl, labels } = instance;
const machineType = machineTypeUrl.split("/").pop();
const zoneId = zoneUrl.split("/").pop();
let os, arch, distro, release;
const { disks = [] } = instance;
for (const { boot, architecture, licenses = [] } of disks) {
if (!boot) {
continue;
}
if (architecture === "X86_64") {
arch = "x64";
} else if (architecture === "ARM64") {
arch = "aarch64";
}
for (const license of licenses) {
const linuxMatch = /(debian|ubuntu|fedora|rhel)-(\d+)/i.exec(license);
if (linuxMatch) {
os = "linux";
[, distro, release] = linuxMatch;
} else {
const windowsMatch = /windows-server-(\d+)-dc-core/i.exec(license);
if (windowsMatch) {
os = "windows";
distro = "windowsserver";
[, release] = windowsMatch;
}
}
}
}
let publicIp;
const { networkInterfaces = [] } = instance;
for (const { accessConfigs = [] } of networkInterfaces) {
for (const { type, natIP } of accessConfigs) {
if (type === "ONE_TO_ONE_NAT" && natIP) {
publicIp = natIP;
}
}
}
let preemptible;
const { scheduling } = instance;
if (scheduling) {
const { provisioningModel, preemptible: isPreemptible } = scheduling;
preemptible = provisioningModel === "SPOT" || isPreemptible;
}
/**
* @returns {SshOptions}
*/
const connect = () => {
if (!publicIp) {
throw new Error(`Failed to find public IP for instance: ${name}`);
}
/** @type {string | undefined} */
let username;
const { os, distro } = options;
if (os || distro) {
username = getUsernameForDistro(distro || os);
}
return { hostname: publicIp, username };
};
const spawn = async (command, options) => {
const connectOptions = connect();
return spawnSsh({ ...connectOptions, command }, options);
};
const spawnSafe = async (command, options) => {
const connectOptions = connect();
return spawnSshSafe({ ...connectOptions, command }, options);
};
const rdp = async () => {
const { hostname, username } = connect();
const rdpUsername = `${username}-rdp`;
const password = await google.resetWindowsPassword(instanceId, rdpUsername, zoneId, { wait: true });
return { hostname, username: rdpUsername, password };
};
const attach = async () => {
const connectOptions = connect();
await spawnSshSafe({ ...connectOptions });
};
const upload = async (source, destination) => {
const connectOptions = connect();
await spawnScp({ ...connectOptions, source, destination });
};
const snapshot = async name => {
const stopResult = await this.stopInstance(instanceId, zoneId);
console.log(stopResult);
const image = await this.createImage({
["source-disk"]: instanceId,
["zone"]: zoneId,
["name"]: name || `${instanceId}-snapshot-${Date.now()}`,
});
console.log(image);
return;
};
const terminate = async () => {
await google.deleteInstance(instanceId, zoneId);
};
return {
cloud: "google",
os,
arch,
distro,
release,
id: instanceId,
imageId: undefined,
name,
instanceType: machineType,
region: zoneId,
publicIp,
preemptible,
labels,
spawn,
spawnSafe,
rdp,
attach,
upload,
snapshot,
close: terminate,
[Symbol.asyncDispose]: terminate,
};
},
/**
* @param {Record<string, string>} [labels]
* @returns {Promise<Machine[]>}
*/
async getMachines(labels) {
const filters = labels ? this.getFilters({ labels }) : {};
const instances = await google.listInstances(filters);
return instances.map(instance => this.toMachine(instance));
},
/**
* @param {MachineOptions} options
* @returns {Promise<MachineImage>}
*/
async getImage(options) {
const { os, arch, distro, release } = options;
const architecture = arch === "aarch64" ? "ARM64" : "X86_64";
let name;
let username;
if (os === "linux") {
if (distro === "debian") {
name = `debian-${release}-*`;
username = "admin";
} else if (distro === "ubuntu") {
name = `ubuntu-${release.replace(/\./g, "")}-*`;
username = "ubuntu";
}
} else if (os === "windows" && arch === "x64") {
if (distro === "server") {
name = `windows-server-${release}-dc-core-*`;
username = "administrator";
}
}
if (name && username) {
const images = await google.listImages({ name, architecture });
if (images.length) {
const [image] = images;
const { name, selfLink } = image;
return {
id: selfLink,
name,
username,
};
}
}
throw new Error(`Unsupported platform: ${inspect(platform)}`);
},
};

File diff suppressed because it is too large Load Diff

195
scripts/orbstack.mjs Normal file
View File

@@ -0,0 +1,195 @@
import { inspect } from "node:util";
import { $, mkdtemp, rm, spawnSafe, writeFile, getUsernameForDistro, spawnSshSafe, setupUserData } from "./utils.mjs";
import { getUserData } from "./machine.mjs";
/**
* @link https://docs.orbstack.dev/
*/
export const orbstack = {
get name() {
return "orbstack";
},
/**
* @typedef {Object} OrbstackImage
* @property {string} distro
* @property {string} version
* @property {string} arch
*/
/**
* @param {Platform} platform
* @returns {OrbstackImage}
*/
getImage(platform) {
const { os, arch, distro, release } = platform;
if (os !== "linux" || !/^debian|ubuntu|alpine|fedora|centos$/.test(distro)) {
throw new Error(`Unsupported platform: ${inspect(platform)}`);
}
return {
distro,
version: release,
arch: arch === "aarch64" ? "arm64" : "amd64",
};
},
/**
* @typedef {Object} OrbstackVm
* @property {string} id
* @property {string} name
* @property {"running"} state
* @property {OrbstackImage} image
* @property {OrbstackConfig} config
*/
/**
* @typedef {Object} OrbstackConfig
* @property {string} default_username
* @property {boolean} isolated
*/
/**
* @typedef {Object} OrbstackVmOptions
* @property {string} [name]
* @property {OrbstackImage} image
* @property {string} [username]
* @property {string} [password]
* @property {string} [userData]
*/
/**
* @param {OrbstackVmOptions} options
* @returns {Promise<OrbstackVm>}
*/
async createVm(options) {
const { name, image, username, password, userData } = options;
const { distro, version, arch } = image;
const uniqueId = name || `linux-${distro}-${version}-${arch}-${Math.random().toString(36).slice(2, 11)}`;
const args = [`--arch=${arch}`, `${distro}:${version}`, uniqueId];
if (username) {
args.push(`--user=${username}`);
}
if (password) {
args.push(`--set-password=${password}`);
}
let userDataPath;
if (userData) {
userDataPath = mkdtemp("orbstack-user-data-", "user-data.txt");
console.log("User data path:", userData);
writeFile(userDataPath, userData);
args.push(`--user-data=${userDataPath}`);
}
try {
await spawnSafe($`orbctl create ${args}`);
} finally {
if (userDataPath) {
rm(userDataPath);
}
}
return this.inspectVm(uniqueId);
},
/**
* @param {string} name
*/
async deleteVm(name) {
await spawnSafe($`orbctl delete ${name}`, {
throwOnError: error => !/machine not found/i.test(inspect(error)),
});
},
/**
* @param {string} name
* @returns {Promise<OrbstackVm | undefined>}
*/
async inspectVm(name) {
const { exitCode, stdout } = await spawnSafe($`orbctl info ${name} --format=json`, {
throwOnError: error => !/machine not found/i.test(inspect(error)),
});
if (exitCode === 0) {
return JSON.parse(stdout);
}
},
/**
* @returns {Promise<OrbstackVm[]>}
*/
async listVms() {
const { stdout } = await spawnSafe($`orbctl list --format=json`);
return JSON.parse(stdout);
},
/**
* @param {MachineOptions} options
* @returns {Promise<Machine>}
*/
async createMachine(options) {
const { distro } = options;
const username = getUsernameForDistro(distro);
const userData = getUserData({ ...options, username });
const image = this.getImage(options);
const vm = await this.createVm({
image,
username,
userData,
});
const machine = this.toMachine(vm, options);
await setupUserData(machine, options);
return machine;
},
/**
* @param {OrbstackVm} vm
* @returns {Machine}
*/
toMachine(vm) {
const { id, name, config } = vm;
const { default_username: username } = config;
const connectOptions = {
username,
hostname: `${name}@orb`,
};
const exec = async (command, options) => {
return spawnSsh({ ...connectOptions, command }, options);
};
const execSafe = async (command, options) => {
return spawnSshSafe({ ...connectOptions, command }, options);
};
const attach = async () => {
await spawnSshSafe({ ...connectOptions });
};
const upload = async (source, destination) => {
await spawnSafe(["orbctl", "push", `--machine=${name}`, source, destination]);
};
const close = async () => {
await this.deleteVm(name);
};
return {
cloud: "orbstack",
id,
name,
spawn: exec,
spawnSafe: execSafe,
upload,
attach,
close,
[Symbol.asyncDispose]: close,
};
},
};

Some files were not shown because too many files have changed in this diff Show More