Compare commits


57 Commits

Author SHA1 Message Date
snwy
a93ce9f4a6 init 2024-12-13 16:10:27 -08:00
snwy
1de386bca7 100% punycode 2024-12-10 14:54:03 -08:00
dave caruso
b39632c921 feat: new binding generator (#15638) 2024-12-10 12:43:17 -08:00
Jarred Sumner
38325aa41c Introduce env option in Bun.build() and bun build to let you inject FOO_PUBLIC_*-style env vars (#15678) 2024-12-10 01:09:46 -08:00
Jarred Sumner
969bab3848 [build images] 2024-12-10 00:54:04 -08:00
Jarred Sumner
5bd4972d5b Add passing node tests (#15675) 2024-12-10 00:02:09 -08:00
Meghan Denny
68780faee2 fix windows build 2024-12-09 22:30:44 -08:00
Jarred Sumner
0bbc18fd19 Fix rare crash in bun install (#15651) 2024-12-09 20:59:29 -08:00
Meghan Denny
53318c8b13 ci: run re-enable node tests on all platforms (#15572) 2024-12-09 19:08:30 -08:00
Jarred Sumner
abe69901b2 make the helper quieter 2024-12-09 17:42:40 -08:00
Jarred Sumner
c0cf0414a0 Add helper for running node tests 2024-12-09 17:37:53 -08:00
Natt Nguyen
3dc3527171 fix: testing library docs (#15667) 2024-12-09 16:29:34 -08:00
Don Isaac
af4f1c7d39 test: fix case to allow bun-debug (#15660)
Co-authored-by: Don Isaac <don@bun.sh>
2024-12-08 23:48:43 -08:00
github-actions[bot]
2c1dea818c deps: update sqlite to 3.470.200 (#15652)
Co-authored-by: Jarred-Sumner <Jarred-Sumner@users.noreply.github.com>
2024-12-08 02:16:41 -08:00
Kai Tamkun
cc125b475f Fix missing "readable" events (#15629) 2024-12-06 23:59:47 -08:00
Don Isaac
cbbf88f3a6 refactor: remove unused main_api.zig file (#15635)
Co-authored-by: Don Isaac <don@bun.sh>
2024-12-06 21:01:25 -08:00
Don Isaac
8064a55a48 test(bake): fix double free (#15634)
Co-authored-by: Don Isaac <don@bun.sh>
2024-12-06 20:06:26 -08:00
Jarred Sumner
0531d6756c Ci is doing too much 2024-12-06 19:28:28 -08:00
Ciro Spaciari
6135b3dec9 fix(CI) deflaky node-http.test.ts (#15625) 2024-12-06 19:16:59 -08:00
Don Isaac
b08dd8795e test(web): fix setTimeout refresh test (#15630)
Co-authored-by: Don Isaac <don@bun.sh>
2024-12-06 19:14:07 -08:00
Ciro Spaciari
c1eba5886f fix(net) signal should destroy the connection and propagate the error properly (#15624) 2024-12-06 16:10:33 -08:00
Ciro Spaciari
fcca2cc398 fix(fetch) fix redirect + Connection: close (#15623) 2024-12-06 15:06:11 -08:00
Yuto Ogino
dd32e6b416 Fix zsh auto-completion for package.json scripts with name containing colons (#15619) 2024-12-06 10:53:43 -08:00
Jarred Sumner
b453360dff Fixes #15480 (#15611) 2024-12-05 21:15:21 -08:00
pfg
1476e4c958 implement toThrowErrorMatchingSnapshot, toThrowErrorMatchingInlineSnapshot (#15607) 2024-12-05 19:07:18 -08:00
Ashcon Partovi
eacf89e5bf ci: Fix CPU count on build runners 2024-12-05 14:20:05 -08:00
Ashcon Partovi
fa6ac405a4 ci: Add bootstrap.ps1 and automate Windows build images (#15606) 2024-12-05 15:16:37 -07:00
pfg
4c8cbecb08 Support flag parameter in readFileSync (#15595)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-12-05 13:41:44 -08:00
Michael H
00b7d6479b bun repl disable inspector/debugger (#15594) 2024-12-05 13:41:21 -08:00
pfg
bcf023c829 Implement expect().toMatchInlineSnapshot() (#15570) 2024-12-05 13:07:10 -08:00
Jarred Sumner
b7b1ca8ebe Fixes https://github.com/oven-sh/bun/issues/15307 2024-12-05 02:39:34 -08:00
Jarred Sumner
784bc4e012 Introduce high-performance native addon API in Bun.build, starting with build.onBeforeParse hook (#14971)
Co-authored-by: Zack Radisic <56137411+zackradisic@users.noreply.github.com>
Co-authored-by: zackradisic <zackradisic@users.noreply.github.com>
2024-12-04 22:35:43 -08:00
Ciro Spaciari
dd5c40dab7 fix(node:http) fix node:http chunked encoding on server and add chunked encoding support on the client (#15579)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-12-04 17:58:21 -08:00
190n
3a4a9ae4e9 Add v8::api_internal::FromJustIsNothing (#15583)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-12-04 17:57:40 -08:00
Jarred Sumner
9d1a35b658 Fixes https://github.com/oven-sh/bun/issues/15556 (#15582)
Co-authored-by: Andres Gutierrez <andresgutierrez535@gmail.com>
2024-12-04 17:57:05 -08:00
Meghan Denny
61cc9c3947 Revert "ci: Add bootstrap.ps1 and automate Windows build images" (#15591) 2024-12-04 17:07:35 -08:00
Ashcon Partovi
e904a181d8 ci: Add bootstrap.ps1 and automate Windows build images (#15466)
Co-authored-by: Electroid <Electroid@users.noreply.github.com>
2024-12-04 17:33:00 -07:00
Jarred Sumner
55a0bdc68d deflake process.test.js 2024-12-04 13:52:37 -08:00
Meghan Denny
55454f7910 [publish images] 2024-12-04 13:46:17 -08:00
Jarred Sumner
e4aeb761e4 Ensure we always drain the dependency list in runTasks() (#15511) 2024-12-04 12:40:11 -08:00
pfg
f9efe94b85 Fixes ^C on bun vite (#15545) 2024-12-04 12:39:55 -08:00
Robert Shuford
7eb8a3feae Fixes #14433 - global .npmrc not using auth (#15539) 2024-12-04 12:37:18 -08:00
Dylan Conway
d7ed9c673e add a --config test for bun install (#15546) 2024-12-04 12:36:10 -08:00
Ashcon Partovi
b4dce96c40 ci: Publish musl releases to npm 2024-12-04 10:19:15 -08:00
Meghan Denny
52ef8b1778 ci: make annotations always link to file content by commit (#15573) 2024-12-04 01:30:26 -08:00
dave caruso
baff3c900e bake: fix the big regressions (#15544)
Co-authored-by: paperdave <paperdave@users.noreply.github.com>
2024-12-03 22:15:59 -08:00
Meghan Denny
23299dadf6 ci: run node tests directly instead of translated files (#15565)
Co-authored-by: nektro <nektro@users.noreply.github.com>
2024-12-03 22:10:50 -08:00
Jarred Sumner
0d5e4e162b spawnSync shouldn't throw (#15561)
Co-authored-by: Meghan Denny <meghan@bun.sh>
2024-12-03 19:26:36 -08:00
Don Isaac
d27594ecf4 fix(deps/boringssl): re-enable BIO_new_mem_buf (#15559)
Co-authored-by: Don Isaac <don@bun.sh>
Co-authored-by: DonIsaac <DonIsaac@users.noreply.github.com>
2024-12-03 16:11:42 -08:00
Ciro Spaciari
a2e2d114e9 fix(net/tls) fix backpressure pause on socket (#15543) 2024-12-03 12:53:48 -08:00
Kai Tamkun
da3d64b1ef Remove a duplicate if statement (#15555) 2024-12-03 12:33:27 -08:00
Jarred Sumner
ce64e04b16 Reduce memory usage of WebSocket server (#15553) 2024-12-03 12:33:04 -08:00
Dylan Conway
55473cb64a fix(node:crypto): use options from createHash(alg, options) (#15547) 2024-12-03 12:32:41 -08:00
Meghan Denny
752441d911 package.json: put :local builds into their own folder (#15540) 2024-12-03 12:22:46 -08:00
Leah Lundqvist
da5d4d791c docs: add .env.test to guides/runtime/set-env for consistency with do… (#15542) 2024-12-02 15:01:08 -08:00
Dylan Conway
6d453be7d9 fix 14540 (#15498) 2024-12-02 14:57:49 -08:00
Meghan Denny
2d441d868b zig: make throw use JSError (#15444) 2024-12-02 14:19:18 -08:00
2642 changed files with 77858 additions and 81362 deletions

File diff suppressed because it is too large.

@@ -8,4 +8,4 @@ function run_command() {
{ set +x; } 2>/dev/null
}
run_command node ".buildkite/ci.mjs"
run_command node ".buildkite/ci.mjs" "$@"

.gitignore vendored

@@ -116,8 +116,10 @@ scripts/env.local
sign.*.json
sign.json
src/bake/generated.ts
src/generated_enum_extractor.zig
src/bun.js/bindings-obj
src/bun.js/bindings/GeneratedJS2Native.zig
src/bun.js/bindings/GeneratedBindings.zig
src/bun.js/debug-bindings-obj
src/deps/zig-clap/.gitattributes
src/deps/zig-clap/.github


@@ -5,6 +5,5 @@ test/js/deno
test/node.js
src/react-refresh.js
*.min.js
test/js/node/test/fixtures
test/js/node/test/common
test/snippets
test/js/node/test

.vscode/launch.json generated vendored

@@ -16,7 +16,6 @@
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "1",
@@ -33,7 +32,6 @@
"args": ["test", "--only", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "1",
"BUN_DEBUG_jest": "1",
@@ -56,7 +54,6 @@
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
@@ -73,7 +70,6 @@
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "0",
"BUN_DEBUG_jest": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
@@ -90,7 +86,6 @@
"args": ["test", "--watch", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
@@ -107,7 +102,6 @@
"args": ["test", "--hot", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
@@ -124,7 +118,6 @@
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
@@ -147,7 +140,6 @@
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
@@ -187,7 +179,6 @@
"args": ["run", "${fileBasename}"],
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
"BUN_DEBUG_IncrementalGraph": "1",
@@ -207,7 +198,6 @@
"args": ["run", "${fileBasename}"],
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "0",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
@@ -223,7 +213,6 @@
"args": ["run", "--watch", "${fileBasename}"],
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1",
// "BUN_DEBUG_DEBUGGER": "1",
// "BUN_DEBUG_INTERNAL_DEBUGGER": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -242,7 +231,6 @@
"args": ["run", "--hot", "${fileBasename}"],
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
@@ -303,7 +291,6 @@
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
@@ -320,7 +307,6 @@
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
@@ -337,7 +323,6 @@
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
@@ -354,7 +339,6 @@
"args": ["test", "--watch", "${input:testName}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
@@ -371,7 +355,6 @@
"args": ["test", "--hot", "${input:testName}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
@@ -388,7 +371,6 @@
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
@@ -411,7 +393,6 @@
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
@@ -435,7 +416,6 @@
"args": ["exec", "${input:testName}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
@@ -452,7 +432,6 @@
"args": ["test"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
@@ -468,7 +447,6 @@
"args": ["test"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
},
@@ -484,7 +462,6 @@
"args": ["test"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
"BUN_INSPECT": "ws://localhost:0/",
@@ -506,7 +483,6 @@
"args": ["install"],
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
@@ -522,7 +498,6 @@
"args": ["test/runner.node.mjs"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
@@ -542,10 +517,6 @@
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -571,10 +542,6 @@
"args": ["test", "--only", "${file}"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -600,10 +567,6 @@
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -629,10 +592,6 @@
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "0",
@@ -658,10 +617,6 @@
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -696,10 +651,6 @@
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -735,10 +686,6 @@
"args": ["run", "${fileBasename}"],
"cwd": "${fileDirname}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -764,10 +711,6 @@
"args": ["install"],
"cwd": "${fileDirname}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -789,10 +732,6 @@
"args": ["run", "${fileBasename}"],
"cwd": "${fileDirname}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -814,10 +753,6 @@
"args": ["run", "${fileBasename}"],
"cwd": "${fileDirname}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -848,10 +783,6 @@
"args": ["run", "${fileBasename}"],
"cwd": "${fileDirname}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -883,10 +814,6 @@
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -912,10 +839,6 @@
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -941,10 +864,6 @@
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "0",
@@ -970,10 +889,6 @@
"args": ["test", "--watch", "${input:testName}"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -999,10 +914,6 @@
"args": ["test", "--hot", "${input:testName}"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -1028,10 +939,6 @@
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -1066,10 +973,6 @@
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -1105,10 +1008,6 @@
"args": ["exec", "${input:testName}"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -1131,10 +1030,6 @@
"args": ["test"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -1156,10 +1051,6 @@
"args": ["test"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -1185,10 +1076,6 @@
"args": ["test"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -1223,10 +1110,6 @@
"args": ["test/runner.node.mjs"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",


@@ -63,7 +63,7 @@
"editor.tabSize": 4,
"editor.defaultFormatter": "xaver.clang-format",
},
"clangd.arguments": ["-header-insertion=never"],
"clangd.arguments": ["-header-insertion=never", "-no-unused-includes"],
// JavaScript
"prettier.enable": true,


@@ -327,6 +327,19 @@ pub fn build(b: *Build) !void {
.{ .os = .windows, .arch = .x86_64 },
});
}
// zig build enum-extractor
{
// const step = b.step("enum-extractor", "Extract enum definitions (invoked by a code generator)");
// const exe = b.addExecutable(.{
// .name = "enum_extractor",
// .root_source_file = b.path("./src/generated_enum_extractor.zig"),
// .target = b.graph.host,
// .optimize = .Debug,
// });
// const run = b.addRunArtifact(exe);
// step.dependOn(&run.step);
}
}
pub fn addMultiCheck(
@@ -414,6 +427,15 @@ pub fn addBunObject(b: *Build, opts: *BunBuildOptions) *Compile {
}
addInternalPackages(b, obj, opts);
obj.root_module.addImport("build_options", opts.buildOptionsModule(b));
const translate_plugin_api = b.addTranslateC(.{
.root_source_file = b.path("./packages/bun-native-bundler-plugin-api/bundler_plugin.h"),
.target = opts.target,
.optimize = opts.optimize,
.link_libc = true,
});
obj.root_module.addImport("bun-native-bundler-plugin-api", translate_plugin_api.createModule());
return obj;
}

bun.lockb (binary file not shown)


@@ -20,7 +20,7 @@ else()
setx(RELEASE OFF)
endif()
if(CMAKE_BUILD_TYPE MATCHES "Debug|RelWithDebInfo")
if(CMAKE_BUILD_TYPE MATCHES "Debug")
setx(DEBUG ON)
else()
setx(DEBUG OFF)


@@ -318,13 +318,13 @@ register_command(
TARGET
bun-bake-codegen
COMMENT
"Bundling Kit Runtime"
"Bundling Bake Runtime"
COMMAND
${BUN_EXECUTABLE}
run
${BUN_BAKE_RUNTIME_CODEGEN_SCRIPT}
--debug=${DEBUG}
--codegen_root=${CODEGEN_PATH}
--codegen-root=${CODEGEN_PATH}
SOURCES
${BUN_BAKE_RUNTIME_SOURCES}
${BUN_BAKE_RUNTIME_CODEGEN_SOURCES}
@@ -334,6 +334,39 @@ register_command(
${BUN_BAKE_RUNTIME_OUTPUTS}
)
set(BUN_BINDGEN_SCRIPT ${CWD}/src/codegen/bindgen.ts)
file(GLOB_RECURSE BUN_BINDGEN_SOURCES ${CONFIGURE_DEPENDS}
${CWD}/src/**/*.bind.ts
)
set(BUN_BINDGEN_CPP_OUTPUTS
${CODEGEN_PATH}/GeneratedBindings.cpp
)
set(BUN_BINDGEN_ZIG_OUTPUTS
${CWD}/src/bun.js/bindings/GeneratedBindings.zig
)
register_command(
TARGET
bun-binding-generator
COMMENT
"Processing \".bind.ts\" files"
COMMAND
${BUN_EXECUTABLE}
run
${BUN_BINDGEN_SCRIPT}
--debug=${DEBUG}
--codegen-root=${CODEGEN_PATH}
SOURCES
${BUN_BINDGEN_SOURCES}
${BUN_BINDGEN_SCRIPT}
OUTPUTS
${BUN_BINDGEN_CPP_OUTPUTS}
${BUN_BINDGEN_ZIG_OUTPUTS}
)
set(BUN_JS_SINK_SCRIPT ${CWD}/src/codegen/generate-jssink.ts)
set(BUN_JS_SINK_SOURCES
@@ -385,7 +418,6 @@ set(BUN_OBJECT_LUT_OUTPUTS
${CODEGEN_PATH}/NodeModuleModule.lut.h
)
macro(WEBKIT_ADD_SOURCE_DEPENDENCIES _source _deps)
set(_tmp)
get_source_file_property(_tmp ${_source} OBJECT_DEPENDS)
@@ -461,6 +493,7 @@ list(APPEND BUN_ZIG_SOURCES
${CWD}/build.zig
${CWD}/root.zig
${CWD}/root_wasm.zig
${BUN_BINDGEN_ZIG_OUTPUTS}
)
set(BUN_ZIG_GENERATED_SOURCES
@@ -482,7 +515,6 @@ endif()
set(BUN_ZIG_OUTPUT ${BUILD_PATH}/bun-zig.o)
if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm|ARM|arm64|ARM64|aarch64|AARCH64")
if(APPLE)
set(ZIG_CPU "apple_m1")
@@ -606,6 +638,7 @@ list(APPEND BUN_CPP_SOURCES
${BUN_JS_SINK_OUTPUTS}
${BUN_JAVASCRIPT_OUTPUTS}
${BUN_OBJECT_LUT_OUTPUTS}
${BUN_BINDGEN_CPP_OUTPUTS}
)
if(WIN32)


@@ -49,6 +49,8 @@ register_command(
CARGO_TERM_VERBOSE=true
CARGO_TERM_DIAGNOSTIC=true
CARGO_ENCODED_RUSTFLAGS=${RUSTFLAGS}
CARGO_HOME=${CARGO_HOME}
RUSTUP_HOME=${RUSTUP_HOME}
)
target_link_libraries(${bun} PRIVATE ${LOLHTML_LIBRARY})


@@ -1,15 +1,42 @@
if(DEFINED ENV{CARGO_HOME})
set(CARGO_HOME $ENV{CARGO_HOME})
elseif(CMAKE_HOST_WIN32)
set(CARGO_HOME $ENV{USERPROFILE}/.cargo)
if(NOT EXISTS ${CARGO_HOME})
set(CARGO_HOME $ENV{PROGRAMFILES}/Rust/cargo)
endif()
else()
set(CARGO_HOME $ENV{HOME}/.cargo)
endif()
if(DEFINED ENV{RUSTUP_HOME})
set(RUSTUP_HOME $ENV{RUSTUP_HOME})
elseif(CMAKE_HOST_WIN32)
set(RUSTUP_HOME $ENV{USERPROFILE}/.rustup)
if(NOT EXISTS ${RUSTUP_HOME})
set(RUSTUP_HOME $ENV{PROGRAMFILES}/Rust/rustup)
endif()
else()
set(RUSTUP_HOME $ENV{HOME}/.rustup)
endif()
find_command(
VARIABLE
CARGO_EXECUTABLE
COMMAND
cargo
PATHS
$ENV{HOME}/.cargo/bin
${CARGO_HOME}/bin
REQUIRED
OFF
)
if(EXISTS ${CARGO_EXECUTABLE})
if(CARGO_EXECUTABLE MATCHES "^${CARGO_HOME}")
setx(CARGO_HOME ${CARGO_HOME})
setx(RUSTUP_HOME ${RUSTUP_HOME})
endif()
return()
endif()


@@ -671,7 +671,7 @@ _bun() {
cmd)
local -a scripts_list
IFS=$'\n' scripts_list=($(SHELL=zsh bun getcompletes i))
scripts="scripts:scripts:(($scripts_list))"
scripts="scripts:scripts:((${scripts_list//:/\\\\:}))"
IFS=$'\n' files_list=($(SHELL=zsh bun getcompletes j))
main_commands=(
@@ -871,8 +871,8 @@ _bun_run_param_script_completion() {
IFS=$'\n' scripts_list=($(SHELL=zsh bun getcompletes s))
IFS=$'\n' bins=($(SHELL=zsh bun getcompletes b))
_alternative "scripts:scripts:(($scripts_list))"
_alternative "bin:bin:(($bins))"
_alternative "scripts:scripts:((${scripts_list//:/\\\\:}))"
_alternative "bin:bin:((${bins//:/\\\\:}))"
_alternative "files:file:_files -g '*.(js|ts|jsx|tsx|wasm)'"
}


@@ -546,6 +546,113 @@ export type ImportKind =
By design, the manifest is a simple JSON object that can easily be serialized or written to disk. It is also compatible with esbuild's [`metafile`](https://esbuild.github.io/api/#metafile) format. -->
### `env`
Controls how environment variables are handled during bundling. Internally, this uses `define` to inject environment variables into the bundle, but it makes it easier to specify which variables to inject.
#### `env: "inline"`
Injects environment variables into the bundled output by converting `process.env.FOO` references to string literals containing the actual environment variable values.
{% codetabs group="a" %}
```ts#JavaScript
await Bun.build({
entrypoints: ['./index.tsx'],
outdir: './out',
env: "inline",
})
```
```bash#CLI
$ FOO=bar BAZ=123 bun build ./index.tsx --outdir ./out --env inline
```
{% /codetabs %}
For the input below:
```js#input.js
console.log(process.env.FOO);
console.log(process.env.BAZ);
```
The generated bundle will contain the following code:
```js#output.js
console.log("bar");
console.log("123");
```
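Under the hood, per the note above that `env` is implemented with `define`, this is roughly equivalent to writing the `define` entries by hand. The snippet below is an illustrative sketch of that equivalence for the `FOO`/`BAZ` example, not the exact internal implementation:

```ts
// Roughly what `env: "inline"` does, expressed as a hand-written `define` map.
// Assumes FOO and BAZ are set in the build script's environment, as in the example above.
await Bun.build({
  entrypoints: ["./index.tsx"],
  outdir: "./out",
  define: {
    "process.env.FOO": JSON.stringify(process.env.FOO), // becomes the literal "bar"
    "process.env.BAZ": JSON.stringify(process.env.BAZ), // becomes the literal "123"
  },
});
```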
#### `env: "PUBLIC_*"` (prefix)
Inlines environment variables matching the given prefix (the part before the `*` character), replacing `process.env.FOO` with the actual environment variable value. This is useful for selectively inlining environment variables for things like public-facing URLs or client-side tokens, without worrying about injecting private credentials into output bundles.
{% codetabs group="a" %}
```ts#JavaScript
await Bun.build({
entrypoints: ['./index.tsx'],
outdir: './out',
// Inline all env vars that start with "ACME_PUBLIC_"
env: "ACME_PUBLIC_*",
})
```
```bash#CLI
$ FOO=bar BAZ=123 ACME_PUBLIC_URL=https://acme.com bun build ./index.tsx --outdir ./out --env 'ACME_PUBLIC_*'
```
{% /codetabs %}
For example, given the following environment variables:
```bash
$ FOO=bar BAZ=123 ACME_PUBLIC_URL=https://acme.com
```
And source code:
```ts#index.tsx
console.log(process.env.FOO);
console.log(process.env.ACME_PUBLIC_URL);
console.log(process.env.BAZ);
```
The generated bundle will contain the following code:
```js
console.log(process.env.FOO);
console.log("https://acme.com");
console.log(process.env.BAZ);
```
#### `env: "disable"`
Disables environment variable injection entirely.
For example, given the following environment variables:
```bash
$ FOO=bar BAZ=123 ACME_PUBLIC_URL=https://acme.com
```
And source code:
```ts#index.tsx
console.log(process.env.FOO);
console.log(process.env.ACME_PUBLIC_URL);
console.log(process.env.BAZ);
```
The generated bundle will contain the following code:
```js
console.log(process.env.FOO);
console.log(process.env.ACME_PUBLIC_URL);
console.log(process.env.BAZ);
```
### `sourcemap`
Specifies the type of sourcemap to generate.


@@ -16,7 +16,7 @@ Set these variables in a `.env` file.
Bun reads the following files automatically (listed in order of increasing precedence).
- `.env`
- `.env.production` or `.env.development` (depending on value of `NODE_ENV`)
- `.env.production`, `.env.development`, `.env.test` (depending on value of `NODE_ENV`)
- `.env.local`
```txt#.env


@@ -30,7 +30,6 @@ Bun implements the vast majority of Jest's matchers, but compatibility isn't 100
Some notable missing features:
- `expect().toMatchInlineSnapshot()`
- `expect().toHaveReturned()`
---


@@ -4,10 +4,6 @@ name: Use snapshot testing in `bun test`
Bun's test runner supports Jest-style snapshot testing via `.toMatchSnapshot()`.
{% callout %}
The `.toMatchInlineSnapshot()` method is not yet supported.
{% /callout %}
```ts#snap.test.ts
import { test, expect } from "bun:test";
@@ -96,4 +92,4 @@ Ran 1 tests across 1 files. [102.00ms]
---
See [Docs > Test Runner > Snapshots](https://bun.sh/docs/test/mocks) for complete documentation on mocking with the Bun test runner.
See [Docs > Test Runner > Snapshots](https://bun.sh/docs/test/snapshots) for complete documentation on snapshots with the Bun test runner.


@@ -49,7 +49,7 @@ Next, add these preload scripts to your `bunfig.toml` (you can also have everyth
```toml#bunfig.toml
[test]
preload = ["happydom.ts", "testing-library.ts"]
preload = ["./happydom.ts", "./testing-library.ts"]
```
---
@@ -84,4 +84,4 @@ test('Can use Testing Library', () => {
---
Refer to the [Testing Library docs](https://testing-library.com/), [Happy DOM repo](https://github.com/capricorn86/happy-dom) and [Docs > Test runner > DOM](https://bun.sh/docs/test/dom) for complete documentation on writing browser tests with Bun.


@@ -4,10 +4,6 @@ name: Update snapshots in `bun test`
Bun's test runner supports Jest-style snapshot testing via `.toMatchSnapshot()`.
{% callout %}
The `.toMatchInlineSnapshot()` method is not yet supported.
{% /callout %}
```ts#snap.test.ts
import { test, expect } from "bun:test";
@@ -47,4 +43,4 @@ Ran 1 tests across 1 files. [102.00ms]
---
See [Docs > Test Runner > Snapshots](https://bun.sh/docs/test/mocks) for complete documentation on mocking with the Bun test runner.
See [Docs > Test Runner > Snapshots](https://bun.sh/docs/test/snapshots) for complete documentation on snapshots with the Bun test runner.


@@ -402,6 +402,9 @@ export default {
page("project/building-windows", "Building Windows", {
description: "Learn how to setup a development environment for contributing to the Windows build of Bun.",
}),
page("project/bindgen", "Bindgen", {
description: "About the bindgen code generator",
}),
page("project/licensing", "License", {
description: `Bun is a MIT-licensed project with a large number of statically-linked dependencies with various licenses.`,
}),

docs/project/bindgen.md Normal file

@@ -0,0 +1,199 @@
{% callout %}
This document is for maintainers and contributors to Bun, and describes internal implementation details.
{% /callout %}
The new bindings generator, introduced to the codebase in December 2024, scans for
`*.bind.ts` files to find function and class definitions, and generates glue code to
interop between JavaScript and native code.
There are currently other code generators and systems that achieve similar
purposes. The following will all eventually be completely phased out in favor of
this one:
- "Classes generator", converting `*.classes.ts` for custom classes.
- "JS2Native", allowing ad-hoc calls from `src/js` to native code.
## Creating JS Functions in Zig
Given a file implementing a simple function, such as `add`
```zig#src/bun.js/math.zig
pub fn add(global: *JSC.JSGlobalObject, a: i32, b: i32) !i32 {
return std.math.add(i32, a, b) catch {
// Binding functions can return `error.OutOfMemory` and `error.JSError`.
// Others like `error.Overflow` from `std.math.add` must be converted.
// Remember to be descriptive.
return global.throwPretty("Integer overflow while adding", .{});
};
}
const gen = bun.gen.math; // "math" being this file's basename
const std = @import("std");
const bun = @import("root").bun;
const JSC = bun.JSC;
```
Then, describe the API schema in a `.bind.ts` file. The binding file goes next to the Zig file.
```ts#src/bun.js/math.bind.ts
import { t, fn } from 'bindgen';
export const add = fn({
args: {
global: t.globalObject,
a: t.i32,
b: t.i32.default(1),
},
ret: t.i32,
});
```
This function declaration is equivalent to:
```ts
/**
* Throws if zero arguments are provided.
* Wraps out of range numbers using modulo.
*/
declare function add(a: number, b: number = 1): number;
```
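For illustration, assuming the generated `add` binding has been exposed to JavaScript (for example via the generated callback helper described just below), calls would behave like this hypothetical usage sketch:

```ts
// Hypothetical JavaScript usage of the generated `add` binding.
add(2, 3); // 5
add(2);    // 3, because `b` defaults to 1
add();     // throws, because zero arguments are not allowed
// Out-of-range numbers are wrapped using modulo before reaching native code,
// per the doc comment above.
```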
The code generator will provide `bun.gen.math.jsAdd`, which is the native function implementation. To pass it to JavaScript, use `bun.gen.math.createAddCallback(global)`.
## Strings
The type for receiving strings is one of [`t.DOMString`](https://webidl.spec.whatwg.org/#idl-DOMString), [`t.ByteString`](https://webidl.spec.whatwg.org/#idl-ByteString), and [`t.USVString`](https://webidl.spec.whatwg.org/#idl-USVString). These map directly to their WebIDL counterparts, and have slightly different conversion logic. Bindgen will pass BunString to native code in all cases.
When in doubt, use DOMString.
`t.UTF8String` can be used in place of `t.DOMString`, but it will call `bun.String.toUTF8`. The native callback receives `[]const u8` (WTF-8 data), which is freed after the function returns.
TL;DR from the WebIDL spec (a small schema sketch follows this list):
- ByteString can only contain valid Latin-1 characters. It is not safe to assume a `bun.String` is already in 8-bit format, but it is extremely likely.
- USVString will not contain unpaired surrogates, i.e. it is text that can be represented correctly in UTF-8.
- DOMString is the loosest but also the most recommended strategy.
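A minimal schema sketch showing how the string types above might be chosen; the `slugify` function and its arguments are made up for illustration:

```ts
import { t, fn } from "bindgen";

export const slugify = fn({
  args: {
    global: t.globalObject,
    // DOMString: the default, loosest choice; native code receives a bun.String.
    title: t.DOMString,
    // UTF8String: native code receives WTF-8 bytes instead of a bun.String.
    separator: t.UTF8String.default("-"),
  },
  ret: t.DOMString,
});
```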
## Function Variants
A function can specify multiple `variants` (also known as overloads).
```ts#src/bun.js/math.bind.ts
import { t, fn } from 'bindgen';
export const action = fn({
variants: [
{
args: {
a: t.i32,
},
ret: t.i32,
},
{
args: {
a: t.DOMString,
},
ret: t.DOMString,
},
]
});
```
In Zig, each variant gets a numeric suffix, based on the order in which the schema defines them.
```zig
fn action1(a: i32) i32 {
return a;
}
fn action2(a: bun.String) bun.String {
return a;
}
```
## `t.dictionary`
A `dictionary` is a definition for a plain JavaScript object, typically used as a function input. For function outputs, it is usually smarter to declare a class type, which can add methods and support destructuring.
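A rough, hypothetical sketch of a dictionary used as a function input follows; the exact `t.dictionary` call signature is an assumption here, only the attribute names (`.required`, `.default`) come from this document:

```ts
import { t, fn } from "bindgen";

// Assumed shape: `t.dictionary` takes a map of member types (hypothetical).
const ScanOptions = t.dictionary({
  path: t.DOMString.required, // `.required` is valid in dictionary members only
  maxDepth: t.i32.default(1),
});

export const scan = fn({
  args: {
    global: t.globalObject,
    options: ScanOptions,
  },
  ret: t.i32,
});
```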
## Enumerations
To use [WebIDL's enumeration](https://webidl.spec.whatwg.org/#idl-enums) type, use either:
- `t.stringEnum`: Create and codegen a new enum type.
- `t.zigEnum`: Derive a bindgen type off of an existing enum in the codebase.
An example of `stringEnum`, as used in `fmt.zig` / `bun:internal-for-testing`:
```ts
export const Formatter = t.stringEnum(
"highlight-javascript",
"escape-powershell",
);
export const fmtString = fn({
args: {
global: t.globalObject,
code: t.UTF8String,
formatter: Formatter,
},
ret: t.DOMString,
});
```
WebIDL strongly encourages using kebab case for enumeration values, to be consistent with existing Web APIs.
### Deriving enums from Zig code
TODO: zigEnum
## `t.oneOf`
A `oneOf` is a union between two or more types. It is represented by `union(enum)` in Zig.
TODO:
## Attributes
There is a set of attributes that can be chained onto `t.*` types. Available on all types are:
- `.required`, in dictionary parameters only
- `.optional`, in function arguments only
- `.default(T)`
When a value is optional, it is lowered to a Zig optional.
Depending on the type, more attributes are available; see the type definitions in auto-complete for details. Note that only one of the above three can be applied, and it must come at the end of the chain.
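As a hedged illustration of attribute placement (the `greet` function below is made up; the attributes and types are those listed in this document):

```ts
import { t, fn } from "bindgen";

export const greet = fn({
  args: {
    global: t.globalObject,
    // `.optional` terminates the chain; on the Zig side this lowers to an optional.
    name: t.DOMString.optional,
    // `.default(...)` also terminates the chain.
    punctuation: t.DOMString.default("!"),
  },
  ret: t.DOMString,
});
```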
### Integer Attributes
Integer types allow customizing the overflow behavior with `clamp` or `enforceRange`
```ts
import { t, fn } from "bindgen";
export const add = fn({
args: {
global: t.globalObject,
// enforce in i32 range
a: t.i32.enforceRange(),
// clamp to u16 range
c: t.u16,
// enforce in arbitrary range, with a default if not provided
b: t.i32.enforceRange(0, 1000).default(5),
// clamp to arbitrary range, or null
d: t.u16.clamp(0, 10).optional,
},
ret: t.i32,
});
```
## Callbacks
TODO
## Classes
TODO


@@ -355,7 +355,7 @@ Bun.build({
{% /callout %}
## Lifecycle callbacks
## Lifecycle hooks
Plugins can register callbacks to be run at various points in the lifecycle of a bundle:
@@ -363,6 +363,8 @@ Plugins can register callbacks to be run at various points in the lifecycle of a
- [`onResolve()`](#onresolve): Run before a module is resolved
- [`onLoad()`](#onload): Run before a module is loaded.
### Reference
A rough overview of the types (please refer to Bun's `bun.d.ts` for the full type definitions):
```ts
@@ -603,3 +605,98 @@ plugin({
```
Note that the `.defer()` function currently has the limitation that it can only be called once per `onLoad` callback.
## Native plugins
{% callout %}
**NOTE** — This is an advanced and experimental API recommended for plugin developers who are familiar with systems programming and the C ABI. Use with caution.
{% /callout %}
One of the reasons why Bun's bundler is so fast is that it is written in native code and leverages multi-threading to load and parse modules in parallel.
However, one limitation of plugins written in JavaScript is that JavaScript itself is single-threaded.
Native plugins are written as [NAPI](/docs/node-api) modules and can be run on multiple threads. This allows native plugins to run much faster than JavaScript plugins.
In addition, native plugins can skip unnecessary work such as the UTF-8 -> UTF-16 conversion needed to pass strings to JavaScript.
The following lifecycle hooks are available to native plugins:
- [`onBeforeParse()`](#onbeforeparse): Called on any thread before a file is parsed by Bun's bundler.
### Creating a native plugin
Native plugins are NAPI modules which expose lifecycle hooks as C ABI functions.
To create a native plugin, you must export a C ABI function which matches the signature of the native lifecycle hook you want to implement.
#### Example: Rust with napi-rs
First, initialize a napi project (see [here](https://napi.rs/docs/introduction/getting-started) for a more comprehensive guide).
Then install Bun's official safe plugin wrapper crate:
```bash
cargo add bun-native-plugin
```
Now you can export an `extern "C" fn` which is the implementation of your plugin:
```rust
#[no_mangle]
extern "C" fn on_before_parse_impl(
args: *const bun_native_plugin::sys::OnBeforeParseArguments,
result: *mut bun_native_plugin::sys::OnBeforeParseResult,
) {
let args = unsafe { &*args };
let result = unsafe { &mut *result };
let mut handle = match bun_native_plugin::OnBeforeParse::from_raw(args, result) {
Ok(handle) => handle,
Err(_) => {
return;
}
};
let source_code = match handle.input_source_code() {
Ok(source_code) => source_code,
Err(_) => {
handle.log_error("Fetching source code failed!");
return;
}
};
let loader = handle.output_loader();
handle.set_output_source_code(source_code.replace("foo", "bar"), loader);
}
```
Use napi-rs to compile the plugin to a `.node` file, then you can `require()` it from JS and use it:
```js
await Bun.build({
  entrypoints: ["index.ts"],
  plugins: [
    {
      // `Bun.build()` takes plugin objects; the name here is illustrative.
      name: "my-native-plugin",
      setup(build) {
        const myNativePlugin = require("./path/to/plugin.node");
        build.onBeforeParse(
          { filter: /\.ts/ },
          { napiModule: myNativePlugin, symbol: "on_before_parse_impl" },
        );
      },
    },
  ],
});
```
### `onBeforeParse`
```ts
onBeforeParse(
args: { filter: RegExp; namespace?: string },
callback: { napiModule: NapiModule; symbol: string; external?: unknown },
): void;
```
This lifecycle callback is run immediately before a file is parsed by Bun's bundler.
As input, it receives the file's contents and can optionally return new source code.
This callback can be called from any thread and so the napi module implementation must be thread-safe.


@@ -531,17 +531,17 @@ Bun implements the following matchers. Full Jest compatibility is on the roadmap
---
-
-
- [`.toMatchInlineSnapshot()`](https://jestjs.io/docs/expect#tomatchinlinesnapshotpropertymatchers-inlinesnapshot)
---
-
-
- [`.toThrowErrorMatchingSnapshot()`](https://jestjs.io/docs/expect#tothrowerrormatchingsnapshothint)
---
-
-
- [`.toThrowErrorMatchingInlineSnapshot()`](https://jestjs.io/docs/expect#tothrowerrormatchinginlinesnapshotinlinesnapshot)
{% /table %}


@@ -21,7 +21,7 @@
"react": "^18.3.1",
"react-dom": "^18.3.1",
"source-map-js": "^1.2.0",
"typescript": "^5.4.5",
"typescript": "^5.7.2",
"caniuse-lite": "^1.0.30001620",
"autoprefixer": "^10.4.19",
"@mdn/browser-compat-data": "~5.5.28"
@@ -30,8 +30,8 @@
"bun-types": "workspace:packages/bun-types"
},
"scripts": {
"build": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -B build/debug",
"build:debug": "bun run build",
"build": "bun run build:debug",
"build:debug": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -B build/debug",
"build:valgrind": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -DENABLE_BASELINE=ON -ENABLE_VALGRIND=ON -B build/debug-valgrind",
"build:release": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -B build/release",
"build:ci": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -DCMAKE_VERBOSE_MAKEFILE=ON -DCI=true -B build/release-ci --verbose --fresh",
@@ -39,8 +39,8 @@
"build:logs": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -DENABLE_LOGS=ON -B build/release-logs",
"build:safe": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -DZIG_OPTIMIZE=ReleaseSafe -B build/release-safe",
"build:smol": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=MinSizeRel -B build/release-smol",
"build:local": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -DWEBKIT_LOCAL=ON -B build/debug",
"build:release:local": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -DWEBKIT_LOCAL=ON -B build/release",
"build:local": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -DWEBKIT_LOCAL=ON -B build/debug-local",
"build:release:local": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -DWEBKIT_LOCAL=ON -B build/release-local",
"build:release:with_logs": "cmake . -DCMAKE_BUILD_TYPE=Release -DENABLE_LOGS=true -GNinja -Bbuild-release && ninja -Cbuild-release",
"build:debug-zig-release": "cmake . -DCMAKE_BUILD_TYPE=Release -DZIG_OPTIMIZE=Debug -GNinja -Bbuild-debug-zig-release && ninja -Cbuild-debug-zig-release",
"css-properties": "bun run src/css/properties/generate_properties.ts",
@@ -73,6 +73,7 @@
"prettier": "bun run analysis:no-llvm --target prettier",
"prettier:check": "bun run analysis:no-llvm --target prettier-check",
"prettier:extra": "bun run analysis:no-llvm --target prettier-extra",
"prettier:diff": "bun run analysis:no-llvm --target prettier-diff"
"prettier:diff": "bun run analysis:no-llvm --target prettier-diff",
"node:test": "node ./scripts/runner.node.mjs --quiet --exec-path=$npm_execpath --node-tests "
}
}


@@ -0,0 +1,5 @@
[target.aarch64-unknown-linux-musl]
linker = "aarch64-linux-musl-gcc"
rustflags = ["-C", "target-feature=-crt-static"]
[target.x86_64-pc-windows-msvc]
rustflags = ["-C", "target-feature=+crt-static"]

packages/bun-build-mdx-rs/.gitignore vendored Normal file

@@ -0,0 +1,202 @@
# Created by https://www.toptal.com/developers/gitignore/api/node
# Edit at https://www.toptal.com/developers/gitignore?templates=node
### Node ###
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
*.lcov
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
jspm_packages/
# TypeScript v1 declaration files
typings/
# TypeScript cache
*.tsbuildinfo
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variables file
.env
.env.test
# parcel-bundler cache (https://parceljs.org/)
.cache
# Next.js build output
.next
# Nuxt.js build / generate output
.nuxt
dist
# Gatsby files
.cache/
# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public
# vuepress build output
.vuepress/dist
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# TernJS port file
.tern-port
# Stores VSCode versions used for testing VSCode extensions
.vscode-test
# End of https://www.toptal.com/developers/gitignore/api/node
# Created by https://www.toptal.com/developers/gitignore/api/macos
# Edit at https://www.toptal.com/developers/gitignore?templates=macos
### macOS ###
# General
.DS_Store
.AppleDouble
.LSOverride
# Icon must end with two
Icon
# Thumbnails
._*
# Files that might appear in the root of a volume
.DocumentRevisions-V100
.fseventsd
.Spotlight-V100
.TemporaryItems
.Trashes
.VolumeIcon.icns
.com.apple.timemachine.donotpresent
# Directories potentially created on remote AFP share
.AppleDB
.AppleDesktop
Network Trash Folder
Temporary Items
.apdisk
### macOS Patch ###
# iCloud generated files
*.icloud
# End of https://www.toptal.com/developers/gitignore/api/macos
# Created by https://www.toptal.com/developers/gitignore/api/windows
# Edit at https://www.toptal.com/developers/gitignore?templates=windows
### Windows ###
# Windows thumbnail cache files
Thumbs.db
Thumbs.db:encryptable
ehthumbs.db
ehthumbs_vista.db
# Dump file
*.stackdump
# Folder config file
[Dd]esktop.ini
# Recycle Bin used on file shares
$RECYCLE.BIN/
# Windows Installer files
*.cab
*.msi
*.msix
*.msm
*.msp
# Windows shortcuts
*.lnk
# End of https://www.toptal.com/developers/gitignore/api/windows
#Added by cargo
/target
Cargo.lock
.pnp.*
.yarn/*
!.yarn/patches
!.yarn/plugins
!.yarn/releases
!.yarn/sdks
!.yarn/versions
*.node
dist/
index.js
index.d.ts


@@ -0,0 +1,13 @@
target
Cargo.lock
.cargo
.github
npm
.eslintrc
.prettierignore
rustfmt.toml
yarn.lock
*.node
.yarn
__test__
renovate.json


@@ -0,0 +1,21 @@
[package]
edition = "2021"
name = "bun-mdx-rs"
version = "0.0.0"
[lib]
crate-type = ["cdylib"]
[dependencies]
# Default enable napi4 feature, see https://nodejs.org/api/n-api.html#node-api-version-matrix
napi = { version = "2.12.2", default-features = false, features = ["napi4"] }
napi-derive = "2.12.2"
mdxjs = "0.2.11"
bun-native-plugin = { path = "../bun-native-plugin-rs" }
[build-dependencies]
napi-build = "2.0.1"
[profile.release]
lto = true
strip = "symbols"


@@ -0,0 +1,34 @@
# bun-build-mdx-rs
This is a proof of concept for using a third-party native addon in `Bun.build()`.
This uses `mdxjs-rs` to convert MDX to JSX.
TODO: **This needs to be built & published to npm.**
## Building locally:
```sh
cargo build --release
```
```js
import { build } from "bun";
import mdx from "./index.js";
// TODO: This needs to be prebuilt for the current platform
// Probably use a napi-rs template for this
import addon from "./target/release/libmdx_bun.dylib" with { type: "file" };
const results = await build({
entrypoints: ["./hello.jsx"],
plugins: [mdx({ addon })],
minify: true,
outdir: "./dist",
define: {
"process.env.NODE_ENV": JSON.stringify("production"),
},
});
console.log(results);
```


@@ -0,0 +1,7 @@
import test from 'ava'
import { sum } from '../index.js'
test('sum from native', (t) => {
t.is(sum(1, 2), 3)
})


@@ -0,0 +1,5 @@
extern crate napi_build;
fn main() {
napi_build::setup();
}


@@ -0,0 +1,6 @@
import page1 from "./page1.mdx";
import page2 from "./page2.mdx";
import page3 from "./page3.mdx";
import page4 from "./page4.mdx";
console.log(page1, page2, page3, page4);


@@ -0,0 +1,11 @@
# Hello World
This is a sample MDX file that demonstrates various MDX features.
## Components
You can use JSX components directly in MDX:
<Button onClick={() => alert("Hello!")}>Click me</Button>
## Code Blocks


@@ -0,0 +1,11 @@
# Hello World
This is a sample MDX file that demonstrates various MDX features.
## Components
You can use JSX components directly in MDX:
<Button onClick={() => alert("Hello!")}>Click me</Button>
## Code Blocks


@@ -0,0 +1,11 @@
# Hello World
This is a sample MDX file that demonstrates various MDX features.
## Components
You can use JSX components directly in MDX:
<Button onClick={() => alert("Hello!")}>Click me</Button>
## Code Blocks


@@ -0,0 +1,11 @@
# Hello World
This is a sample MDX file that demonstrates various MDX features.
## Components
You can use JSX components directly in MDX:
<Button onClick={() => alert("Hello!")}>Click me</Button>
## Code Blocks


@@ -0,0 +1,3 @@
# `bun-mdx-rs-darwin-arm64`
This is the **aarch64-apple-darwin** binary for `bun-mdx-rs`


@@ -0,0 +1,18 @@
{
"name": "bun-mdx-rs-darwin-arm64",
"version": "0.0.0",
"os": [
"darwin"
],
"cpu": [
"arm64"
],
"main": "bun-mdx-rs.darwin-arm64.node",
"files": [
"bun-mdx-rs.darwin-arm64.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
}
}


@@ -0,0 +1,3 @@
# `bun-mdx-rs-darwin-x64`
This is the **x86_64-apple-darwin** binary for `bun-mdx-rs`


@@ -0,0 +1,18 @@
{
"name": "bun-mdx-rs-darwin-x64",
"version": "0.0.0",
"os": [
"darwin"
],
"cpu": [
"x64"
],
"main": "bun-mdx-rs.darwin-x64.node",
"files": [
"bun-mdx-rs.darwin-x64.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
}
}


@@ -0,0 +1,3 @@
# `bun-mdx-rs-linux-arm64-gnu`
This is the **aarch64-unknown-linux-gnu** binary for `bun-mdx-rs`


@@ -0,0 +1,21 @@
{
"name": "bun-mdx-rs-linux-arm64-gnu",
"version": "0.0.0",
"os": [
"linux"
],
"cpu": [
"arm64"
],
"main": "bun-mdx-rs.linux-arm64-gnu.node",
"files": [
"bun-mdx-rs.linux-arm64-gnu.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
},
"libc": [
"glibc"
]
}


@@ -0,0 +1,3 @@
# `bun-mdx-rs-linux-arm64-musl`
This is the **aarch64-unknown-linux-musl** binary for `bun-mdx-rs`


@@ -0,0 +1,21 @@
{
"name": "bun-mdx-rs-linux-arm64-musl",
"version": "0.0.0",
"os": [
"linux"
],
"cpu": [
"arm64"
],
"main": "bun-mdx-rs.linux-arm64-musl.node",
"files": [
"bun-mdx-rs.linux-arm64-musl.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
},
"libc": [
"musl"
]
}


@@ -0,0 +1,3 @@
# `bun-mdx-rs-linux-x64-gnu`
This is the **x86_64-unknown-linux-gnu** binary for `bun-mdx-rs`


@@ -0,0 +1,21 @@
{
"name": "bun-mdx-rs-linux-x64-gnu",
"version": "0.0.0",
"os": [
"linux"
],
"cpu": [
"x64"
],
"main": "bun-mdx-rs.linux-x64-gnu.node",
"files": [
"bun-mdx-rs.linux-x64-gnu.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
},
"libc": [
"glibc"
]
}


@@ -0,0 +1,3 @@
# `bun-mdx-rs-linux-x64-musl`
This is the **x86_64-unknown-linux-musl** binary for `bun-mdx-rs`


@@ -0,0 +1,21 @@
{
"name": "bun-mdx-rs-linux-x64-musl",
"version": "0.0.0",
"os": [
"linux"
],
"cpu": [
"x64"
],
"main": "bun-mdx-rs.linux-x64-musl.node",
"files": [
"bun-mdx-rs.linux-x64-musl.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
},
"libc": [
"musl"
]
}


@@ -0,0 +1,3 @@
# `bun-mdx-rs-win32-x64-msvc`
This is the **x86_64-pc-windows-msvc** binary for `bun-mdx-rs`


@@ -0,0 +1,18 @@
{
"name": "bun-mdx-rs-win32-x64-msvc",
"version": "0.0.0",
"os": [
"win32"
],
"cpu": [
"x64"
],
"main": "bun-mdx-rs.win32-x64-msvc.node",
"files": [
"bun-mdx-rs.win32-x64-msvc.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
}
}


@@ -0,0 +1,37 @@
{
"name": "bun-mdx-rs",
"version": "0.0.0",
"main": "index.js",
"types": "index.d.ts",
"napi": {
"name": "bun-mdx-rs",
"triples": {
"additional": [
"aarch64-apple-darwin",
"aarch64-unknown-linux-gnu",
"aarch64-unknown-linux-musl",
"x86_64-unknown-linux-musl"
]
}
},
"license": "MIT",
"devDependencies": {
"@napi-rs/cli": "^2.18.4",
"ava": "^6.0.1"
},
"ava": {
"timeout": "3m"
},
"engines": {
"node": ">= 10"
},
"scripts": {
"artifacts": "napi artifacts",
"build": "napi build --platform --release",
"build:debug": "napi build --platform",
"prepublishOnly": "napi prepublish -t npm",
"test": "ava",
"universal": "napi universal",
"version": "napi version"
}
}


@@ -0,0 +1,2 @@
tab_spaces = 2
edition = "2021"


@@ -0,0 +1,55 @@
use bun_native_plugin::{define_bun_plugin, BunLoader, OnBeforeParse};
use mdxjs::{compile, Options as CompileOptions};
use napi_derive::napi;
#[macro_use]
extern crate napi;
define_bun_plugin!("bun-mdx-rs");
#[no_mangle]
pub extern "C" fn bun_mdx_rs(
args: *const bun_native_plugin::sys::OnBeforeParseArguments,
result: *mut bun_native_plugin::sys::OnBeforeParseResult,
) {
let args = unsafe { &*args };
let mut handle = match OnBeforeParse::from_raw(args, result) {
Ok(handle) => handle,
Err(_) => {
return;
}
};
let source_str = match handle.input_source_code() {
Ok(source_str) => source_str,
Err(_) => {
handle.log_error("Failed to fetch source code");
return;
}
};
let mut options = CompileOptions::gfm();
// Leave it as JSX for Bun to handle
options.jsx = true;
let path = match handle.path() {
Ok(path) => path,
Err(e) => {
handle.log_error(&format!("Failed to get path: {:?}", e));
return;
}
};
options.filepath = Some(path.to_string());
match compile(&source_str, &options) {
Ok(compiled) => {
handle.set_output_source_code(compiled, BunLoader::BUN_LOADER_JSX);
}
Err(_) => {
handle.log_error("Failed to compile MDX");
return;
}
}
}


@@ -0,0 +1,73 @@
#ifndef BUN_NATIVE_BUNDLER_PLUGIN_API_H
#define BUN_NATIVE_BUNDLER_PLUGIN_API_H
#include <stddef.h>
#include <stdint.h>
typedef enum {
BUN_LOADER_JSX = 0,
BUN_LOADER_JS = 1,
BUN_LOADER_TS = 2,
BUN_LOADER_TSX = 3,
BUN_LOADER_CSS = 4,
BUN_LOADER_FILE = 5,
BUN_LOADER_JSON = 6,
BUN_LOADER_TOML = 7,
BUN_LOADER_WASM = 8,
BUN_LOADER_NAPI = 9,
BUN_LOADER_BASE64 = 10,
BUN_LOADER_DATAURL = 11,
BUN_LOADER_TEXT = 12,
} BunLoader;
const BunLoader BUN_LOADER_MAX = BUN_LOADER_TEXT;
typedef struct BunLogOptions {
size_t __struct_size;
const uint8_t *message_ptr;
size_t message_len;
const uint8_t *path_ptr;
size_t path_len;
const uint8_t *source_line_text_ptr;
size_t source_line_text_len;
int8_t level;
int line;
int lineEnd;
int column;
int columnEnd;
} BunLogOptions;
typedef struct {
size_t __struct_size;
void *bun;
const uint8_t *path_ptr;
size_t path_len;
const uint8_t *namespace_ptr;
size_t namespace_len;
uint8_t default_loader;
void *external;
} OnBeforeParseArguments;
typedef struct OnBeforeParseResult {
size_t __struct_size;
uint8_t *source_ptr;
size_t source_len;
uint8_t loader;
int (*fetchSourceCode)(const OnBeforeParseArguments *args,
struct OnBeforeParseResult *result);
void *plugin_source_code_context;
void (*free_plugin_source_code_context)(void *ctx);
void (*log)(const OnBeforeParseArguments *args, BunLogOptions *options);
} OnBeforeParseResult;
typedef enum {
BUN_LOG_LEVEL_VERBOSE = 0,
BUN_LOG_LEVEL_DEBUG = 1,
BUN_LOG_LEVEL_INFO = 2,
BUN_LOG_LEVEL_WARN = 3,
BUN_LOG_LEVEL_ERROR = 4,
} BunLogLevel;
const BunLogLevel BUN_LOG_MAX = BUN_LOG_LEVEL_ERROR;
#endif // BUN_NATIVE_BUNDLER_PLUGIN_API_H


@@ -0,0 +1 @@
target/

packages/bun-native-plugin-rs/Cargo.lock generated Normal file

@@ -0,0 +1,286 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "aho-corasick"
version = "1.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916"
dependencies = [
"memchr",
]
[[package]]
name = "bindgen"
version = "0.70.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f49d8fed880d473ea71efb9bf597651e77201bdd4893efe54c9e5d65ae04ce6f"
dependencies = [
"bitflags",
"cexpr",
"clang-sys",
"itertools",
"log",
"prettyplease",
"proc-macro2",
"quote",
"regex",
"rustc-hash",
"shlex",
"syn",
]
[[package]]
name = "bitflags"
version = "2.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de"
[[package]]
name = "bun-native-plugin"
version = "0.1.0"
dependencies = [
"bindgen",
]
[[package]]
name = "cexpr"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766"
dependencies = [
"nom",
]
[[package]]
name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "clang-sys"
version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b023947811758c97c59bf9d1c188fd619ad4718dcaa767947df1cadb14f39f4"
dependencies = [
"glob",
"libc",
"libloading",
]
[[package]]
name = "either"
version = "1.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0"
[[package]]
name = "glob"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b"
[[package]]
name = "itertools"
version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186"
dependencies = [
"either",
]
[[package]]
name = "libc"
version = "0.2.166"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c2ccc108bbc0b1331bd061864e7cd823c0cab660bbe6970e66e2c0614decde36"
[[package]]
name = "libloading"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4"
dependencies = [
"cfg-if",
"windows-targets",
]
[[package]]
name = "log"
version = "0.4.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24"
[[package]]
name = "memchr"
version = "2.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
[[package]]
name = "minimal-lexical"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
[[package]]
name = "nom"
version = "7.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a"
dependencies = [
"memchr",
"minimal-lexical",
]
[[package]]
name = "prettyplease"
version = "0.2.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "64d1ec885c64d0457d564db4ec299b2dae3f9c02808b8ad9c3a089c591b18033"
dependencies = [
"proc-macro2",
"syn",
]
[[package]]
name = "proc-macro2"
version = "1.0.92"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.37"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af"
dependencies = [
"proc-macro2",
]
[[package]]
name = "regex"
version = "1.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191"
dependencies = [
"aho-corasick",
"memchr",
"regex-automata",
"regex-syntax",
]
[[package]]
name = "regex-automata"
version = "0.4.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908"
dependencies = [
"aho-corasick",
"memchr",
"regex-syntax",
]
[[package]]
name = "regex-syntax"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
[[package]]
name = "rustc-hash"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
[[package]]
name = "shlex"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
[[package]]
name = "syn"
version = "2.0.89"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "44d46482f1c1c87acd84dea20c1bf5ebff4c757009ed6bf19cfd36fb10e92c4e"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "unicode-ident"
version = "1.0.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83"
[[package]]
name = "windows-targets"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
dependencies = [
"windows_aarch64_gnullvm",
"windows_aarch64_msvc",
"windows_i686_gnu",
"windows_i686_gnullvm",
"windows_i686_msvc",
"windows_x86_64_gnu",
"windows_x86_64_gnullvm",
"windows_x86_64_msvc",
]
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
[[package]]
name = "windows_aarch64_msvc"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
[[package]]
name = "windows_i686_gnu"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
[[package]]
name = "windows_i686_gnullvm"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
[[package]]
name = "windows_i686_msvc"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
[[package]]
name = "windows_x86_64_gnu"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
[[package]]
name = "windows_x86_64_msvc"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"

View File

@@ -0,0 +1,7 @@
[package]
name = "bun-native-plugin"
version = "0.1.0"
edition = "2021"
[build-dependencies]
bindgen = "0.70.1"

View File

@@ -0,0 +1,248 @@
> ⚠️ Note: This is an advanced and experimental API recommended only for plugin developers who are familiar with systems programming and the C ABI. Use with caution.
# Bun Native Plugins
This crate provides a Rustified wrapper over Bun's native bundler plugin C API.
Some advantages of _native_ bundler plugins over regular ones implemented in JS:
- Native plugins take full advantage of Bun's parallelized bundler pipeline and run on multiple threads at the same time
- Unlike JS, native plugins don't need to do the UTF-8 <-> UTF-16 source code string conversions
What exactly are native bundler plugins? They are NAPI modules which expose a C ABI function that implements a plugin lifecycle hook.
The currently supported lifecycle hooks are:
- `onBeforeParse` (called immediately before a file is parsed, allows you to modify the source code of the file)
## Getting started
Since native bundler plugins are NAPI modules, the easiest way to get started is to create a new [napi-rs](https://github.com/napi-rs/napi-rs) project:
```bash
bun add -g @napi-rs/cli
napi new
```
Then install this crate:
```bash
cargo add bun-native-plugin
```
Now, inside the `lib.rs` file, expose a C ABI function which has the same function signature as the plugin lifecycle hook that you want to implement.
For example, implementing `onBeforeParse`:
```rs
use bun_native_plugin::{define_bun_plugin, OnBeforeParse};
use napi_derive::napi;
/// Define the plugin with its name
define_bun_plugin!("replace-foo-with-bar");
/// This is necessary for napi-rs to compile this into a proper NAPI module
#[napi]
pub fn register_bun_plugin() {}
/// Use `no_mangle` so that we can reference this symbol by name later
/// when registering this native plugin in JS.
///
/// Here we'll create a dummy plugin which replaces all occurrences of
/// `foo` with `bar`
#[no_mangle]
pub extern "C" fn on_before_parse_plugin_impl(
args: *const bun_native_plugin::sys::OnBeforeParseArguments,
result: *mut bun_native_plugin::sys::OnBeforeParseResult,
) {
let args = unsafe { &*args };
// This returns a handle which is a safe wrapper over the raw
// C API.
let mut handle = match OnBeforeParse::from_raw(args, result) {
Ok(handle) => handle,
Err(_) => {
// `OnBeforeParse::from_raw` handles error logging,
// so it's fine to return here.
return;
}
};
let input_source_code = match handle.input_source_code() {
Ok(source_str) => source_str,
Err(_) => {
// If we encounter an error, we must log it so that
// Bun knows this plugin failed.
handle.log_error("Failed to fetch source code!");
return;
}
};
let loader = handle.output_loader();
let output_source_code = input_source_code.replace("foo", "bar");
handle.set_output_source_code(output_source_code, loader);
}
```
Then compile this NAPI module. If you're using napi-rs, the `package.json` should have a `build` script you can run:
```bash
bun run build
```
This will produce a `.node` file in the project directory.
With the compiled NAPI module, you can now register the plugin from JS:
```js
const result = await Bun.build({
entrypoints: ["index.ts"],
plugins: [
{
name: "replace-foo-with-bar",
setup(build) {
const napiModule = require("path/to/napi_module.node");
// Register the `onBeforeParse` hook to run on all `.ts` files.
// We tell it to use the function we implemented inside our `lib.rs` code.
build.onBeforeParse(
{ filter: /\.ts/ },
{ napiModule, symbol: "on_before_parse_plugin_impl" },
);
},
},
],
});
```
## Very important information
### Error handling and panics
It is highly recommended to avoid panicking as this will crash the runtime. Instead, you must handle errors and log them:
```rs
let input_source_code = match handle.input_source_code() {
Ok(source_str) => source_str,
Err(_) => {
// If we encounter an error, we must log it so that
// Bun knows this plugin failed.
handle.log_error("Failed to fetch source code!");
return;
}
};
```
### Passing state to and from JS: `External`
One way to communicate data between your plugin and JS is through NAPI's [External](https://napi.rs/docs/concepts/external) type.
An External in NAPI is like an opaque pointer to data that can be passed to and from JS. Inside your NAPI module, you can retrieve
the pointer and modify the data.
As an example that extends our getting started example above, let's say you wanted to count the number of `foo`'s that the native plugin encounters.
You would expose a NAPI module function which creates this state. Recall that state in native plugins must be threadsafe. This usually means
that your state must be `Sync`:
```rs
struct PluginState {
foo_count: std::sync::atomic::AtomicU32,
}
#[napi]
pub fn create_plugin_state() -> External<PluginState> {
let external = External::new(PluginState {
foo_count: std::sync::atomic::AtomicU32::new(0),
});
external
}
#[napi]
pub fn get_foo_count(plugin_state: External<PluginState>) -> u32 {
let plugin_state: &PluginState = &plugin_state;
plugin_state.foo_count.load(std::sync::atomic::Ordering::Relaxed)
}
```
When you register your plugin from JavaScript, call the NAPI module function to create the external and then pass it in:
```js
const napiModule = require("path/to/napi_module.node");
const pluginState = napiModule.createPluginState();
const result = await Bun.build({
entrypoints: ["index.ts"],
plugins: [
{
name: "replace-foo-with-bar",
setup(build) {
build.onBeforeParse(
{ filter: /\.ts/ },
{
napiModule,
symbol: "on_before_parse_plugin_impl",
// pass our NAPI external which contains our plugin state here
external: pluginState,
},
);
},
},
],
});
console.log("Total `foo`s encountered: ", pluginState.getFooCount());
```
Finally, from the native implementation of your plugin, you can extract the external:
```rs
pub extern "C" fn on_before_parse_plugin_impl(
args: *const bun_native_plugin::sys::OnBeforeParseArguments,
result: *mut bun_native_plugin::sys::OnBeforeParseResult,
) {
let args = unsafe { &*args };
let mut handle = match OnBeforeParse::from_raw(args, result) {
Ok(handle) => handle,
Err(_) => {
// `OnBeforeParse::from_raw` handles error logging,
// so it's fine to return here.
return;
}
};
let plugin_state: &PluginState =
// This operation is only safe if you pass in an external when registering the plugin.
// If you don't, this could lead to a segfault or access of undefined memory.
match unsafe { handle.external().and_then(|state| state.ok_or(Error::Unknown)) } {
Ok(state) => state,
Err(_) => {
handle.log_error("Failed to get external!");
return;
}
};
// Fetch our source code again
let input_source_code = match handle.input_source_code() {
Ok(source_str) => source_str,
Err(_) => {
handle.log_error("Failed to fetch source code!");
return;
}
};
// Count the number of `foo`s and add it to our state
let foo_count = input_source_code.matches("foo").count() as u32;
plugin_state.foo_count.fetch_add(foo_count, std::sync::atomic::Ordering::Relaxed);
}
```
### Concurrency
Your `extern "C"` plugin function can be called _on any thread_ at _any time_ and _multiple times at once_.
Therefore, you must design any state management to be threadsafe.
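As a rough sketch (the `seen_paths` field and the `record` helper below are illustrative additions, not part of this crate's API), this usually means keeping every piece of shared state behind an atomic or a lock:
```rs
use std::sync::atomic::{AtomicU32, Ordering};
use std::sync::Mutex;

/// Shared plugin state. Every field is individually threadsafe,
/// so a `&PluginState` can be used from any bundler thread at once.
struct PluginState {
    // Cheap counters can be plain atomics...
    foo_count: AtomicU32,
    // ...while anything more complex goes behind a lock.
    seen_paths: Mutex<Vec<String>>,
}

impl PluginState {
    fn record(&self, path: &str, foo_count_in_file: u32) {
        // Safe to call concurrently from multiple plugin invocations.
        self.foo_count.fetch_add(foo_count_in_file, Ordering::Relaxed);
        // Keep the critical section as short as possible.
        self.seen_paths.lock().unwrap().push(path.to_string());
    }
}
```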

View File

@@ -0,0 +1,20 @@
use std::path::PathBuf;
fn main() {
println!("cargo:rustc-link-search=./headers");
let bindings = bindgen::Builder::default()
.header("wrapper.h")
// Add the headers directory to the include path
.clang_arg("-I./headers")
.parse_callbacks(Box::new(bindgen::CargoCallbacks))
.rustified_enum("BunLogLevel")
.rustified_enum("BunLoader")
.generate()
.expect("Unable to generate bindings");
let out_path = PathBuf::from(std::env::var("OUT_DIR").unwrap());
bindings
.write_to_file(out_path.join("bindings.rs"))
.expect("Couldn't write bindings!");
}

View File

@@ -0,0 +1,6 @@
import { join } from "node:path";
const dirname = join(import.meta.dir, "../", "bun-native-bundler-plugin-api");
await Bun.$`rm -rf headers`;
await Bun.$`mkdir -p headers`;
await Bun.$`cp -R ${dirname} headers/bun-native-bundler-plugin-api`;

View File

@@ -0,0 +1,79 @@
#ifndef BUN_NATIVE_BUNDLER_PLUGIN_API_H
#define BUN_NATIVE_BUNDLER_PLUGIN_API_H
#include <stddef.h>
#include <stdint.h>
typedef enum {
BUN_LOADER_JSX = 0,
BUN_LOADER_JS = 1,
BUN_LOADER_TS = 2,
BUN_LOADER_TSX = 3,
BUN_LOADER_CSS = 4,
BUN_LOADER_FILE = 5,
BUN_LOADER_JSON = 6,
BUN_LOADER_TOML = 7,
BUN_LOADER_WASM = 8,
BUN_LOADER_NAPI = 9,
BUN_LOADER_BASE64 = 10,
BUN_LOADER_DATAURL = 11,
BUN_LOADER_TEXT = 12,
BUN_LOADER_BUNSH = 13,
BUN_LOADER_SQLITE = 14,
BUN_LOADER_SQLITE_EMBEDDED = 15
} BunLoader;
const BunLoader BUN_LOADER_MAX = BUN_LOADER_SQLITE_EMBEDDED;
typedef struct BunLogOptions {
size_t __struct_size;
const uint8_t* message_ptr;
size_t message_len;
const uint8_t* path_ptr;
size_t path_len;
const uint8_t* source_line_text_ptr;
size_t source_line_text_len;
int8_t level;
int line;
int lineEnd;
int column;
int columnEnd;
} BunLogOptions;
typedef struct {
size_t __struct_size;
void* bun;
const uint8_t* path_ptr;
size_t path_len;
const uint8_t* namespace_ptr;
size_t namespace_len;
uint8_t default_loader;
void *external;
} OnBeforeParseArguments;
typedef struct OnBeforeParseResult {
size_t __struct_size;
uint8_t* source_ptr;
size_t source_len;
uint8_t loader;
int (*fetchSourceCode)(
const OnBeforeParseArguments* args,
struct OnBeforeParseResult* result
);
void* plugin_source_code_context;
void (*free_plugin_source_code_context)(void* ctx);
void (*log)(const OnBeforeParseArguments* args, BunLogOptions* options);
} OnBeforeParseResult;
typedef enum {
BUN_LOG_LEVEL_VERBOSE = 0,
BUN_LOG_LEVEL_DEBUG = 1,
BUN_LOG_LEVEL_INFO = 2,
BUN_LOG_LEVEL_WARN = 3,
BUN_LOG_LEVEL_ERROR = 4,
} BunLogLevel;
const BunLogLevel BUN_LOG_MAX = BUN_LOG_LEVEL_ERROR;
#endif // BUN_NATIVE_BUNDLER_PLUGIN_API_H

View File

@@ -0,0 +1,627 @@
//! > ⚠️ Note: This is an advanced and experimental API recommended only for plugin developers who are familiar with systems programming and the C ABI. Use with caution.
//!
//! # Bun Native Plugins
//!
//! This crate provides a Rustified wrapper over Bun's native bundler plugin C API.
//!
//! Some advantages of _native_ bundler plugins over regular ones implemented in JS:
//!
//! - Native plugins take full advantage of Bun's parallelized bundler pipeline and run on multiple threads at the same time
//! - Unlike JS, native plugins don't need to do the UTF-8 <-> UTF-16 source code string conversions
//!
//! What exactly are native bundler plugins? They are NAPI modules which expose a C ABI function that implements a plugin lifecycle hook.
//!
//! The currently supported lifecycle hooks are:
//!
//! - `onBeforeParse` (called immediately before a file is parsed, allows you to modify the source code of the file)
//!
//! ## Getting started
//!
//! Since native bundler plugins are NAPI modules, the easiest way to get started is to create a new [napi-rs](https://github.com/napi-rs/napi-rs) project:
//!
//! ```bash
//! bun add -g @napi-rs/cli
//! napi new
//! ```
//!
//! Then install this crate:
//!
//! ```bash
//! cargo add bun-native-plugin
//! ```
//!
//! Now, inside the `lib.rs` file, expose a C ABI function which has the same function signature as the plugin lifecycle hook that you want to implement.
//!
//! For example, implementing `onBeforeParse`:
//!
//! ```rust
//! use bun_native_plugin::{OnBeforeParse};
//!
//! /// This is necessary for napi-rs to compile this into a proper NAPI module
//! #[napi]
//! pub fn register_bun_plugin() {}
//!
//! /// Use `no_mangle` so that we can reference this symbol by name later
//! /// when registering this native plugin in JS.
//! ///
//! /// Here we'll create a dummy plugin which replaces all occurrences of
//! /// `foo` with `bar`
//! #[no_mangle]
//! pub extern "C" fn on_before_parse_plugin_impl(
//! args: *const bun_native_plugin::sys::OnBeforeParseArguments,
//! result: *mut bun_native_plugin::sys::OnBeforeParseResult,
//! ) {
//! let args = unsafe { &*args };
//! let result = unsafe { &mut *result };
//!
//! // This returns a handle which is a safe wrapper over the raw
//! // C API.
//! let mut handle = match OnBeforeParse::from_raw(args, result) {
//! Ok(handle) => handle,
//! Err(_) => {
//! // `OnBeforeParse::from_raw` handles error logging,
//! // so it's fine to return here.
//! return;
//! }
//! };
//!
//! let input_source_code = match handle.input_source_code() {
//! Ok(source_str) => source_str,
//! Err(_) => {
//! // If we encounter an error, we must log it so that
//! // Bun knows this plugin failed.
//! handle.log_error("Failed to fetch source code!");
//! return;
//! }
//! };
//!
//! let loader = handle.output_loader();
//! let output_source_code = input_source_code.replace("foo", "bar");
//! handle.set_output_source_code(output_source_code, loader);
//! }
//! ```
//!
//! Then compile this NAPI module. If you're using napi-rs, the `package.json` should have a `build` script you can run:
//!
//! ```bash
//! bun run build
//! ```
//!
//! This will produce a `.node` file in the project directory.
//!
//! With the compiled NAPI module, you can now register the plugin from JS:
//!
//! ```js
//! const result = await Bun.build({
//! entrypoints: ["index.ts"],
//! plugins: [
//! {
//! name: "replace-foo-with-bar",
//! setup(build) {
//! const napiModule = require("path/to/napi_module.node");
//!
//! // Register the `onBeforeParse` hook to run on all `.ts` files.
//! // We tell it to use the function we implemented inside our `lib.rs` code.
//! build.onBeforeParse(
//! { filter: /\.ts/ },
//! { napiModule, symbol: "on_before_parse_plugin_impl" },
//! );
//! },
//! },
//! ],
//! });
//! ```
//!
//! ## Very important information
//!
//! ### Error handling and panics
//!
//! It is highly recommended to avoid panicking as this will crash the runtime. Instead, you must handle errors and log them:
//!
//! ```rust
//! let input_source_code = match handle.input_source_code() {
//! Ok(source_str) => source_str,
//! Err(_) => {
//! // If we encounter an error, we must log it so that
//! // Bun knows this plugin failed.
//! handle.log_error("Failed to fetch source code!");
//! return;
//! }
//! };
//! ```
//!
//! ### Passing state to and from JS: `External`
//!
//! One way to communicate data between your plugin and JS is through NAPI's [External](https://napi.rs/docs/concepts/external) type.
//!
//! An External in NAPI is like an opaque pointer to data that can be passed to and from JS. Inside your NAPI module, you can retrieve
//! the pointer and modify the data.
//!
//! As an example that extends our getting started example above, let's say you wanted to count the number of `foo`'s that the native plugin encounters.
//!
//! You would expose a NAPI module function which creates this state. Recall that state in native plugins must be threadsafe. This usually means
//! that your state must be `Sync`:
//!
//! ```rust
//! struct PluginState {
//! foo_count: std::sync::atomic::AtomicU32,
//! }
//!
//! #[napi]
//! pub fn create_plugin_state() -> External<PluginState> {
//! let external = External::new(PluginState {
//! foo_count: std::sync::atomic::AtomicU32::new(0),
//! });
//!
//! external
//! }
//!
//!
//! #[napi]
//! pub fn get_foo_count(plugin_state: External<PluginState>) -> u32 {
//! let plugin_state: &PluginState = &plugin_state;
//! plugin_state.foo_count.load(std::sync::atomic::Ordering::Relaxed)
//! }
//! ```
//!
//! When you register your plugin from JavaScript, call the NAPI module function to create the external and then pass it in:
//!
//! ```js
//! const napiModule = require("path/to/napi_module.node");
//! const pluginState = napiModule.createPluginState();
//!
//! const result = await Bun.build({
//! entrypoints: ["index.ts"],
//! plugins: [
//! {
//! name: "replace-foo-with-bar",
//! setup(build) {
//! build.onBeforeParse(
//! { filter: /\.ts/ },
//! {
//! napiModule,
//! symbol: "on_before_parse_plugin_impl",
//! // pass our NAPI external which contains our plugin state here
//! external: pluginState,
//! },
//! );
//! },
//! },
//! ],
//! });
//!
//! console.log("Total `foo`s encountered: ", napiModule.getFooCount(pluginState));
//! ```
//!
//! Finally, from the native implementation of your plugin, you can extract the external:
//!
//! ```rust
//! pub extern "C" fn on_before_parse_plugin_impl(
//! args: *const bun_native_plugin::sys::OnBeforeParseArguments,
//! result: *mut bun_native_plugin::sys::OnBeforeParseResult,
//! ) {
//! let args = unsafe { &*args };
//! let result = unsafe { &mut *result };
//!
//! let mut handle = match OnBeforeParse::from_raw(args, result) {
//! Ok(handle) => handle,
//! Err(_) => {
//! // `OnBeforeParse::from_raw` handles error logging,
//! // so it's fine to return here.
//! return;
//! }
//! };
//!
//! let plugin_state: &PluginState =
//! // This operation is only safe if you pass in an external when registering the plugin.
//! // If you don't, this could lead to a segfault or access of undefined memory.
//! match unsafe { handle.external().and_then(|state| state.ok_or(Error::Unknown)) } {
//! Ok(state) => state,
//! Err(_) => {
//! handle.log_error("Failed to get external!");
//! return;
//! }
//! };
//!
//!
//! // Fetch our source code again
//! let input_source_code = match handle.input_source_code() {
//! Ok(source_str) => source_str,
//! Err(_) => {
//! handle.log_error("Failed to fetch source code!");
//! return;
//! }
//! };
//!
//! // Count the number of `foo`s and add it to our state
//! let foo_count = input_source_code.matches("foo").count() as u32;
//! plugin_state.foo_count.fetch_add(foo_count, std::sync::atomic::Ordering::Relaxed);
//! }
//! ```
//!
//! ### Concurrency
//!
//! Your `extern "C"` plugin function can be called _on any thread_ at _any time_ and _multiple times at once_.
//!
//! Therefore, you must design any state management to be threadsafe.
#![allow(non_upper_case_globals)]
#![allow(non_camel_case_types)]
#![allow(non_snake_case)]
#[repr(transparent)]
pub struct BunPluginName(*const c_char);
impl BunPluginName {
pub const fn new(ptr: *const c_char) -> Self {
Self(ptr)
}
}
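/// Defines the name of this native plugin, which Bun uses to identify it.
///
/// # Example
/// ```rust
/// define_bun_plugin!("replace-foo-with-bar");
/// ```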
#[macro_export]
macro_rules! define_bun_plugin {
($name:expr) => {
pub static BUN_PLUGIN_NAME_STRING: &str = $name;
#[no_mangle]
pub static BUN_PLUGIN_NAME: bun_native_plugin::BunPluginName =
bun_native_plugin::BunPluginName::new(BUN_PLUGIN_NAME_STRING.as_ptr() as *const _);
#[napi]
fn bun_plugin_register() {}
};
}
unsafe impl Sync for BunPluginName {}
use std::{
any::TypeId,
borrow::Cow,
cell::UnsafeCell,
ffi::{c_char, c_void},
str::Utf8Error,
};
pub mod sys {
include!(concat!(env!("OUT_DIR"), "/bindings.rs"));
}
#[repr(C)]
pub struct TaggedObject<T> {
type_id: TypeId,
pub(crate) object: Option<T>,
}
struct SourceCodeContext {
source_ptr: *mut u8,
source_len: usize,
source_cap: usize,
}
extern "C" fn free_plugin_source_code_context(ctx: *mut c_void) {
// SAFETY: The ctx pointer is a pointer to the `SourceCodeContext` struct we allocated.
unsafe {
drop(Box::from_raw(ctx as *mut SourceCodeContext));
}
}
impl Drop for SourceCodeContext {
fn drop(&mut self) {
if !self.source_ptr.is_null() {
// SAFETY: These fields come from a `String` that we allocated.
unsafe {
drop(String::from_raw_parts(
self.source_ptr,
self.source_len,
self.source_cap,
));
}
}
}
}
pub type BunLogLevel = sys::BunLogLevel;
pub type BunLoader = sys::BunLoader;
fn get_from_raw_str<'a>(ptr: *const u8, len: usize) -> Result<Cow<'a, str>> {
let slice: &'a [u8] = unsafe { std::slice::from_raw_parts(ptr, len) };
// Windows allows invalid UTF-16 strings in the filesystem. These get converted to WTF-8 in Zig.
// This means the string may contain invalid UTF-8, so we have to use the checked conversion.
#[cfg(target_os = "windows")]
{
std::str::from_utf8(slice)
.map(Into::into)
.or_else(|_| Ok(String::from_utf8_lossy(slice)))
}
#[cfg(not(target_os = "windows"))]
{
// On other platforms the source comes from Zig as UTF-8, but fall back to a lossy conversion just in case.
std::str::from_utf8(slice)
.map(Into::into)
.or_else(|_| Ok(String::from_utf8_lossy(slice)))
}
}
#[derive(Debug, Clone)]
pub enum Error {
Utf8(Utf8Error),
IncompatiblePluginVersion,
ExternalTypeMismatch,
Unknown,
}
pub type Result<T> = std::result::Result<T, Error>;
impl From<Utf8Error> for Error {
fn from(value: Utf8Error) -> Self {
Self::Utf8(value)
}
}
/// A safe handle for the arguments + result struct for the
/// `OnBeforeParse` bundler lifecycle hook.
///
/// This struct acts as a safe wrapper around the raw C API structs
/// (`sys::OnBeforeParseArguments`/`sys::OnBeforeParseResult`) needed to
/// implement the `OnBeforeParse` bundler lifecycle hook.
///
/// To initialize this struct, see the `from_raw` method.
pub struct OnBeforeParse<'a> {
args_raw: &'a sys::OnBeforeParseArguments,
result_raw: *mut sys::OnBeforeParseResult,
compilation_context: *mut SourceCodeContext,
}
impl<'a> OnBeforeParse<'a> {
/// Initialize this struct from references to their raw counterparts.
///
/// This function will do a versioning check to ensure that the plugin
/// is compatible with the current version of Bun. If the plugin is not
/// compatible, it will log an error and return an error result.
///
/// # Example
/// ```rust
/// extern "C" fn on_before_parse_impl(args: *const sys::OnBeforeParseArguments, result: *mut sys::OnBeforeParseResult) {
/// let args = unsafe { &*args };
/// let result = unsafe { &mut *result };
/// let handle = match OnBeforeParse::from_raw(args, result) {
/// Ok(handle) => handle,
/// Err(_) => return,
/// };
/// }
/// ```
pub fn from_raw(
args: &'a sys::OnBeforeParseArguments,
result: *mut sys::OnBeforeParseResult,
) -> Result<Self> {
if args.__struct_size < std::mem::size_of::<sys::OnBeforeParseArguments>()
|| unsafe { (*result).__struct_size } < std::mem::size_of::<sys::OnBeforeParseResult>()
{
let message = "This plugin is not compatible with the current version of Bun.";
let mut log_options = sys::BunLogOptions {
__struct_size: std::mem::size_of::<sys::BunLogOptions>(),
message_ptr: message.as_ptr(),
message_len: message.len(),
path_ptr: args.path_ptr,
path_len: args.path_len,
source_line_text_ptr: std::ptr::null(),
source_line_text_len: 0,
level: BunLogLevel::BUN_LOG_LEVEL_ERROR as i8,
line: 0,
lineEnd: 0,
column: 0,
columnEnd: 0,
};
// SAFETY: The `log` function pointer is guaranteed to be valid by the Bun runtime.
unsafe {
((*result).log.unwrap())(args, &mut log_options);
}
return Err(Error::IncompatiblePluginVersion);
}
Ok(Self {
args_raw: args,
result_raw: result,
compilation_context: std::ptr::null_mut() as *mut _,
})
}
pub fn path(&self) -> Result<Cow<'_, str>> {
get_from_raw_str(self.args_raw.path_ptr, self.args_raw.path_len)
}
pub fn namespace(&self) -> Result<Cow<'_, str>> {
get_from_raw_str(self.args_raw.namespace_ptr, self.args_raw.namespace_len)
}
/// Get the external object from the `OnBeforeParse` arguments.
///
/// The external object is set by the plugin definition inside of JS:
/// ```js
/// await Bun.build({
/// plugins: [
/// {
/// name: "my-plugin",
/// setup(builder) {
/// const native_plugin = require("./native_plugin.node");
/// const external = native_plugin.createExternal();
/// builder.onBeforeParse({ filter: /\.ts/ }, { napiModule: native_plugin, symbol: 'onBeforeParse', external });
/// },
/// },
/// ],
/// });
/// ```
///
/// The external object must be created from NAPI for this function to be safe!
///
/// This function will return an error if the external object is not a
/// valid tagged object for the given type.
///
/// This function will return `Ok(None)` if there is no external object
/// set.
///
/// # Example
/// The code to create the external from napi-rs:
/// ```rs
/// #[no_mangle]
/// #[napi]
/// pub fn create_my_external() -> External<MyStruct> {
/// let external = External::new(MyStruct::new());
///
/// external
/// }
/// ```
///
/// The code to extract the external:
/// ```rust
/// let external = match handle.external::<MyStruct>() {
/// Ok(Some(external)) => external,
/// _ => {
/// handle.log_error("Could not get external object.");
/// return;
/// },
/// };
/// ```
pub unsafe fn external<T: 'static + Sync>(&self) -> Result<Option<&'static T>> {
if self.args_raw.external.is_null() {
return Ok(None);
}
let external: *mut TaggedObject<T> = self.args_raw.external as *mut TaggedObject<T>;
unsafe {
if (*external).type_id != TypeId::of::<T>() {
return Err(Error::ExternalTypeMismatch);
}
Ok((*external).object.as_ref())
}
}
/// The same as [`OnBeforeParse::external`], but returns a mutable reference.
///
/// This is unsafe as you must ensure that no other invocation of the plugin
/// simultaneously holds a mutable reference to the external.
pub unsafe fn external_mut<T: 'static + Sync>(&mut self) -> Result<Option<&mut T>> {
if self.args_raw.external.is_null() {
return Ok(None);
}
let external: *mut TaggedObject<T> = self.args_raw.external as *mut TaggedObject<T>;
unsafe {
if (*external).type_id != TypeId::of::<T>() {
return Err(Error::ExternalTypeMismatch);
}
Ok((*external).object.as_mut())
}
}
/// Get the input source code for the current file.
///
/// On Windows, this function may return an `Err(Error::Utf8(...))` if the
/// source code contains invalid UTF-8.
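///
/// # Example
/// ```rust
/// let input_source_code = match handle.input_source_code() {
/// Ok(source_str) => source_str,
/// Err(_) => {
/// handle.log_error("Failed to fetch source code!");
/// return;
/// }
/// };
/// ```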
pub fn input_source_code(&self) -> Result<Cow<'_, str>> {
let fetch_result = unsafe {
((*self.result_raw).fetchSourceCode.unwrap())(self.args_raw, self.result_raw)
};
if fetch_result != 0 {
Err(Error::Unknown)
} else {
// SAFETY: We don't hand out mutable references to `result_raw` so dereferencing here is safe.
unsafe {
get_from_raw_str((*self.result_raw).source_ptr, (*self.result_raw).source_len)
}
}
}
/// Set the output source code for the current file.
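///
/// # Example
/// ```rust
/// let loader = handle.output_loader();
/// let output_source_code = input_source_code.replace("foo", "bar");
/// handle.set_output_source_code(output_source_code, loader);
/// ```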
pub fn set_output_source_code(&mut self, source: String, loader: BunLoader) {
let source_cap = source.capacity();
let source = source.leak();
let source_ptr = source.as_mut_ptr();
let source_len = source.len();
if self.compilation_context.is_null() {
self.compilation_context = Box::into_raw(Box::new(SourceCodeContext {
source_ptr,
source_len,
source_cap,
}));
// SAFETY: We don't hand out mutable references to `result_raw` so dereferencing it is safe.
unsafe {
(*self.result_raw).plugin_source_code_context =
self.compilation_context as *mut c_void;
(*self.result_raw).free_plugin_source_code_context =
Some(free_plugin_source_code_context);
}
} else {
unsafe {
// SAFETY: If we're here we know that `compilation_context` is not null.
let context = &mut *self.compilation_context;
drop(String::from_raw_parts(
context.source_ptr,
context.source_len,
context.source_cap,
));
context.source_ptr = source_ptr;
context.source_len = source_len;
context.source_cap = source_cap;
}
}
// SAFETY: We don't hand out mutable references to `result_raw` so dereferencing it is safe.
unsafe {
(*self.result_raw).loader = loader as u8;
(*self.result_raw).source_ptr = source_ptr;
(*self.result_raw).source_len = source_len;
}
}
/// Set the output loader for the current file.
pub fn set_output_loader(&self, loader: BunLoader) {
// SAFETY: We don't hand out mutable references to `result_raw` so dereferencing it is safe.
unsafe {
(*self.result_raw).loader = loader as u8;
}
}
/// Get the output loader for the current file.
pub fn output_loader(&self) -> BunLoader {
unsafe { std::mem::transmute((*self.result_raw).loader as u32) }
}
/// Log an error message.
pub fn log_error(&self, message: &str) {
self.log(message, BunLogLevel::BUN_LOG_LEVEL_ERROR)
}
/// Log a message with the given level.
pub fn log(&self, message: &str, level: BunLogLevel) {
let mut log_options = sys::BunLogOptions {
__struct_size: std::mem::size_of::<sys::BunLogOptions>(),
message_ptr: message.as_ptr(),
message_len: message.len(),
path_ptr: self.args_raw.path_ptr,
path_len: self.args_raw.path_len,
source_line_text_ptr: std::ptr::null(),
source_line_text_len: 0,
level: level as i8,
line: 0,
lineEnd: 0,
column: 0,
columnEnd: 0,
};
unsafe {
((*self.result_raw).log.unwrap())(self.args_raw, &mut log_options);
}
}
}

View File

@@ -0,0 +1 @@
#include <bun-native-bundler-plugin-api/bundler_plugin.h>

Binary file not shown.

View File

@@ -97,6 +97,8 @@ export async function getBuild(): Promise<number> {
}
export async function getSemver(tag?: string, build?: number): Promise<string> {
const { tag_name: latest_tag_name } = await getRelease();
const version = latest_tag_name.replace("bun-v", "");
const { tag_name } = await getRelease(tag);
if (tag_name !== "canary") {
return tag_name.replace("bun-v", "");
@@ -106,7 +108,7 @@ export async function getSemver(tag?: string, build?: number): Promise<string> {
}
const sha = await getSha(tag_name, "short");
const date = new Date().toISOString().split("T")[0].replace(/-/g, "");
return `${Bun.version}-canary.${date}.${build}+${sha}`;
return `${version}-canary.${date}.${build}+${sha}`;
}
export function formatTag(tag: string): string {

View File

@@ -3,7 +3,7 @@ import { spawn } from "../spawn";
import { chmod, join, rename, rm, tmp, write } from "../fs";
import { unzipSync } from "zlib";
import type { Platform } from "../platform";
import { os, arch, supportedPlatforms } from "../platform";
import { os, arch, abi, supportedPlatforms } from "../platform";
import { debug, error } from "../console";
declare const version: string;
@@ -12,7 +12,7 @@ declare const owner: string;
export async function importBun(): Promise<string> {
if (!supportedPlatforms.length) {
throw new Error(`Unsupported platform: ${os} ${arch}`);
throw new Error(`Unsupported platform: ${os} ${arch} ${abi || ""}`);
}
for (const platform of supportedPlatforms) {
try {
@@ -121,7 +121,8 @@ async function downloadBun(platform: Platform, dst: string): Promise<void> {
}
export function optimizeBun(path: string): void {
const installScript = os === "win32" ? 'powershell -c "irm bun.sh/install.ps1 | iex"' : "curl -fsSL https://bun.sh/install | bash";
const installScript =
os === "win32" ? 'powershell -c "irm bun.sh/install.ps1 | iex"' : "curl -fsSL https://bun.sh/install | bash";
try {
rename(path, join(__dirname, "bin", "bun.exe"));
return;

View File

@@ -1,5 +1,5 @@
import { spawn } from "./spawn";
import { read } from "./fs";
import { exists, read } from "./fs";
import { debug } from "./console";
export const os = process.platform;
@@ -10,9 +10,12 @@ export const avx2 =
arch === "x64" &&
((os === "linux" && isLinuxAVX2()) || (os === "darwin" && isDarwinAVX2()) || (os === "win32" && isWindowsAVX2()));
export const abi = os === "linux" && isLinuxMusl() ? "musl" : undefined;
export type Platform = {
os: string;
arch: string;
abi?: "musl";
avx2?: boolean;
bin: string;
exe: string;
@@ -57,6 +60,28 @@ export const platforms: Platform[] = [
bin: "bun-linux-x64-baseline",
exe: "bin/bun",
},
{
os: "linux",
arch: "aarch64",
abi: "musl",
bin: "bun-linux-aarch64-musl",
exe: "bin/bun",
},
{
os: "linux",
arch: "x64",
abi: "musl",
avx2: true,
bin: "bun-linux-x64-musl",
exe: "bin/bun",
},
{
os: "linux",
arch: "x64",
abi: "musl",
bin: "bun-linux-x64-musl-baseline",
exe: "bin/bun",
},
{
os: "win32",
arch: "x64",
@@ -73,9 +98,24 @@ export const platforms: Platform[] = [
];
export const supportedPlatforms: Platform[] = platforms
.filter(platform => platform.os === os && platform.arch === arch && (!platform.avx2 || avx2))
.filter(
platform =>
platform.os === os &&
platform.arch === arch &&
(!platform.avx2 || avx2) &&
(!platform.abi || abi === platform.abi),
)
.sort((a, b) => (a.avx2 === b.avx2 ? 0 : a.avx2 ? -1 : 1));
function isLinuxMusl(): boolean {
try {
return exists("/etc/alpine-release");
} catch (error) {
debug("isLinuxMusl failed", error);
return false;
}
}
function isLinuxAVX2(): boolean {
try {
return read("/proc/cpuinfo").includes("avx2");

9
packages/bun-types/ambient.d.ts vendored Normal file
View File

@@ -0,0 +1,9 @@
declare module "*.txt" {
var text: string;
export = text;
}
declare module "*.toml" {
var contents: any;
export = contents;
}

View File

@@ -14,6 +14,7 @@
* This module aliases `globalThis.Bun`.
*/
declare module "bun" {
import type { FFIFunctionCallableSymbol } from "bun:ffi";
import type { Encoding as CryptoEncoding } from "crypto";
import type { CipherNameAndProtocol, EphemeralKeyInfo, PeerCertificate } from "tls";
interface Env {
@@ -1552,6 +1553,26 @@ declare module "bun" {
* https://nodejs.org/api/packages.html#exports
*/
conditions?: Array<string> | string;
/**
* Controls how environment variables are handled during bundling.
*
* Can be one of:
* - `"inline"`: Injects environment variables into the bundled output by converting `process.env.FOO`
* references to string literals containing the actual environment variable values
* - `"disable"`: Disables environment variable injection entirely
* - A string ending in `*`: Inlines environment variables that match the given prefix.
* For example, `"MY_PUBLIC_*"` will only include env vars starting with "MY_PUBLIC_"
*
* @example
* ```ts
* Bun.build({
* env: "MY_PUBLIC_*",
* entrypoints: ["src/index.ts"],
* })
* ```
*/
env?: "inline" | "disable" | `${string}*`;
minify?:
| boolean
| {
@@ -3881,7 +3902,7 @@ declare module "bun" {
defer: () => Promise<void>;
}
type OnLoadResult = OnLoadResultSourceCode | OnLoadResultObject | undefined;
type OnLoadResult = OnLoadResultSourceCode | OnLoadResultObject | undefined | void;
type OnLoadCallback = (args: OnLoadArgs) => OnLoadResult | Promise<OnLoadResult>;
type OnStartCallback = () => void | Promise<void>;
@@ -3898,7 +3919,7 @@ declare module "bun" {
* The namespace of the importer.
*/
namespace: string;
/**
/**
* The directory to perform file-based resolutions in.
*/
resolveDir: string;
@@ -3931,7 +3952,30 @@ declare module "bun" {
args: OnResolveArgs,
) => OnResolveResult | Promise<OnResolveResult | undefined | null> | undefined | null;
type FFIFunctionCallable = Function & {
// Making a nominally typed function so that the user must get it from dlopen
readonly __ffi_function_callable: typeof FFIFunctionCallableSymbol;
};
interface PluginBuilder {
/**
* Register a callback which will be invoked when bundling starts.
* @example
* ```ts
* Bun.plugin({
* setup(builder) {
* builder.onStart(() => {
* console.log("bundle just started!!")
* });
* },
* });
* ```
*/
onStart(callback: OnStartCallback): void;
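/**
* Register a native addon hook to run immediately before a file is parsed.
* The hook is a C ABI function exported from a NAPI module (for example, one
* built with the `bun-native-plugin` crate) and may be invoked concurrently
* on multiple bundler threads.
* @param constraints The constraints to apply the plugin to
* @param callback The NAPI module, the exported symbol to call, and an optional NAPI external to pass through
*/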
onBeforeParse(
constraints: PluginConstraints,
callback: { napiModule: unknown; symbol: string; external?: unknown | undefined },
): void;
/**
* Register a callback to load imports with a specific import specifier
* @param constraints The constraints to apply the plugin to
@@ -3964,20 +4008,6 @@ declare module "bun" {
* ```
*/
onResolve(constraints: PluginConstraints, callback: OnResolveCallback): void;
/**
* Register a callback which will be invoked when bundling starts.
* @example
* ```ts
* Bun.plugin({
* setup(builder) {
* builder.onStart(() => {
* console.log("bundle just started!!")
* });
* },
* });
* ```
*/
onStart(callback: OnStartCallback): void;
/**
* The config object passed to `Bun.build` as is. Can be mutated.
*/

View File

@@ -566,17 +566,21 @@ declare module "bun:ffi" {
type ToFFIType<T extends FFITypeOrString> = T extends FFIType ? T : T extends string ? FFITypeStringToType[T] : never;
const FFIFunctionCallableSymbol: unique symbol;
type ConvertFns<Fns extends Symbols> = {
[K in keyof Fns]: (
...args: Fns[K]["args"] extends infer A extends readonly FFITypeOrString[]
? { [L in keyof A]: FFITypeToArgsType[ToFFIType<A[L]>] }
: // eslint-disable-next-line @definitelytyped/no-single-element-tuple-type
[unknown] extends [Fns[K]["args"]]
? []
: never
) => [unknown] extends [Fns[K]["returns"]] // eslint-disable-next-line @definitelytyped/no-single-element-tuple-type
? undefined
: FFITypeToReturnsType[ToFFIType<NonNullable<Fns[K]["returns"]>>];
[K in keyof Fns]: {
(
...args: Fns[K]["args"] extends infer A extends readonly FFITypeOrString[]
? { [L in keyof A]: FFITypeToArgsType[ToFFIType<A[L]>] }
: // eslint-disable-next-line @definitelytyped/no-single-element-tuple-type
[unknown] extends [Fns[K]["args"]]
? []
: never
): [unknown] extends [Fns[K]["returns"]] // eslint-disable-next-line @definitelytyped/no-single-element-tuple-type
? undefined
: FFITypeToReturnsType[ToFFIType<NonNullable<Fns[K]["returns"]>>];
__ffi_function_callable: typeof FFIFunctionCallableSymbol;
};
};
/**

View File

@@ -1,5 +1,3 @@
export {};
type _ReadableStream<T> = typeof globalThis extends {
onerror: any;
ReadableStream: infer T;
@@ -141,16 +139,6 @@ import type { TextDecoder as NodeTextDecoder, TextEncoder as NodeTextEncoder } f
import type { MessagePort } from "worker_threads";
import type { WebSocket as _WebSocket } from "ws";
declare module "*.txt" {
var text: string;
export = text;
}
declare module "*.toml" {
var contents: any;
export = contents;
}
declare global {
var Bun: typeof import("bun");
@@ -1835,10 +1823,10 @@ declare global {
readonly main: boolean;
/** Alias of `import.meta.dir`. Exists for Node.js compatibility */
readonly dirname: string;
dirname: string;
/** Alias of `import.meta.path`. Exists for Node.js compatibility */
readonly filename: string;
filename: string;
}
/**

View File

@@ -20,3 +20,4 @@
/// <reference path="./sqlite.d.ts" />
/// <reference path="./wasm.d.ts" />
/// <reference path="./deprecated.d.ts" />
/// <reference path="./ambient.d.ts" />

View File

@@ -1127,7 +1127,7 @@ declare module "bun:sqlite" {
*
* @since Bun v1.1.14
*/
interface Changes {
export interface Changes {
/**
* The number of rows changed by the last `run` or `exec` call.
*/

View File

@@ -1295,6 +1295,57 @@ declare module "bun:test" {
* @param hint Hint used to identify the snapshot in the snapshot file.
*/
toMatchSnapshot(propertyMatchers?: object, hint?: string): void;
/**
* Asserts that a value matches the most recent inline snapshot.
*
* @example
* expect("Hello").toMatchInlineSnapshot();
* expect("Hello").toMatchInlineSnapshot(`"Hello"`);
*
* @param value The latest automatically-updated snapshot value.
*/
toMatchInlineSnapshot(value?: string): void;
/**
* Asserts that a value matches the most recent inline snapshot.
*
* @example
* expect({ c: new Date() }).toMatchInlineSnapshot({ c: expect.any(Date) });
* expect({ c: new Date() }).toMatchInlineSnapshot({ c: expect.any(Date) }, `
* {
* "v": Any<Date>,
* }
* `);
*
* @param propertyMatchers Object containing properties to match against the value.
* @param value The latest automatically-updated snapshot value.
*/
toMatchInlineSnapshot(propertyMatchers?: object, value?: string): void;
/**
* Asserts that a function throws an error matching the most recent snapshot.
*
* @example
* function fail() {
* throw new Error("Oops!");
* }
* expect(fail).toThrowErrorMatchingSnapshot();
* expect(fail).toThrowErrorMatchingSnapshot("This one should say Oops!");
*
* @param value The latest automatically-updated snapshot value.
*/
toThrowErrorMatchingSnapshot(hint?: string): void;
/**
* Asserts that a function throws an error matching the most recent snapshot.
*
* @example
* function fail() {
* throw new Error("Oops!");
* }
* expect(fail).toThrowErrorMatchingInlineSnapshot();
* expect(fail).toThrowErrorMatchingInlineSnapshot(`"Oops!"`);
*
* @param value The latest automatically-updated snapshot value.
*/
toThrowErrorMatchingInlineSnapshot(value?: string): void;
/**
* Asserts that an object matches a subset of properties.
*

View File

@@ -1,4 +1,4 @@
import { CString, dlopen, FFIType, Pointer, read, suffix } from "bun:ffi";
import { CString, dlopen, FFIType, JSCallback, Pointer, read, suffix } from "bun:ffi";
import * as tsd from "./utilities.test";
// `suffix` is either "dylib", "so", or "dll" depending on the platform
@@ -62,12 +62,14 @@ const lib = dlopen(
},
);
declare const ptr: Pointer;
tsd.expectType<CString>(lib.symbols.sqlite3_libversion());
tsd.expectType<number>(lib.symbols.add(1, 2));
tsd.expectType<Pointer | null>(lib.symbols.ptr_type(0));
tsd.expectType<Pointer | null>(lib.symbols.ptr_type(ptr));
tsd.expectType<Pointer | null>(lib.symbols.fn_type(0));
tsd.expectType<Pointer | null>(lib.symbols.fn_type(new JSCallback(() => {}, {})));
function _arg(
...params: [
@@ -166,16 +168,16 @@ tsd.expectType<void>(lib2.symbols.multi_args(1, 2));
tsd.expectTypeEquals<ReturnType<(typeof lib2)["symbols"]["no_returns"]>, undefined>(true);
tsd.expectTypeEquals<Parameters<(typeof lib2)["symbols"]["no_args"]>, []>(true);
tsd.expectType<number>(read.u8(0));
tsd.expectType<number>(read.u8(0, 0));
tsd.expectType<number>(read.i8(0, 0));
tsd.expectType<number>(read.u16(0, 0));
tsd.expectType<number>(read.i16(0, 0));
tsd.expectType<number>(read.u32(0, 0));
tsd.expectType<number>(read.i32(0, 0));
tsd.expectType<bigint>(read.u64(0, 0));
tsd.expectType<bigint>(read.i64(0, 0));
tsd.expectType<number>(read.f32(0, 0));
tsd.expectType<number>(read.f64(0, 0));
tsd.expectType<number>(read.ptr(0, 0));
tsd.expectType<number>(read.intptr(0, 0));
tsd.expectType<number>(read.u8(ptr));
tsd.expectType<number>(read.u8(ptr, 0));
tsd.expectType<number>(read.i8(ptr, 0));
tsd.expectType<number>(read.u16(ptr, 0));
tsd.expectType<number>(read.i16(ptr, 0));
tsd.expectType<number>(read.u32(ptr, 0));
tsd.expectType<number>(read.i32(ptr, 0));
tsd.expectType<bigint>(read.u64(ptr, 0));
tsd.expectType<bigint>(read.i64(ptr, 0));
tsd.expectType<number>(read.f32(ptr, 0));
tsd.expectType<number>(read.f64(ptr, 0));
tsd.expectType<number>(read.ptr(ptr, 0));
tsd.expectType<number>(read.intptr(ptr, 0));

View File

@@ -1,4 +1,4 @@
import { Database } from "bun:sqlite";
import { Changes, Database } from "bun:sqlite";
import { expectType } from "./utilities.test";
const db = new Database(":memory:");
@@ -22,7 +22,7 @@ expectType<Array<{ name: string; dob: number }>>(allResults);
expectType<{ name: string; dob: number } | null>(getResults);
// tslint:disable-next-line:invalid-void
// eslint-disable-next-line @typescript-eslint/no-invalid-void-type
expectType<void>(runResults);
expectType<Changes>(runResults);
const query3 = db.prepare<
{ name: string; dob: number }, // return type first

View File

@@ -1,16 +1,14 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
"lib": ["ESNext"],
"skipLibCheck": false,
"strict": true,
"target": "esnext",
"module": "esnext",
"moduleResolution": "node",
"allowSyntheticDefaultImports": true,
"disableSolutionSearching": true,
"noUnusedLocals": true,
"noEmit": true,
"resolveJsonModule": true
"declaration": true,
"emitDeclarationOnly": true,
"noEmit": false,
"declarationDir": "out"
},
"files": ["ambient.d.ts"], // ambient defines .txt and .toml loaders
"include": ["**/*.ts"],
"exclude": ["dist", "node_modules"]
}

View File

@@ -965,8 +965,10 @@ int bsd_connect_udp_socket(LIBUS_SOCKET_DESCRIPTOR fd, const char *host, int por
char port_string[16];
snprintf(port_string, 16, "%d", port);
if (getaddrinfo(host, port_string, &hints, &result)) {
return -1;
int gai_error = getaddrinfo(host, port_string, &hints, &result);
if (gai_error != 0) {
return gai_error;
}
if (result == NULL) {

View File

@@ -2153,6 +2153,8 @@ us_socket_context_on_socket_connect_error(
socket->ssl_read_wants_write = 0;
socket->fatal_error = 0;
socket->handshake_state = HANDSHAKE_PENDING;
// always resume the socket
us_socket_resume(1, &socket->s);
return socket;
}

View File

@@ -20,6 +20,8 @@ import {
getEnv,
writeFile,
spawnSafe,
spawn,
mkdir,
} from "./utils.mjs";
import { parseArgs } from "node:util";
@@ -49,16 +51,19 @@ async function doBuildkiteAgent(action) {
const args = [realpathSync(process.argv[1]), "start"];
if (isWindows) {
const serviceCommand = [
"New-Service",
"-Name",
"buildkite-agent",
"-StartupType",
"Automatic",
"-BinaryPathName",
`${escape(command)} ${escape(args.map(escape).join(" "))}`,
mkdir(logsPath);
const nssm = which("nssm", { required: true });
const nssmCommands = [
[nssm, "install", "buildkite-agent", command, ...args],
[nssm, "set", "buildkite-agent", "Start", "SERVICE_AUTO_START"],
[nssm, "set", "buildkite-agent", "AppDirectory", homePath],
[nssm, "set", "buildkite-agent", "AppStdout", agentLogPath],
[nssm, "set", "buildkite-agent", "AppStderr", agentLogPath],
];
await spawnSafe(["powershell", "-Command", serviceCommand.join(" ")], { stdio: "inherit" });
for (const command of nssmCommands) {
await spawnSafe(command, { stdio: "inherit" });
}
}
if (isOpenRc()) {
@@ -124,13 +129,21 @@ async function doBuildkiteAgent(action) {
token = await getCloudMetadataTag("buildkite:token");
}
if (!token) {
throw new Error(
"Buildkite token not found: either set BUILDKITE_AGENT_TOKEN or add a buildkite:token label to the instance",
);
}
let shell;
if (isWindows) {
const pwsh = which(["pwsh", "powershell"], { required: true });
shell = `${pwsh} -Command`;
// Command Prompt has a faster startup time than PowerShell.
// Also, it propagates the exit code of the command, which PowerShell does not.
const cmd = which("cmd", { required: true });
shell = `"${cmd}" /S /C`;
} else {
const sh = which(["bash", "sh"], { required: true });
shell = `${sh} -c`;
const sh = which("sh", { required: true });
shell = `${sh} -e -c`;
}
const flags = ["enable-job-log-tmpfile", "no-feature-reporting"];

View File

@@ -1,6 +1,6 @@
# Version: 4
# A powershell script that installs the dependencies needed to build and test Bun.
# This should work on Windows 10 or newer.
# Version: 7
# A script that installs the dependencies needed to build and test Bun.
# This should work on Windows 10 or newer with PowerShell.
# If this script does not work on your machine, please open an issue:
# https://github.com/oven-sh/bun/issues
@@ -16,6 +16,9 @@ param (
[switch]$Optimize = $CI
)
$ErrorActionPreference = "Stop"
Set-ExecutionPolicy -Scope Process -ExecutionPolicy Bypass -Force
function Execute-Command {
$command = $args -join ' '
Write-Output "$ $command"
@@ -43,6 +46,47 @@ function Which {
}
}
function Execute-Script {
param (
[Parameter(Mandatory = $true, Position = 0)]
[string]$Path
)
$pwsh = Which pwsh powershell -Required
Execute-Command $pwsh $Path
}
function Download-File {
param (
[Parameter(Mandatory = $true, Position = 0)]
[string]$Url,
[Parameter(Mandatory = $false)]
[string]$Name,
[Parameter(Mandatory = $false)]
[string]$Path
)
if (-not $Name) {
$Name = [System.IO.Path]::ChangeExtension([System.IO.Path]::GetRandomFileName(), [System.IO.Path]::GetExtension($Url))
}
if (-not $Path) {
$Path = "$env:TEMP\$Name"
}
$client = New-Object System.Net.WebClient
for ($i = 0; $i -lt 10 -and -not (Test-Path $Path); $i++) {
try {
$client.DownloadFile($Url, $Path)
} catch {
Write-Warning "Failed to download $Url, retry $i..."
Start-Sleep -s $i
}
}
return $Path
}
function Install-Chocolatey {
if (Which choco) {
return
@@ -50,7 +94,8 @@ function Install-Chocolatey {
Write-Output "Installing Chocolatey..."
[System.Net.ServicePointManager]::SecurityProtocol = [System.Net.ServicePointManager]::SecurityProtocol -bor 3072
iex -Command ((New-Object System.Net.WebClient).DownloadString('https://community.chocolatey.org/install.ps1'))
$installScript = Download-File "https://community.chocolatey.org/install.ps1"
Execute-Script $installScript
Refresh-Path
}
@@ -96,10 +141,23 @@ function Add-To-Path {
}
Write-Output "Adding $absolutePath to PATH..."
[Environment]::SetEnvironmentVariable("Path", $newPath, "Machine")
[Environment]::SetEnvironmentVariable("Path", "$newPath", "Machine")
Refresh-Path
}
function Set-Env {
param (
[Parameter(Mandatory = $true, Position = 0)]
[string]$Name,
[Parameter(Mandatory = $true, Position = 1)]
[string]$Value
)
Write-Output "Setting environment variable $Name=$Value..."
[System.Environment]::SetEnvironmentVariable("$Name", "$Value", "Machine")
[System.Environment]::SetEnvironmentVariable("$Name", "$Value", "Process")
}
function Install-Package {
param (
[Parameter(Mandatory = $true, Position = 0)]
@@ -137,7 +195,7 @@ function Install-Package {
function Install-Packages {
foreach ($package in $args) {
Install-Package -Name $package
Install-Package $package
}
}
@@ -145,12 +203,13 @@ function Install-Common-Software {
Install-Chocolatey
Install-Pwsh
Install-Git
Install-Packages curl 7zip
Install-Packages curl 7zip nssm
Install-NodeJs
Install-Bun
Install-Cygwin
if ($CI) {
Install-Tailscale
# FIXME: Installing tailscale causes the AWS metadata server to become unreachable
# Install-Tailscale
Install-Buildkite
}
}
@@ -204,12 +263,13 @@ function Install-Buildkite {
Write-Output "Installing Buildkite agent..."
$env:buildkiteAgentToken = "xxx"
iex ((New-Object System.Net.WebClient).DownloadString("https://raw.githubusercontent.com/buildkite/agent/main/install.ps1"))
$installScript = Download-File "https://raw.githubusercontent.com/buildkite/agent/main/install.ps1"
Execute-Script $installScript
Refresh-Path
}
function Install-Build-Essentials {
# Install-Visual-Studio
Install-Visual-Studio
Install-Packages `
cmake `
make `
@@ -219,41 +279,42 @@ function Install-Build-Essentials {
golang `
nasm `
ruby `
strawberryperl `
mingw
Install-Rust
Install-Llvm
}
function Install-Visual-Studio {
$components = @(
"Microsoft.VisualStudio.Workload.NativeDesktop",
"Microsoft.VisualStudio.Component.Windows10SDK.18362",
"Microsoft.VisualStudio.Component.Windows11SDK.22000",
"Microsoft.VisualStudio.Component.Windows11Sdk.WindowsPerformanceToolkit",
"Microsoft.VisualStudio.Component.VC.ASAN", # C++ AddressSanitizer
"Microsoft.VisualStudio.Component.VC.ATL", # C++ ATL for latest v143 build tools (x86 & x64)
"Microsoft.VisualStudio.Component.VC.DiagnosticTools", # C++ Diagnostic Tools
"Microsoft.VisualStudio.Component.VC.CLI.Support", # C++/CLI support for v143 build tools (Latest)
"Microsoft.VisualStudio.Component.VC.CoreIde", # C++ core features
"Microsoft.VisualStudio.Component.VC.Redist.14.Latest" # C++ 2022 Redistributable Update
param (
[Parameter(Mandatory = $false)]
[string]$Edition = "community"
)
$arch = (Get-WmiObject Win32_Processor).Architecture
if ($arch -eq 9) {
$components += @(
"Microsoft.VisualStudio.Component.VC.Tools.x86.x64", # MSVC v143 build tools (x86 & x64)
"Microsoft.VisualStudio.Component.VC.Modules.x86.x64" # MSVC v143 C++ Modules for latest v143 build tools (x86 & x64)
)
} elseif ($arch -eq 5) {
$components += @(
"Microsoft.VisualStudio.Component.VC.Tools.ARM64", # MSVC v143 build tools (ARM64)
"Microsoft.VisualStudio.Component.UWP.VC.ARM64" # C++ Universal Windows Platform support for v143 build tools (ARM64/ARM64EC)
)
}
Write-Output "Downloading Visual Studio installer..."
$vsInstaller = Download-File "https://aka.ms/vs/17/release/vs_$Edition.exe"
$packageParameters = $components | ForEach-Object { "--add $_" }
Install-Package visualstudio2022community `
-ExtraArgs "--package-parameters '--add Microsoft.VisualStudio.Workload.NativeDesktop --includeRecommended --includeOptional'"
Write-Output "Installing Visual Studio..."
$vsInstallArgs = @(
"--passive",
"--norestart",
"--wait",
"--force",
"--locale en-US",
"--add Microsoft.VisualStudio.Workload.NativeDesktop",
"--includeRecommended"
)
$startInfo = New-Object System.Diagnostics.ProcessStartInfo
$startInfo.FileName = $vsInstaller
$startInfo.Arguments = $vsInstallArgs -join ' '
$startInfo.CreateNoWindow = $true
$process = New-Object System.Diagnostics.Process
$process.StartInfo = $startInfo
$process.Start()
$process.WaitForExit()
if ($process.ExitCode -ne 0) {
throw "Failed to install Visual Studio: code $($process.ExitCode)"
}
}
function Install-Rust {
@@ -261,18 +322,31 @@ function Install-Rust {
return
}
Write-Output "Installing Rustup..."
$rustupInit = Download-File "https://win.rustup.rs/" -Name "rustup-init.exe"
Write-Output "Installing Rust..."
$rustupInit = "$env:TEMP\rustup-init.exe"
(New-Object System.Net.WebClient).DownloadFile("https://win.rustup.rs/", $rustupInit)
Execute-Command $rustupInit -y
Add-To-Path "$env:USERPROFILE\.cargo\bin"
Write-Output "Moving Rust to $env:ProgramFiles..."
$rustPath = Join-Path $env:ProgramFiles "Rust"
if (-not (Test-Path $rustPath)) {
New-Item -Path $rustPath -ItemType Directory
}
Move-Item "$env:UserProfile\.cargo" "$rustPath\cargo" -Force
Move-Item "$env:UserProfile\.rustup" "$rustPath\rustup" -Force
Write-Output "Setting environment variables for Rust..."
Set-Env "CARGO_HOME" "$rustPath\cargo"
Set-Env "RUSTUP_HOME" "$rustPath\rustup"
Add-To-Path "$rustPath\cargo\bin"
}
function Install-Llvm {
Install-Package llvm `
-Command clang-cl `
-Version "18.1.8"
Add-To-Path "C:\Program Files\LLVM\bin"
Add-To-Path "$env:ProgramFiles\LLVM\bin"
}
function Optimize-System {
@@ -280,6 +354,9 @@ function Optimize-System {
Disable-Windows-Threat-Protection
Disable-Windows-Services
Disable-Power-Management
}
function Optimize-System-Needs-Reboot {
Uninstall-Windows-Defender
}
@@ -319,7 +396,7 @@ function Disable-Windows-Services {
}
function Disable-Power-Management {
Write-Output "Disabling power management features..."
Write-Output "Disabling Power Management..."
powercfg /setactive 8c5e7fda-e8bf-4a96-9a85-a6e23a8c635c # High performance
powercfg /change monitor-timeout-ac 0
powercfg /change monitor-timeout-dc 0
@@ -329,7 +406,6 @@ function Disable-Power-Management {
powercfg /change hibernate-timeout-dc 0
}
Set-ExecutionPolicy -Scope Process -ExecutionPolicy Bypass -Force
if ($Optimize) {
Optimize-System
}
@@ -337,3 +413,6 @@ if ($Optimize) {
Install-Common-Software
Install-Build-Essentials
if ($Optimize) {
Optimize-System-Needs-Reboot
}

View File

@@ -1,5 +1,5 @@
#!/bin/sh
# Version: 5
# Version: 7
# A script that installs the dependencies needed to build and test Bun.
# This should work on macOS and Linux with a POSIX shell.
@@ -92,7 +92,7 @@ download_file() {
execute chmod 755 "$tmp"
path="$tmp/$filename"
fetch "$url" > "$path"
fetch "$url" >"$path"
execute chmod 644 "$path"
print "$path"
@@ -112,14 +112,23 @@ append_to_file() {
file="$1"
content="$2"
if ! [ -f "$file" ]; then
file_needs_sudo="0"
if [ -f "$file" ]; then
if ! [ -r "$file" ] || ! [ -w "$file" ]; then
file_needs_sudo="1"
fi
else
execute_as_user mkdir -p "$(dirname "$file")"
execute_as_user touch "$file"
fi
echo "$content" | while read -r line; do
if ! grep -q "$line" "$file"; then
echo "$line" >>"$file"
if [ "$file_needs_sudo" = "1" ]; then
execute_sudo sh -c "echo '$line' >> '$file'"
else
echo "$line" >>"$file"
fi
fi
done
}
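
A minimal TypeScript sketch of the idempotent-append pattern in `append_to_file` above: each line is written only when it is not already present, and the write goes through sudo when the target file exists but is not readable/writable by the current user. This is an illustration only; the `sudo` invocation mirrors `execute_sudo` and is an assumption about the environment (non-interactive sudo, as the bootstrap script expects in CI).

```ts
// Sketch: append each line of `content` to `file` only if it is not already there,
// escalating through sudo when the file exists but is not accessible to this user.
import { existsSync, accessSync, appendFileSync, readFileSync, constants } from "node:fs";
import { execFileSync } from "node:child_process";

export function appendOnce(file: string, content: string): void {
  let existing = "";
  let useSudo = false;
  if (existsSync(file)) {
    try {
      accessSync(file, constants.R_OK | constants.W_OK);
      existing = readFileSync(file, "utf-8");
    } catch {
      useSudo = true; // file exists but is not readable/writable by this user
    }
  }
  for (const line of content.split("\n").filter(Boolean)) {
    if (existing.includes(line)) continue; // idempotent: skip lines already present
    if (useSudo) {
      execFileSync("sudo", ["sh", "-c", `echo '${line}' >> '${file}'`], { stdio: "inherit" });
    } else {
      appendFileSync(file, line + "\n"); // also creates the file if it does not exist
    }
  }
}
```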
@@ -135,7 +144,7 @@ append_to_file_sudo() {
echo "$content" | while read -r line; do
if ! grep -q "$line" "$file"; then
echo "$line" | execute_sudo tee "$file" > /dev/null
echo "$line" | execute_sudo tee "$file" >/dev/null
fi
done
}
@@ -161,18 +170,21 @@ append_to_path() {
export PATH="$path:$PATH"
}
link_to_bin() {
path="$1"
if ! [ -d "$path" ]; then
error "Could not find directory: \"$path\""
move_to_bin() {
exe_path="$1"
if ! [ -f "$exe_path" ]; then
error "Could not find executable: \"$exe_path\""
fi
for file in "$path"/*; do
if [ -f "$file" ]; then
grant_to_user "$file"
execute_sudo ln -sf "$file" "/usr/bin/$(basename "$file")"
usr_paths="/usr/bin /usr/local/bin"
for usr_path in $usr_paths; do
if [ -d "$usr_path" ] && [ -w "$usr_path" ]; then
break
fi
done
grant_to_user "$exe_path"
execute_sudo mv -f "$exe_path" "$usr_path/$(basename "$exe_path")"
}
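
A rough TypeScript sketch of the `move_to_bin` approach above: scan a fixed list of bin directories, prefer the first writable one, and move the executable there (the move itself still goes through sudo, as in the script). The candidate list and the sudo call are taken from the diff; everything else is illustrative.

```ts
// Sketch of move_to_bin: choose the first writable directory from the
// candidates and move the executable into it via sudo.
import { existsSync, accessSync, constants } from "node:fs";
import { basename, join } from "node:path";
import { execFileSync } from "node:child_process";

const BIN_CANDIDATES = ["/usr/bin", "/usr/local/bin"];

export function moveToBin(exePath: string): string {
  if (!existsSync(exePath)) throw new Error(`Could not find executable: ${exePath}`);
  // Like the shell version, the last candidate is kept if none is writable;
  // the sudo mv is then expected to succeed anyway.
  let target = BIN_CANDIDATES[BIN_CANDIDATES.length - 1];
  for (const dir of BIN_CANDIDATES) {
    try {
      accessSync(dir, constants.W_OK);
      target = dir;
      break;
    } catch {}
  }
  const dest = join(target, basename(exePath));
  execFileSync("sudo", ["mv", "-f", exePath, dest], { stdio: "inherit" });
  return dest;
}
```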
check_features() {
@@ -384,6 +396,74 @@ check_user() {
fi
}
check_ulimit() {
if ! [ "$ci" = "1" ]; then
return
fi
print "Checking ulimits..."
systemd_conf="/etc/systemd/system.conf"
if [ -f "$systemd_conf" ]; then
limits_conf="/etc/security/limits.d/99-unlimited.conf"
if ! [ -f "$limits_conf" ]; then
execute_sudo mkdir -p "$(dirname "$limits_conf")"
execute_sudo touch "$limits_conf"
fi
fi
limits="core data fsize memlock nofile rss stack cpu nproc as locks sigpending msgqueue"
for limit in $limits; do
limit_upper="$(echo "$limit" | tr '[:lower:]' '[:upper:]')"
limit_value="unlimited"
case "$limit" in
nofile | nproc)
limit_value="1048576"
;;
esac
if [ -f "$limits_conf" ]; then
limit_users="root *"
for limit_user in $limit_users; do
append_to_file "$limits_conf" "$limit_user soft $limit $limit_value"
append_to_file "$limits_conf" "$limit_user hard $limit $limit_value"
done
fi
if [ -f "$systemd_conf" ]; then
append_to_file "$systemd_conf" "DefaultLimit$limit_upper=$limit_value"
fi
done
rc_conf="/etc/rc.conf"
if [ -f "$rc_conf" ]; then
rc_ulimit=""
limit_flags="c d e f i l m n q r s t u v x"
for limit_flag in $limit_flags; do
limit_value="unlimited"
case "$limit_flag" in
n | u)
limit_value="1048576"
;;
esac
rc_ulimit="$rc_ulimit -$limit_flag $limit_value"
done
append_to_file "$rc_conf" "rc_ulimit=\"$rc_ulimit\""
fi
pam_confs="/etc/pam.d/common-session /etc/pam.d/common-session-noninteractive"
for pam_conf in $pam_confs; do
if [ -f "$pam_conf" ]; then
append_to_file "$pam_conf" "session optional pam_limits.so"
fi
done
systemctl="$(which systemctl)"
if [ -f "$systemctl" ]; then
execute_sudo "$systemctl" daemon-reload
fi
}
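
The limit expansion in `check_ulimit` above raises most limits to `unlimited` while capping `nofile` and `nproc` at 1048576, and emits soft+hard entries for both `root` and `*` plus a matching systemd `DefaultLimit*` line. A small TypeScript sketch of just that expansion (the file writes themselves are omitted):

```ts
// Sketch of the per-limit expansion done by check_ulimit.
const LIMITS = [
  "core", "data", "fsize", "memlock", "nofile", "rss", "stack",
  "cpu", "nproc", "as", "locks", "sigpending", "msgqueue",
];

export function ulimitConfigLines(): { limitsConf: string[]; systemdConf: string[] } {
  const limitsConf: string[] = [];
  const systemdConf: string[] = [];
  for (const limit of LIMITS) {
    const value = limit === "nofile" || limit === "nproc" ? "1048576" : "unlimited";
    for (const user of ["root", "*"]) {
      limitsConf.push(`${user} soft ${limit} ${value}`);
      limitsConf.push(`${user} hard ${limit} ${value}`);
    }
    systemdConf.push(`DefaultLimit${limit.toUpperCase()}=${value}`);
  }
  return { limitsConf, systemdConf };
}
```

For example, `nofile` expands to `root soft nofile 1048576`, `root hard nofile 1048576`, `* soft nofile 1048576`, `* hard nofile 1048576`, and `DefaultLimitNOFILE=1048576`.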
package_manager() {
case "$pm" in
apt)
@@ -602,6 +682,14 @@ install_nodejs_headers() {
}
install_bun() {
case "$pm" in
apk)
install_packages \
libgcc \
libstdc++
;;
esac
bash="$(require bash)"
script=$(download_file "https://bun.sh/install")
@@ -615,7 +703,10 @@ install_bun() {
;;
esac
link_to_bin "$home/.bun/bin"
move_to_bin "$home/.bun/bin/bun"
bun_path="$(which bun)"
bunx_path="$(dirname "$bun_path")/bunx"
execute_sudo ln -sf "$bun_path" "$bunx_path"
}
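
A brief sketch of the new `bunx` step at the end of `install_bun`: once `bun` resolves on PATH, a `bunx` symlink is created next to it so both commands point at the same binary. The helper name below is hypothetical; the `ln -sf` via sudo comes from the diff.

```ts
// Sketch: create a bunx symlink next to an installed bun binary.
import { dirname, join } from "node:path";
import { execFileSync } from "node:child_process";

export function linkBunx(bunPath: string): string {
  const bunxPath = join(dirname(bunPath), "bunx");
  execFileSync("sudo", ["ln", "-sf", bunPath, bunxPath], { stdio: "inherit" });
  return bunxPath;
}
```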
install_cmake() {
@@ -628,14 +719,14 @@ install_cmake() {
cmake_version="3.30.5"
case "$arch" in
x64)
url="https://github.com/Kitware/CMake/releases/download/v$cmake_version/cmake-$cmake_version-linux-x86_64.sh"
cmake_url="https://github.com/Kitware/CMake/releases/download/v$cmake_version/cmake-$cmake_version-linux-x86_64.sh"
;;
aarch64)
url="https://github.com/Kitware/CMake/releases/download/v$cmake_version/cmake-$cmake_version-linux-aarch64.sh"
cmake_url="https://github.com/Kitware/CMake/releases/download/v$cmake_version/cmake-$cmake_version-linux-aarch64.sh"
;;
esac
script=$(download_file "$url")
execute_sudo "$sh" "$script" \
cmake_script=$(download_file "$cmake_url")
execute_sudo "$sh" "$cmake_script" \
--skip-license \
--prefix=/usr
;;
@@ -732,13 +823,13 @@ install_llvm() {
case "$pm" in
apt)
bash="$(require bash)"
script="$(download_file "https://apt.llvm.org/llvm.sh")"
llvm_script="$(download_file "https://apt.llvm.org/llvm.sh")"
case "$distro-$release" in
ubuntu-24*)
execute_sudo "$bash" "$script" "$(llvm_version)" all -njammy
execute_sudo "$bash" "$llvm_script" "$(llvm_version)" all -njammy
;;
*)
execute_sudo "$bash" "$script" "$(llvm_version)" all
execute_sudo "$bash" "$llvm_script" "$(llvm_version)" all
;;
esac
;;
@@ -779,11 +870,6 @@ install_rust() {
execute_as_user "$sh" "$script" -y
;;
esac
# FIXME: This causes cargo to fail to build:
# > error: rustup could not choose a version of cargo to run,
# > because one wasn't specified explicitly, and no default is configured.
# link_to_bin "$home/.cargo/bin"
}
install_docker() {
@@ -796,7 +882,7 @@ install_docker() {
*)
case "$distro-$release" in
amzn-2 | amzn-1)
execute amazon-linux-extras install docker
execute_sudo amazon-linux-extras install docker
;;
amzn-* | alpine-*)
install_packages docker
@@ -832,8 +918,8 @@ install_tailscale() {
case "$os" in
linux)
sh="$(require sh)"
script=$(download_file "https://tailscale.com/install.sh")
execute "$sh" "$script"
tailscale_script=$(download_file "https://tailscale.com/install.sh")
execute "$sh" "$tailscale_script"
;;
darwin)
install_packages go
@@ -862,24 +948,39 @@ create_buildkite_user() {
esac
if [ -z "$(getent passwd "$user")" ]; then
execute_sudo useradd "$user" \
--system \
--no-create-home \
--home-dir "$home"
case "$distro" in
alpine)
execute_sudo addgroup \
--system "$group"
execute_sudo adduser "$user" \
--system \
--ingroup "$group" \
--shell "$(require sh)" \
--home "$home" \
--disabled-password
;;
*)
execute_sudo useradd "$user" \
--system \
--shell "$(require sh)" \
--no-create-home \
--home-dir "$home"
;;
esac
fi
if [ -n "$(getent group docker)" ]; then
execute_sudo usermod -aG docker "$user"
fi
paths="$home /var/cache/buildkite-agent /var/log/buildkite-agent /var/run/buildkite-agent /var/run/buildkite-agent/buildkite-agent.sock"
for path in $paths; do
buildkite_paths="$home /var/cache/buildkite-agent /var/log/buildkite-agent /var/run/buildkite-agent /var/run/buildkite-agent/buildkite-agent.sock"
for path in $buildkite_paths; do
execute_sudo mkdir -p "$path"
execute_sudo chown -R "$user:$group" "$path"
done
files="/var/run/buildkite-agent/buildkite-agent.pid"
for file in $files; do
buildkite_files="/var/run/buildkite-agent/buildkite-agent.pid"
for file in $buildkite_files; do
execute_sudo touch "$file"
execute_sudo chown "$user:$group" "$file"
done
@@ -890,19 +991,42 @@ install_buildkite() {
return
fi
bash="$(require bash)"
script="$(download_file "https://raw.githubusercontent.com/buildkite/agent/main/install.sh")"
tmp_dir="$(execute dirname "$script")"
HOME="$tmp_dir" execute "$bash" "$script"
buildkite_version="3.87.0"
case "$os-$arch" in
linux-aarch64)
buildkite_filename="buildkite-agent-linux-arm64-$buildkite_version.tar.gz"
;;
linux-x64)
buildkite_filename="buildkite-agent-linux-amd64-$buildkite_version.tar.gz"
;;
darwin-aarch64)
buildkite_filename="buildkite-agent-darwin-arm64-$buildkite_version.tar.gz"
;;
darwin-x64)
buildkite_filename="buildkite-agent-darwin-amd64-$buildkite_version.tar.gz"
;;
esac
buildkite_url="https://github.com/buildkite/agent/releases/download/v$buildkite_version/$buildkite_filename"
buildkite_filepath="$(download_file "$buildkite_url" "$buildkite_filename")"
buildkite_tmpdir="$(dirname "$buildkite_filepath")"
out_dir="$tmp_dir/.buildkite-agent"
execute_sudo mv -f "$out_dir/bin/buildkite-agent" "/usr/bin/buildkite-agent"
execute tar -xzf "$buildkite_filepath" -C "$buildkite_tmpdir"
move_to_bin "$buildkite_tmpdir/buildkite-agent"
execute rm -rf "$buildkite_tmpdir"
}
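
The rewritten `install_buildkite` pins an agent version and derives the tarball name from the host OS and architecture before downloading it from the GitHub release. A TypeScript sketch of that mapping, using only the names and URL pattern visible in the diff:

```ts
// Sketch of the pinned-version artifact naming used by install_buildkite.
const BUILDKITE_VERSION = "3.87.0";

type Os = "linux" | "darwin";
type Arch = "x64" | "aarch64";

export function buildkiteAgentUrl(os: Os, arch: Arch, version = BUILDKITE_VERSION): string {
  const archName = arch === "aarch64" ? "arm64" : "amd64";
  const filename = `buildkite-agent-${os}-${archName}-${version}.tar.gz`;
  return `https://github.com/buildkite/agent/releases/download/v${version}/${filename}`;
}

// e.g. buildkiteAgentUrl("linux", "aarch64") ->
// https://github.com/buildkite/agent/releases/download/v3.87.0/buildkite-agent-linux-arm64-3.87.0.tar.gz
```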
install_chrome_dependencies() {
install_chromium() {
# https://github.com/puppeteer/puppeteer/blob/main/docs/troubleshooting.md#chrome-doesnt-launch-on-linux
# https://github.com/puppeteer/puppeteer/blob/main/docs/troubleshooting.md#running-puppeteer-in-the-cloud
case "$pm" in
apk)
install_packages \
chromium \
nss \
freetype \
harfbuzz \
ttf-freefont
;;
apt)
install_packages \
fonts-liberation \
@@ -979,22 +1103,17 @@ install_chrome_dependencies() {
esac
}
raise_file_descriptor_limit() {
append_to_file_sudo /etc/security/limits.conf '* soft nofile 262144'
append_to_file_sudo /etc/security/limits.conf '* hard nofile 262144'
}
main() {
check_features "$@"
check_operating_system
check_inside_docker
check_user
check_ulimit
check_package_manager
create_buildkite_user
install_common_software
install_build_essentials
install_chrome_dependencies
raise_file_descriptor_limit # XXX: temporary
install_chromium
}
main "$@"


@@ -3,6 +3,7 @@
import { spawn as nodeSpawn } from "node:child_process";
import { existsSync, readFileSync, mkdirSync, cpSync, chmodSync } from "node:fs";
import { basename, join, resolve } from "node:path";
import { isCI, printEnvironment, startGroup } from "./utils.mjs";
// https://cmake.org/cmake/help/latest/manual/cmake.1.html#generate-a-project-buildsystem
const generateFlags = [
@@ -37,6 +38,10 @@ async function build(args) {
return spawn("pwsh", ["-NoProfile", "-NoLogo", "-File", shellPath, process.argv0, scriptPath, ...args]);
}
if (isCI) {
printEnvironment();
}
const env = {
...process.env,
FORCE_COLOR: "1",
@@ -102,7 +107,8 @@ async function build(args) {
const generateArgs = Object.entries(generateOptions).flatMap(([flag, value]) =>
flag.startsWith("-D") ? [`${flag}=${value}`] : [flag, value],
);
await spawn("cmake", generateArgs, { env }, "configuration");
await startGroup("CMake Configure", () => spawn("cmake", generateArgs, { env }));
const envPath = resolve(buildPath, ".env");
if (existsSync(envPath)) {
@@ -116,7 +122,8 @@ async function build(args) {
const buildArgs = Object.entries(buildOptions)
.sort(([a], [b]) => (a === "--build" ? -1 : a.localeCompare(b)))
.flatMap(([flag, value]) => [flag, value]);
await spawn("cmake", buildArgs, { env }, "compilation");
await startGroup("CMake Build", () => spawn("cmake", buildArgs, { env }));
printDuration("total", Date.now() - startTime);
}

scripts/check-node.sh Executable file

@@ -0,0 +1,20 @@
#!/bin/bash
i=0
j=0
for x in $(git ls-files test/js/node/test/parallel --exclude-standard --others | grep test-$1)
do
i=$((i+1))
echo ./$x
if timeout 2 $PWD/build/debug/bun-debug ./$x
then
j=$((j+1))
git add ./$x
fi
echo
echo
done
echo $i tests tested
echo $j tests passed

File diff suppressed because it is too large.


@@ -17,7 +17,6 @@ import {
accessSync,
appendFileSync,
readdirSync,
rmSync,
} from "node:fs";
import { spawn, spawnSync } from "node:child_process";
import { join, basename, dirname, relative, sep } from "node:path";
@@ -27,29 +26,36 @@ import {
getBuildUrl,
getEnv,
getFileUrl,
getLoggedInUserCount,
getShell,
getWindowsExitReason,
isBuildkite,
isCI,
isGithubAction,
isMacOS,
isWindows,
isX64,
printEnvironment,
startGroup,
tmpdir,
unzip,
} from "./utils.mjs";
import { userInfo } from "node:os";
let isQuiet = false;
const cwd = import.meta.dirname ? dirname(import.meta.dirname) : process.cwd();
const testsPath = join(cwd, "test");
const spawnTimeout = 5_000;
const testTimeout = 3 * 60_000;
const integrationTimeout = 5 * 60_000;
const napiTimeout = 10 * 60_000;
const { values: options, positionals: filters } = parseArgs({
allowPositionals: true,
options: {
["node-tests"]: {
type: "boolean",
default: false,
},
["exec-path"]: {
type: "string",
default: "bun",
@@ -58,6 +64,10 @@ const { values: options, positionals: filters } = parseArgs({
type: "string",
default: undefined,
},
["build-id"]: {
type: "string",
default: undefined,
},
["bail"]: {
type: "boolean",
default: false,
@@ -80,6 +90,10 @@ const { values: options, positionals: filters } = parseArgs({
multiple: true,
default: undefined,
},
["quiet"]: {
type: "boolean",
default: false,
},
["smoke"]: {
type: "string",
default: undefined,
@@ -91,6 +105,10 @@ const { values: options, positionals: filters } = parseArgs({
},
});
if (options["quiet"]) {
isQuiet = true;
}
/**
*
* @returns {Promise<TestResult[]>}
@@ -98,42 +116,17 @@ const { values: options, positionals: filters } = parseArgs({
async function runTests() {
let execPath;
if (options["step"]) {
downloadLoop: for (let i = 0; i < 10; i++) {
execPath = await getExecPathFromBuildKite(options["step"]);
for (let j = 0; j < 10; j++) {
const { error } = spawnSync(execPath, ["--version"], {
encoding: "utf-8",
timeout: spawnTimeout,
env: {
PATH: process.env.PATH,
BUN_DEBUG_QUIET_LOGS: 1,
},
});
if (!error) {
break downloadLoop;
}
const { code } = error;
if (code === "EBUSY") {
console.log("Bun appears to be busy, retrying...");
continue;
}
if (code === "UNKNOWN") {
console.log("Bun appears to be corrupted, downloading again...");
rmSync(execPath, { force: true });
continue downloadLoop;
}
}
}
execPath = await getExecPathFromBuildKite(options["step"], options["build-id"]);
} else {
execPath = getExecPath(options["exec-path"]);
}
console.log("Bun:", execPath);
!isQuiet && console.log("Bun:", execPath);
const revision = getRevision(execPath);
console.log("Revision:", revision);
!isQuiet && console.log("Revision:", revision);
const tests = getRelevantTests(testsPath);
console.log("Running tests:", tests.length);
!isQuiet && console.log("Running tests:", tests.length);
/** @type {VendorTest[] | undefined} */
let vendorTests;
@@ -142,7 +135,7 @@ async function runTests() {
vendorTests = await getVendorTests(cwd);
if (vendorTests.length) {
vendorTotal = vendorTests.reduce((total, { testPaths }) => total + testPaths.length + 1, 0);
console.log("Running vendor tests:", vendorTotal);
!isQuiet && console.log("Running vendor tests:", vendorTotal);
}
}
@@ -199,14 +192,43 @@ async function runTests() {
return result;
};
for (const path of [cwd, testsPath]) {
const title = relative(cwd, join(path, "package.json")).replace(/\\/g, "/");
await runTest(title, async () => spawnBunInstall(execPath, { cwd: path }));
if (!isQuiet) {
for (const path of [cwd, testsPath]) {
const title = relative(cwd, join(path, "package.json")).replace(/\\/g, "/");
await runTest(title, async () => spawnBunInstall(execPath, { cwd: path }));
}
}
if (results.every(({ ok }) => ok)) {
for (const testPath of tests) {
const title = relative(cwd, join(testsPath, testPath)).replace(/\\/g, "/");
if (title.startsWith("test/js/node/test/parallel/")) {
await runTest(title, async () => {
const { ok, error, stdout } = await spawnBun(execPath, {
cwd: cwd,
args: [title],
timeout: 10_000,
env: {
FORCE_COLOR: "0",
},
stdout: chunk => pipeTestStdout(process.stdout, chunk),
stderr: chunk => pipeTestStdout(process.stderr, chunk),
});
const mb = 1024 ** 3;
const stdoutPreview = stdout.slice(0, mb).split("\n").slice(0, 50).join("\n");
return {
testPath: title,
ok: ok,
status: ok ? "pass" : "fail",
error: error,
errors: [],
tests: [],
stdout: stdout,
stdoutPreview: stdoutPreview,
};
});
continue;
}
await runTest(title, async () => spawnBunTest(execPath, join("test", testPath)));
}
}
@@ -256,10 +278,10 @@ async function runTests() {
}
if (!isCI) {
console.log("-------");
console.log("passing", results.length - failedTests.length, "/", results.length);
!isQuiet && console.log("-------");
!isQuiet && console.log("passing", results.length - failedTests.length, "/", results.length);
for (const { testPath } of failedTests) {
console.log("-", testPath);
!isQuiet && console.log("-", testPath);
}
}
return results;
@@ -456,12 +478,14 @@ async function spawnBun(execPath, { args, cwd, timeout, env, stdout, stderr }) {
const path = addPath(dirname(execPath), process.env.PATH);
const tmpdirPath = mkdtempSync(join(tmpdir(), "buntmp-"));
const { username, homedir } = userInfo();
const shellPath = getShell();
const bunEnv = {
...process.env,
PATH: path,
TMPDIR: tmpdirPath,
USER: username,
HOME: homedir,
SHELL: shellPath,
FORCE_COLOR: "1",
BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: "1",
BUN_DEBUG_QUIET_LOGS: "1",
@@ -578,12 +602,9 @@ async function spawnBunTest(execPath, testPath, options = { cwd }) {
* @returns {number}
*/
function getTestTimeout(testPath) {
if (/integration|3rd_party|docker/i.test(testPath)) {
if (/integration|3rd_party|docker|bun-install-registry|v8/i.test(testPath)) {
return integrationTimeout;
}
if (/napi/i.test(testPath)) {
return napiTimeout;
}
return testTimeout;
}
@@ -774,7 +795,7 @@ function isJavaScriptTest(path) {
* @returns {boolean}
*/
function isTest(path) {
if (path.replaceAll(sep, "/").includes("/test-cluster-") && path.endsWith(".js")) return true;
if (path.replaceAll(sep, "/").startsWith("js/node/test/parallel/") && targetDoesRunNodeTests()) return true;
if (path.replaceAll(sep, "/").startsWith("js/node/cluster/test-") && path.endsWith(".ts")) return true;
return isTestStrict(path);
}
@@ -783,6 +804,11 @@ function isTestStrict(path) {
return isJavaScript(path) && /\.test|spec\./.test(basename(path));
}
function targetDoesRunNodeTests() {
if (isMacOS && isX64) return false;
return true;
}
/**
* @param {string} path
* @returns {boolean}
@@ -944,10 +970,14 @@ async function getVendorTests(cwd) {
* @returns {string[]}
*/
function getRelevantTests(cwd) {
const tests = getTests(cwd);
let tests = getTests(cwd);
const availableTests = [];
const filteredTests = [];
if (options["node-tests"]) {
tests = tests.filter(testPath => testPath.includes("js/node/test/parallel/"));
}
const isMatch = (testPath, filter) => {
return testPath.replace(/\\/g, "/").includes(filter);
};
@@ -964,7 +994,7 @@ function getRelevantTests(cwd) {
const includes = options["include"]?.flatMap(getFilter);
if (includes?.length) {
availableTests.push(...tests.filter(testPath => includes.some(filter => isMatch(testPath, filter))));
console.log("Including tests:", includes, availableTests.length, "/", tests.length);
!isQuiet && console.log("Including tests:", includes, availableTests.length, "/", tests.length);
} else {
availableTests.push(...tests);
}
@@ -979,7 +1009,7 @@ function getRelevantTests(cwd) {
availableTests.splice(index, 1);
}
}
console.log("Excluding tests:", excludes, excludedTests.length, "/", availableTests.length);
!isQuiet && console.log("Excluding tests:", excludes, excludedTests.length, "/", availableTests.length);
}
}
@@ -987,7 +1017,7 @@ function getRelevantTests(cwd) {
const maxShards = parseInt(options["max-shards"]);
if (filters?.length) {
filteredTests.push(...availableTests.filter(testPath => filters.some(filter => isMatch(testPath, filter))));
console.log("Filtering tests:", filteredTests.length, "/", availableTests.length);
!isQuiet && console.log("Filtering tests:", filteredTests.length, "/", availableTests.length);
} else if (options["smoke"] !== undefined) {
const smokePercent = parseFloat(options["smoke"]) || 0.01;
const smokeCount = Math.ceil(availableTests.length * smokePercent);
@@ -997,23 +1027,24 @@ function getRelevantTests(cwd) {
smokeTests.add(availableTests[randomIndex]);
}
filteredTests.push(...Array.from(smokeTests));
console.log("Smoking tests:", filteredTests.length, "/", availableTests.length);
!isQuiet && console.log("Smoking tests:", filteredTests.length, "/", availableTests.length);
} else if (maxShards > 1) {
for (let i = 0; i < availableTests.length; i++) {
if (i % maxShards === shardId) {
filteredTests.push(availableTests[i]);
}
}
console.log(
"Sharding tests:",
shardId,
"/",
maxShards,
"with tests",
filteredTests.length,
"/",
availableTests.length,
);
!isQuiet &&
console.log(
"Sharding tests:",
shardId,
"/",
maxShards,
"with tests",
filteredTests.length,
"/",
availableTests.length,
);
} else {
filteredTests.push(...availableTests);
}
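
The shard selection above is simple round-robin: test `i` belongs to shard `i % maxShards`. A short sketch of the same partitioning, assuming `shardId` and `maxShards` come from the runner's CLI options:

```ts
// Round-robin sharding as used above: item i belongs to shard (i % maxShards).
export function shard<T>(items: T[], shardId: number, maxShards: number): T[] {
  return items.filter((_, i) => i % maxShards === shardId);
}

// With 7 tests and maxShards = 3:
//   shard 0 -> tests 0, 3, 6
//   shard 1 -> tests 1, 4
//   shard 2 -> tests 2, 5
```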
@@ -1057,9 +1088,10 @@ function getExecPath(bunExe) {
/**
* @param {string} target
* @param {string} [buildId]
* @returns {Promise<string>}
*/
async function getExecPathFromBuildKite(target) {
async function getExecPathFromBuildKite(target, buildId) {
if (existsSync(target) || target.includes("/")) {
return getExecPath(target);
}
@@ -1067,23 +1099,27 @@ async function getExecPathFromBuildKite(target) {
const releasePath = join(cwd, "release");
mkdirSync(releasePath, { recursive: true });
const args = ["artifact", "download", "**", releasePath, "--step", target];
const buildId = process.env["BUILDKITE_ARTIFACT_BUILD_ID"];
if (buildId) {
args.push("--build", buildId);
}
await spawnSafe({
command: "buildkite-agent",
args,
});
let zipPath;
for (const entry of readdirSync(releasePath, { recursive: true, encoding: "utf-8" })) {
if (/^bun.*\.zip$/i.test(entry) && !entry.includes("-profile.zip")) {
zipPath = join(releasePath, entry);
break;
downloadLoop: for (let i = 0; i < 10; i++) {
const args = ["artifact", "download", "**", releasePath, "--step", target];
if (buildId) {
args.push("--build", buildId);
}
await spawnSafe({
command: "buildkite-agent",
args,
});
for (const entry of readdirSync(releasePath, { recursive: true, encoding: "utf-8" })) {
if (/^bun.*\.zip$/i.test(entry) && !entry.includes("-profile.zip")) {
zipPath = join(releasePath, entry);
break downloadLoop;
}
}
console.warn(`Waiting for ${target}.zip to be available...`);
await new Promise(resolve => setTimeout(resolve, i * 1000));
}
if (!zipPath) {
@@ -1092,13 +1128,15 @@ async function getExecPathFromBuildKite(target) {
await unzip(zipPath, releasePath);
for (const entry of readdirSync(releasePath, { recursive: true, encoding: "utf-8" })) {
const releaseFiles = readdirSync(releasePath, { recursive: true, encoding: "utf-8" });
for (const entry of releaseFiles) {
const execPath = join(releasePath, entry);
if (/bun(?:\.exe)?$/i.test(entry) && isExecutable(execPath)) {
if (/bun(?:\.exe)?$/i.test(entry) && statSync(execPath).isFile()) {
return execPath;
}
}
console.warn(`Found ${releaseFiles.length} files in ${releasePath}:`);
throw new Error(`Could not find executable from BuildKite: ${releasePath}`);
}
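
The new `downloadLoop` above retries the `buildkite-agent artifact download` up to ten times, waiting `i * 1000` ms between attempts before giving up. A generic TypeScript sketch of that retry shape; the function and parameter names here are illustrative, not part of the runner:

```ts
// Bounded retry with linear backoff, mirroring the artifact download loop:
// run the attempt, return its result if it produced one, otherwise wait
// i * 1000 ms and try again, up to `tries` attempts.
export async function retryUntil<T>(
  attempt: () => Promise<T | undefined>,
  { tries = 10 }: { tries?: number } = {},
): Promise<T | undefined> {
  for (let i = 0; i < tries; i++) {
    const result = await attempt();
    if (result !== undefined) return result;
    console.warn(`Attempt ${i + 1}/${tries} found nothing, waiting...`);
    await new Promise(resolve => setTimeout(resolve, i * 1000));
  }
  return undefined;
}
```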
@@ -1287,7 +1325,7 @@ function reportAnnotationToBuildKite({ label, content, style = "error", priority
const buildLabel = getTestLabel();
const buildUrl = getBuildUrl();
const platform = buildUrl ? `<a href="${buildUrl}">${buildLabel}</a>` : buildLabel;
let errorMessage = `<details><summary><a><code>${label}</code></a> - annotation error on ${platform}</summary>`;
let errorMessage = `<details><summary><code>${label}</code> - annotation error on ${platform}</summary>`;
if (stderr) {
errorMessage += `\n\n\`\`\`terminal\n${escapeCodeBlock(stderr)}\n\`\`\`\n\n</details>\n\n`;
}
@@ -1442,9 +1480,42 @@ export async function main() {
process.on(signal, () => onExit(signal));
}
printEnvironment();
if (!isQuiet) {
printEnvironment();
}
// FIXME: Some DNS tests hang unless we set the DNS server to 8.8.8.8
// It also appears to hang on 1.1.1.1, which could explain this issue:
// https://github.com/oven-sh/bun/issues/11136
if (isWindows && isCI) {
await spawn("pwsh", [
"-Command",
"Set-DnsClientServerAddress -InterfaceAlias 'Ethernet 4' -ServerAddresses ('8.8.8.8','8.8.4.4')",
]);
}
const results = await runTests();
const ok = results.every(({ ok }) => ok);
let waitForUser = false;
while (isCI) {
const userCount = getLoggedInUserCount();
if (!userCount) {
if (waitForUser) {
!isQuiet && console.log("No users logged in, exiting runner...");
}
break;
}
if (!waitForUser) {
startGroup("Summary");
console.warn(`Found ${userCount} users logged in, keeping the runner alive until logout...`);
waitForUser = true;
}
await new Promise(resolve => setTimeout(resolve, 60_000));
}
process.exit(getExitCode(ok ? "pass" : "fail"));
}

File diff suppressed because it is too large.


@@ -43,8 +43,6 @@ else if (Environment.isDebug)
std.fmt.comptimePrint(version_string ++ "-debug+{s}", .{Environment.git_sha_short})
else if (Environment.is_canary)
std.fmt.comptimePrint(version_string ++ "-canary.{d}+{s}", .{ Environment.canary_revision, Environment.git_sha_short })
else if (Environment.isTest)
std.fmt.comptimePrint(version_string ++ "-test+{s}", .{Environment.git_sha_short})
else
std.fmt.comptimePrint(version_string ++ "+{s}", .{Environment.git_sha_short});
@@ -68,7 +66,6 @@ else
"unknown";
pub inline fn getStartTime() i128 {
if (Environment.isTest) return 0;
return bun.start_time;
}
@@ -130,7 +127,20 @@ pub fn raiseIgnoringPanicHandler(sig: bun.SignalCode) noreturn {
Output.flush();
Output.Source.Stdio.restore();
// clear segfault handler
bun.crash_handler.resetSegfaultHandler();
// clear signal handler
if (bun.Environment.os != .windows) {
var sa: std.c.Sigaction = .{
.handler = .{ .handler = std.posix.SIG.DFL },
.mask = std.posix.empty_sigset,
.flags = std.posix.SA.RESETHAND,
};
_ = std.c.sigaction(@intFromEnum(sig), &sa, null);
}
// kill self
_ = std.c.raise(@intFromEnum(sig));
std.c.abort();
}


@@ -1,6 +0,0 @@
{
"compilerOptions": {
"moduleResolution": "node"
},
"include": ["./node_modules/peechy", "./schema.d.ts"]
}


@@ -1,10 +1,12 @@
#include "BakeGlobalObject.h"
#include "BakeSourceProvider.h"
#include "JSNextTickQueue.h"
#include "JavaScriptCore/GlobalObjectMethodTable.h"
#include "JavaScriptCore/JSInternalPromise.h"
#include "headers-handwritten.h"
#include "JavaScriptCore/JSModuleLoader.h"
#include "JavaScriptCore/Completion.h"
#include "JavaScriptCore/JSSourceCode.h"
extern "C" BunString BakeProdResolve(JSC::JSGlobalObject*, BunString a, BunString b);
@@ -72,6 +74,58 @@ JSC::Identifier bakeModuleLoaderResolve(JSC::JSGlobalObject* jsGlobal,
return Zig::GlobalObject::moduleLoaderResolve(jsGlobal, loader, key, referrer, origin);
}
static JSC::JSInternalPromise* rejectedInternalPromise(JSC::JSGlobalObject* globalObject, JSC::JSValue value)
{
JSC::VM& vm = globalObject->vm();
JSC::JSInternalPromise* promise = JSC::JSInternalPromise::create(vm, globalObject->internalPromiseStructure());
promise->internalField(JSC::JSPromise::Field::ReactionsOrResult).set(vm, promise, value);
promise->internalField(JSC::JSPromise::Field::Flags).set(vm, promise, JSC::jsNumber(promise->internalField(JSC::JSPromise::Field::Flags).get().asUInt32AsAnyInt() | JSC::JSPromise::isFirstResolvingFunctionCalledFlag | static_cast<unsigned>(JSC::JSPromise::Status::Rejected)));
return promise;
}
static JSC::JSInternalPromise* resolvedInternalPromise(JSC::JSGlobalObject* globalObject, JSC::JSValue value)
{
JSC::VM& vm = globalObject->vm();
JSC::JSInternalPromise* promise = JSC::JSInternalPromise::create(vm, globalObject->internalPromiseStructure());
promise->internalField(JSC::JSPromise::Field::ReactionsOrResult).set(vm, promise, value);
promise->internalField(JSC::JSPromise::Field::Flags).set(vm, promise, JSC::jsNumber(promise->internalField(JSC::JSPromise::Field::Flags).get().asUInt32AsAnyInt() | JSC::JSPromise::isFirstResolvingFunctionCalledFlag | static_cast<unsigned>(JSC::JSPromise::Status::Fulfilled)));
return promise;
}
extern "C" BunString BakeProdLoad(ProductionPerThread* perThreadData, BunString a);
JSC::JSInternalPromise* bakeModuleLoaderFetch(JSC::JSGlobalObject* globalObject,
JSC::JSModuleLoader* loader, JSC::JSValue key,
JSC::JSValue parameters, JSC::JSValue script)
{
Bake::GlobalObject* global = jsCast<Bake::GlobalObject*>(globalObject);
JSC::VM& vm = globalObject->vm();
auto scope = DECLARE_THROW_SCOPE(vm);
auto moduleKey = key.toWTFString(globalObject);
if (UNLIKELY(scope.exception()))
return rejectedInternalPromise(globalObject, scope.exception()->value());
if (moduleKey.startsWith("bake:/"_s)) {
if (LIKELY(global->m_perThreadData)) {
BunString source = BakeProdLoad(global->m_perThreadData, Bun::toString(moduleKey));
if (source.tag != BunStringTag::Dead) {
JSC::SourceOrigin origin = JSC::SourceOrigin(WTF::URL(moduleKey));
JSC::SourceCode sourceCode = JSC::SourceCode(Bake::SourceProvider::create(
source.toWTFString(),
origin,
WTFMove(moduleKey),
WTF::TextPosition(),
JSC::SourceProviderSourceType::Module));
return resolvedInternalPromise(globalObject, JSC::JSSourceCode::create(vm, WTFMove(sourceCode)));
}
return rejectedInternalPromise(globalObject, createTypeError(globalObject, makeString("Bundle does not have \""_s, moduleKey, "\". This is a bug in Bun's bundler."_s)));
}
return rejectedInternalPromise(globalObject, createTypeError(globalObject, "BakeGlobalObject does not have per-thread data configured"_s));
}
return Zig::GlobalObject::moduleLoaderFetch(globalObject, loader, key, parameters, script);
}
#define INHERIT_HOOK_METHOD(name) \
Zig::GlobalObject::s_globalObjectMethodTable.name
@@ -83,7 +137,7 @@ const JSC::GlobalObjectMethodTable GlobalObject::s_globalObjectMethodTable = {
INHERIT_HOOK_METHOD(shouldInterruptScriptBeforeTimeout),
bakeModuleLoaderImportModule,
bakeModuleLoaderResolve,
INHERIT_HOOK_METHOD(moduleLoaderFetch),
bakeModuleLoaderFetch,
INHERIT_HOOK_METHOD(moduleLoaderCreateImportMetaProperties),
INHERIT_HOOK_METHOD(moduleLoaderEvaluate),
INHERIT_HOOK_METHOD(promiseRejectionTracker),
@@ -155,4 +209,9 @@ extern "C" GlobalObject* BakeCreateProdGlobal(void* console)
return global;
}
extern "C" void BakeGlobalObject__attachPerThreadData(GlobalObject* global, ProductionPerThread* perThreadData)
{
global->m_perThreadData = perThreadData;
}
}; // namespace Bake


@@ -4,10 +4,14 @@
namespace Bake {
struct ProductionPerThread;
class GlobalObject : public Zig::GlobalObject {
public:
using Base = Zig::GlobalObject;
ProductionPerThread* m_perThreadData;
template<typename, JSC::SubspaceAccess mode> static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm)
{
if constexpr (mode == JSC::SubspaceAccess::Concurrently)


@@ -21,7 +21,7 @@ extern "C" JSC::EncodedJSValue BakeLoadInitialServerCode(GlobalObject* global, B
String string = "bake://server-runtime.js"_s;
JSC::SourceOrigin origin = JSC::SourceOrigin(WTF::URL(string));
JSC::SourceCode sourceCode = JSC::SourceCode(DevSourceProvider::create(
JSC::SourceCode sourceCode = JSC::SourceCode(SourceProvider::create(
source.toWTFString(),
origin,
WTFMove(string),
@@ -54,7 +54,7 @@ extern "C" JSC::EncodedJSValue BakeLoadServerHmrPatch(GlobalObject* global, BunS
String string = "bake://server.patch.js"_s;
JSC::SourceOrigin origin = JSC::SourceOrigin(WTF::URL(string));
JSC::SourceCode sourceCode = JSC::SourceCode(DevSourceProvider::create(
JSC::SourceCode sourceCode = JSC::SourceCode(SourceProvider::create(
source.toWTFString(),
origin,
WTFMove(string),
@@ -117,7 +117,7 @@ extern "C" JSC::EncodedJSValue BakeRegisterProductionChunk(JSC::JSGlobalObject*
String string = virtualPathName.toWTFString();
JSC::JSString* key = JSC::jsString(vm, string);
JSC::SourceOrigin origin = JSC::SourceOrigin(WTF::URL(string));
JSC::SourceCode sourceCode = JSC::SourceCode(DevSourceProvider::create(
JSC::SourceCode sourceCode = JSC::SourceCode(SourceProvider::create(
source.toWTFString(),
origin,
WTFMove(string),


@@ -6,20 +6,20 @@
namespace Bake {
class DevSourceProvider final : public JSC::StringSourceProvider {
class SourceProvider final : public JSC::StringSourceProvider {
public:
static Ref<DevSourceProvider> create(
static Ref<SourceProvider> create(
const String& source,
const JSC::SourceOrigin& sourceOrigin,
String&& sourceURL,
const TextPosition& startPosition,
JSC::SourceProviderSourceType sourceType
) {
return adoptRef(*new DevSourceProvider(source, sourceOrigin, WTFMove(sourceURL), startPosition, sourceType));
return adoptRef(*new SourceProvider(source, sourceOrigin, WTFMove(sourceURL), startPosition, sourceType));
}
private:
DevSourceProvider(
SourceProvider(
const String& source,
const JSC::SourceOrigin& sourceOrigin,
String&& sourceURL,


@@ -20,7 +20,7 @@ pub const Options = struct {
// Debugging features
dump_sources: ?[]const u8 = if (Environment.isDebug) ".bake-debug" else null,
dump_state_on_crash: ?bool = false,
dump_state_on_crash: ?bool = null,
verbose_watcher: bool = false,
};
@@ -314,7 +314,7 @@ pub fn init(options: Options) bun.JSOOM!*DevServer {
dev.framework = dev.framework.resolve(&dev.server_bundler.resolver, &dev.client_bundler.resolver, options.arena) catch {
if (dev.framework.is_built_in_react)
try bake.Framework.addReactInstallCommandNote(&dev.log);
return global.throwValue2(dev.log.toJSAggregateError(global, "Framework is missing required files!"));
return global.throwValue(dev.log.toJSAggregateError(global, "Framework is missing required files!"));
};
errdefer dev.route_lookup.clearAndFree(allocator);
@@ -623,7 +623,7 @@ fn ensureRouteIsBundled(
.data = switch (kind) {
.js_payload => .{ .js_payload = resp },
.server_handler => .{
.server_handler = (dev.server.?.DebugHTTPServer.prepareJsRequestContext(req, resp) orelse return)
.server_handler = (dev.server.?.DebugHTTPServer.prepareJsRequestContext(req, resp, null) orelse return)
.save(dev.vm.global, req, resp),
},
},
@@ -676,7 +676,7 @@ fn ensureRouteIsBundled(
.data = switch (kind) {
.js_payload => .{ .js_payload = resp },
.server_handler => .{
.server_handler = (dev.server.?.DebugHTTPServer.prepareJsRequestContext(req, resp) orelse return)
.server_handler = (dev.server.?.DebugHTTPServer.prepareJsRequestContext(req, resp, null) orelse return)
.save(dev.vm.global, req, resp),
},
},
@@ -904,6 +904,7 @@ fn startAsyncBundle(
.framework = dev.framework,
.client_bundler = &dev.client_bundler,
.ssr_bundler = &dev.ssr_bundler,
.plugins = dev.bundler_options.plugin,
} else @panic("TODO: support non-server components"),
allocator,
.{ .js = dev.vm.eventLoop() },
@@ -912,7 +913,6 @@ fn startAsyncBundle(
heap,
);
bv2.bun_watcher = dev.bun_watcher;
bv2.plugins = dev.bundler_options.plugin;
bv2.asynchronous = true;
{
@@ -1870,12 +1870,12 @@ pub fn IncrementalGraph(side: bake.Side) type {
/// exact size, instead of the log approach that dynamic arrays use.
stale_files: DynamicBitSetUnmanaged,
/// Start of the 'dependencies' linked list. These are the other files
/// that import used by this file. Walk this list to discover what
/// files are to be reloaded when something changes.
/// Start of a file's 'dependencies' linked list. These are the other
/// files that have imports to this file. Walk this list to discover
/// what files are to be reloaded when something changes.
first_dep: ArrayListUnmanaged(EdgeIndex.Optional),
/// Start of the 'imports' linked list. These are the files that this
/// file imports.
/// Start of a file's 'imports' linked lists. These are the files that
/// this file imports.
first_import: ArrayListUnmanaged(EdgeIndex.Optional),
/// `File` objects act as nodes in a directional many-to-many graph,
/// where edges represent the imports between modules. An 'dependency'
@@ -3319,7 +3319,7 @@ pub const SerializedFailure = struct {
}
};
const ErrorKind = enum(u8) {
pub const ErrorKind = enum(u8) {
// A log message. The `logger.Kind` is encoded here.
bundler_log_err = 0,
bundler_log_warn = 1,
@@ -4352,7 +4352,7 @@ fn dumpStateDueToCrash(dev: *DevServer) !void {
const filepath = std.fmt.bufPrintZ(&filepath_buf, "incremental-graph-crash-dump.{d}.html", .{std.time.timestamp()}) catch "incremental-graph-crash-dump.html";
const file = std.fs.cwd().createFileZ(filepath, .{}) catch |err| {
bun.handleErrorReturnTrace(err, @errorReturnTrace());
Output.warn("Could not open directory for dumping sources: {}", .{err});
Output.warn("Could not open file for dumping incremental graph: {}", .{err});
return;
};
defer file.close();


@@ -1144,7 +1144,7 @@ pub const JSFrameworkRouter = struct {
}),
);
}
return global.throwValue2(global.createAggregateErrorWithArray(
return global.throwValue(global.createAggregateErrorWithArray(
bun.String.static("Errors scanning routes"),
arr,
));
@@ -1232,7 +1232,6 @@ pub const JSFrameworkRouter = struct {
pub fn finalize(this: *JSFrameworkRouter) void {
this.files.deinit(bun.default_allocator);
this.router.deinit(bun.default_allocator);
bun.default_allocator.free(this.router.types);
for (this.stored_parse_errors.items) |i| bun.default_allocator.free(i.rel_path);
this.stored_parse_errors.deinit(bun.default_allocator);
bun.destroy(this);
@@ -1255,8 +1254,7 @@ pub const JSFrameworkRouter = struct {
var log = TinyLog.empty;
const parsed = style.parse(filepath.slice(), std.fs.path.extension(filepath.slice()), &log, true, alloc) catch |err| switch (err) {
error.InvalidRoutePattern => {
global.throw("{s} ({d}:{d})", .{ log.msg.slice(), log.cursor_at, log.cursor_len });
return error.JSError;
return global.throw("{s} ({d}:{d})", .{ log.msg.slice(), log.cursor_at, log.cursor_len });
},
else => |e| return e,
} orelse

src/bake/bake.bind.ts Normal file

@@ -0,0 +1,9 @@
// import { t } from "bindgen";
// export const ReactFastRefresh = t.dictionary({
// importSource: t.UTF8String,
// });
// export const FrameworkConfig = t.dictionary({
// reactFastRefresh: t.oneOf(t.boolean, ReactFastRefresh).default(false),
// });

src/bake/bake.d.ts vendored

@@ -421,7 +421,7 @@ declare module "bun" {
type GetParamIterator =
| AsyncIterable<Record<string, string>, GetParamsFinalOpts>
| Iterable<Record<string, string>, GetParamsFinalOpts>
| ({ pages: Array<Record<String, String>> } & GetParamsFinalOpts);
| ({ pages: Array<Record<string, string>> } & GetParamsFinalOpts);
type GetParamsFinalOpts = void | null | {
/**
@@ -516,7 +516,7 @@ declare module "bun" {
* Inject a module into the development server's runtime, to be loaded
* before all other user code.
*/
addPreload(module: string, side: 'client' | 'server'): void;
addPreload(...args: any): void;
}
declare interface OnLoadArgs {


@@ -141,7 +141,7 @@ pub const SplitBundlerOptions = struct {
_ = val;
},
.rejected => |err| {
return global.throwValue2(err);
return global.throwValue(err);
},
}
}
@@ -466,7 +466,7 @@ pub const Framework = struct {
} else if (exts_js.isArray()) {
var it_2 = exts_js.arrayIterator(global);
var i_2: usize = 0;
const extensions = try arena.alloc([]const u8, array.getLength(global));
const extensions = try arena.alloc([]const u8, exts_js.getLength(global));
while (it_2.next()) |array_item| : (i_2 += 1) {
const slice = refs.track(try array_item.toSlice2(global, arena));
if (bun.strings.eqlComptime(slice, "*"))
@@ -600,9 +600,13 @@ pub const Framework = struct {
out.options.framework = framework;
// In development mode, source maps must always be `linked`
// In production, TODO: follow user configuration
out.options.source_map = .linked;
out.options.source_map = switch (mode) {
// Source maps must always be linked, as DevServer special cases the
// linking and part of the generation of these.
.development => .external,
// TODO: follow user configuration
else => .none,
};
out.configureLinker();
try out.configureDefines();
@@ -615,8 +619,10 @@ pub const Framework = struct {
});
if (mode != .development) {
out.options.entry_naming = "[name]-[hash].[ext]";
out.options.chunk_naming = "chunk-[name]-[hash].[ext]";
// Hide information about the source repository, at the cost of debugging quality.
out.options.entry_naming = "_bun/[hash].[ext]";
out.options.chunk_naming = "_bun/[hash].[ext]";
out.options.asset_naming = "_bun/[hash].[ext]";
}
out.resolver.opts = out.options;


@@ -5,8 +5,8 @@
import * as React from "react";
import { hydrateRoot } from "react-dom/client";
import { createFromReadableStream } from "react-server-dom-bun/client.browser";
import { onServerSideReload } from 'bun:bake/client';
import { flushSync } from 'react-dom';
import { onServerSideReload } from "bun:bake/client";
import { flushSync } from "react-dom";
const te = new TextEncoder();
const td = new TextDecoder();
@@ -74,7 +74,7 @@ const Root = () => {
const root = hydrateRoot(document, <Root />, {
onUncaughtError(e) {
console.error(e);
}
},
});
// Keep a cache of page objects to avoid re-fetching a page when pressing the
@@ -118,7 +118,7 @@ const firstPageId = Date.now();
// This is done client-side because a React error will unmount all elements.
const sheet = new CSSStyleSheet();
document.adoptedStyleSheets.push(sheet);
sheet.replaceSync(':where(*)::view-transition-group(root){animation:none}');
sheet.replaceSync(":where(*)::view-transition-group(root){animation:none}");
}
}
@@ -142,10 +142,9 @@ async function goto(href: string, cacheId?: number) {
if (cached) {
currentCssList = cached.css;
await ensureCssIsReady(currentCssList);
setPage?.(rscPayload = cached.element);
setPage?.((rscPayload = cached.element));
console.log("cached", cached);
if (olderController?.signal.aborted === false)
abortOnRender = olderController;
if (olderController?.signal.aborted === false) abortOnRender = olderController;
return;
}
@@ -199,7 +198,7 @@ async function goto(href: string, cacheId?: number) {
// Save this promise so that pressing the back button in the browser navigates
// to the same instance of the old page, instead of re-fetching it.
if (cacheId) {
cachedPages.set(cacheId, { css: currentCssList, element: p });
cachedPages.set(cacheId, { css: currentCssList!, element: p });
}
// Defer aborting a previous request until VERY late. If a previous stream is
@@ -214,8 +213,7 @@ async function goto(href: string, cacheId?: number) {
if (document.startViewTransition as unknown) {
document.startViewTransition(() => {
flushSync(() => {
if (thisNavigationId === lastNavigationId)
setPage(rscPayload = p);
if (thisNavigationId === lastNavigationId) setPage((rscPayload = p));
});
});
} else {
@@ -342,8 +340,8 @@ window.addEventListener("popstate", event => {
if (import.meta.env.DEV) {
// Frameworks can call `onServerSideReload` to hook into server-side hot
// module reloading.
onServerSideReload(async() => {
// module reloading.
onServerSideReload(async () => {
const newId = Date.now();
history.replaceState(newId, "", location.href);
await goto(location.href, newId);
@@ -355,7 +353,7 @@ if (import.meta.env.DEV) {
onServerSideReload,
get currentCssList() {
return currentCssList;
}
},
};
}
@@ -417,7 +415,7 @@ async function readCssMetadataFallback(stream: ReadableStream<Uint8Array>) {
}
if (chunks.length === 1) {
const first = chunks[0];
if(first.byteLength >= size) {
if (first.byteLength >= size) {
chunks[0] = first.subarray(size);
totalBytes -= size;
return first.subarray(0, size);
@@ -446,14 +444,14 @@ async function readCssMetadataFallback(stream: ReadableStream<Uint8Array>) {
return buffer;
}
};
const header = new Uint32Array(await readChunk(4))[0];
console.log('h', header);
const header = new Uint32Array(await readChunk(4))[0];
console.log("h", header);
if (header === 0) {
currentCssList = [];
} else {
currentCssList = td.decode(await readChunk(header)).split("\n");
}
console.log('cc', currentCssList);
console.log("cc", currentCssList);
if (chunks.length === 0) {
return stream;
}
@@ -474,6 +472,6 @@ async function readCssMetadataFallback(stream: ReadableStream<Uint8Array>) {
},
cancel() {
reader.cancel();
}
},
});
}


@@ -7,7 +7,7 @@ import type { Readable } from "node:stream";
import { EventEmitter } from "node:events";
import { createFromNodeStream, type Manifest } from "react-server-dom-bun/client.node.unbundled.js";
import { renderToPipeableStream } from "react-dom/server.node";
import { MiniAbortSignal } from "./server";
import type { MiniAbortSignal } from "./server";
// Verify that React 19 is being used.
if (!React.use) {

Some files were not shown because too many files have changed in this diff.