Compare commits

...

114 Commits

Author SHA1 Message Date
Cursor Agent
7606ed7008 Fix Node.js HTTP automatic headers for compatibility and test passing 2025-05-29 06:54:14 +00:00
Jarred Sumner
cfb8956ac5 Cursor config 2025-05-28 23:09:16 -07:00
190n
2bb36ca6b4 Fix crash initializing process stdio streams while process is overridden (#19978) 2025-05-28 22:57:59 -07:00
Jarred Sumner
24b3de1bc3 Fix net close event and add reconnect test (#19975)
Co-authored-by: Meghan Denny <meghan@bun.sh>
2025-05-28 22:27:52 -07:00
Jarred Sumner
b01ffe6da8 Fix pauseOnConnect semantics for node:net server (#19987) 2025-05-28 22:23:57 -07:00
Kai Tamkun
579f2ecd51 Add node:vm leak tests (#19947)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2025-05-28 22:23:30 -07:00
Jarred Sumner
627b0010e0 Fix Node net bytesWritten with pending strings (#19962)
Co-authored-by: Meghan Denny <meghan@bun.sh>
2025-05-28 22:21:28 -07:00
Jarred Sumner
3369e25a70 Update environment.json 2025-05-28 22:04:38 -07:00
Jarred Sumner
06a40f0b29 Configure cursor 2025-05-28 21:55:08 -07:00
Jarred Sumner
7989352b39 Add node server close test (#19972)
Co-authored-by: Meghan Denny <meghan@bun.sh>
2025-05-28 21:38:52 -07:00
Jarred Sumner
e1ab6fe36b Add net autoselectfamily default test (#19970)
Co-authored-by: Jarred-Sumner <709451+Jarred-Sumner@users.noreply.github.com>
2025-05-28 21:30:22 -07:00
Jarred Sumner
14f59568cc Fix net.listen backlog arg & add Node test (#19966)
Co-authored-by: Meghan Denny <meghan@bun.sh>
2025-05-28 21:23:35 -07:00
github-actions[bot]
1855836259 deps: update c-ares to v1.34.5 (#19897)
Co-authored-by: Jarred-Sumner <Jarred-Sumner@users.noreply.github.com>
2025-05-28 19:50:29 -07:00
Meghan Denny
c85cf136a5 test-http-get-pipeline-problem.js passes on windows (#19980) 2025-05-28 19:28:02 -07:00
Meghan Denny
4da85ac9c1 test-http2-compat-serverrequest-pipe.js passes on windows (#19981) 2025-05-28 19:27:41 -07:00
Meghan Denny
9248d81871 test-http2-trailers-after-session-close.js passes on windows (#19983) 2025-05-28 19:27:12 -07:00
Meghan Denny
ba21d6d54b test-require-long-path.js passes on windows (#19984) 2025-05-28 19:26:44 -07:00
Meghan Denny
32985591eb test-http2-pipe-named-pipe.js passes on windows (#19982) 2025-05-28 19:26:20 -07:00
Jarred Sumner
544d399980 Start splitting install.zig into a few more files (#19959)
Co-authored-by: Jarred-Sumner <709451+Jarred-Sumner@users.noreply.github.com>
2025-05-28 19:25:59 -07:00
Meghan Denny
809992229f node:net rework (#18962)
Co-authored-by: nektro <5464072+nektro@users.noreply.github.com>
2025-05-28 17:04:37 -07:00
190n
9a0624bd99 Delete files used by issue triage agent (#19955) 2025-05-28 12:07:47 -07:00
Dylan Conway
ec2c2281cf bump 2025-05-28 11:51:39 -07:00
Jarred Sumner
df017990aa Implement automatic workspace folders support for Chrome DevTools (#19949) 2025-05-28 00:25:30 -07:00
pfg
bf02d04479 Don't validate cookie strings passed in the CookieMap constructor (#19945)
Co-authored-by: pfgithub <6010774+pfgithub@users.noreply.github.com>
2025-05-27 20:14:21 -07:00
Dylan Conway
5910504aeb bun pm audit -> bun audit (#19944) 2025-05-27 19:52:18 -07:00
Meghan Denny
8759527feb zsh: fix syntax error in bun audit completion 2025-05-27 19:51:18 -07:00
Jarred Sumner
7b4b299be0 Move this up even more 2025-05-27 18:22:30 -07:00
Jarred Sumner
ff8c2dcbc4 Bump WebKit again (#19943)
Co-authored-by: Jarred-Sumner <709451+Jarred-Sumner@users.noreply.github.com>
2025-05-27 17:55:43 -07:00
Jarred Sumner
a275ed654b Move this code up 2025-05-27 16:57:43 -07:00
Jarred Sumner
7b164ee9de Fix async explicit resource management in browser builds (#19896)
Co-authored-by: Jarred-Sumner <709451+Jarred-Sumner@users.noreply.github.com>
Co-authored-by: Dylan Conway <dylan.conway567@gmail.com>
2025-05-27 16:45:54 -07:00
Varun Narravula
fc92921a4a fix: parse JSX namespace identifiers that have numbers in them (#19912)
Co-authored-by: Michael H <git@riskymh.dev>
Co-authored-by: Dylan Conway <dylan.conway567@gmail.com>
2025-05-27 15:45:11 -07:00
Jarred Sumner
44d04968cd Add a cursor rule (#19926)
Co-authored-by: Andrew Jefferson <8148776+eastlondoner@users.noreply.github.com>
2025-05-27 15:45:01 -07:00
Ben Grant
e6ab636313 disallow bash tool in issue triage agent 2025-05-27 11:24:44 -07:00
Yechao LI
325d0b1ed6 fix: correct function type for spyon an optional function (#19240)
Co-authored-by: Alistair Smith <hi@alistair.sh>
2025-05-26 21:58:10 -07:00
Jarred Sumner
a8e4489e10 Add docs for bun pm audit (#19885)
Co-authored-by: Alistair Smith <hi@alistair.sh>
Co-authored-by: alii <25351731+alii@users.noreply.github.com>
2025-05-26 21:56:32 -07:00
Alistair Smith
31980bc151 perf_hooks.Histogram (#19920) 2025-05-26 21:18:22 -07:00
Jarred Sumner
e58df65a75 Bump WebKit (#19882) 2025-05-26 18:56:32 -07:00
Pierre
6317d6498f fix: Add missing CryptoKeyPair global type (#19921)
Co-authored-by: Alistair Smith <hi@alistair.sh>
2025-05-26 18:17:29 -07:00
Alistair Smith
9e61b70535 test-net-socket-constructor.js (#19804) 2025-05-26 13:14:42 -07:00
Alistair Smith
58c1372b50 Implements Node.js behaviour for parallel/test-tls-set-ciphers-error.js (#19443) 2025-05-26 13:13:59 -07:00
familyboat
88840dcafa doc: remove redundant word "page" (#19915) 2025-05-26 12:45:41 -07:00
Jarred Sumner
793a9752c9 Update react.md 2025-05-25 13:16:09 -07:00
Jarred Sumner
8f08e84c1e Update react.md 2025-05-25 12:56:46 -07:00
Jarred Sumner
3605531e34 Remove empty page 2025-05-25 12:09:25 -07:00
Jarred Sumner
7dc58e0ce4 Add BUN_OPTIONS env var (#19766)
Co-authored-by: claude[bot] <209825114+claude[bot]@users.noreply.github.com>
Co-authored-by: Jarred-Sumner <Jarred-Sumner@users.noreply.github.com>
2025-05-24 13:36:51 -07:00
Sculas
15a58cca1c fix(windows): respect NO_COLOR in filter_run (#19888) 2025-05-24 13:33:16 -07:00
Jarred Sumner
a3fdfeb924 Add more to this list 2025-05-24 00:22:15 -07:00
Seth Flynn
c024e73e6a fix(BunRequest): make clone() return a BunRequest (#19813) 2025-05-23 23:37:47 -07:00
Kai Tamkun
392212b090 node:vm compatibility (#19703) 2025-05-23 22:59:58 -07:00
Jarred Sumner
3ea6133c46 CI: Remove unused top-level decls in formatter in zig (#19879)
Co-authored-by: Jarred-Sumner <709451+Jarred-Sumner@users.noreply.github.com>
Co-authored-by: graphite-app[bot] <96075541+graphite-app[bot]@users.noreply.github.com>
2025-05-23 22:49:48 -07:00
190n
5d84f8a102 delete flaky test-worker-uncaught-exception.js (#19857) 2025-05-23 22:49:09 -07:00
Alistair Smith
9e329ee605 bun pm audit (#19855) 2025-05-23 22:31:12 -07:00
Kai Tamkun
76f6574729 Fix memory leak in c_ares.zig Error.Deferred.rejectLater (#19871) 2025-05-23 20:54:50 -07:00
Jarred Sumner
50b938561a Normalize versions in bun pm view <pkg> versions like npm view does (#19870)
Co-authored-by: Dylan Conway <35280289+dylan-conway@users.noreply.github.com>
2025-05-23 18:10:56 -07:00
190n
3b75095f0c Use long timeout for bun test launch configurations (#19869) 2025-05-23 18:03:09 -07:00
Jarred Sumner
7b127c946d Fix regression from #19783 (#19837) 2025-05-23 17:49:36 -07:00
Ashcon Partovi
b9a63893fe ci: Fix permissions with gh CLI 2025-05-23 17:23:34 -07:00
Ashcon Partovi
ff1a35668f ci: Fix claude tool permissions 2025-05-23 17:17:04 -07:00
Ashcon Partovi
b36b4b2888 ci: Fix triage permissions 2025-05-23 17:13:55 -07:00
Ashcon Partovi
e7e5528632 ci: Fix triage workflow 2025-05-23 17:10:30 -07:00
Ashcon Partovi
9a5ff02420 ci: Fix anthropic auth in CI 2025-05-23 17:09:52 -07:00
Ashcon Partovi
4e9ee08a4a ci: Fix running claude in CI 2025-05-23 17:08:12 -07:00
Ashcon Partovi
e11ac9d1b8 ci: fix install claude in workflow 2025-05-23 17:06:05 -07:00
Ashcon Partovi
e9414966ca ci: Install claude for agent workflow 2025-05-23 17:01:38 -07:00
Ashcon Partovi
b2ae98865b ci: fix triage on linux 2025-05-23 16:58:30 -07:00
Ashcon Partovi
e8ed50cd9a ci: tweak how triage automation works 2025-05-23 16:56:14 -07:00
190n
9dd799d2e6 Implement Worker.getHeapSnapshot (#19706)
Co-authored-by: 190n <7763597+190n@users.noreply.github.com>
2025-05-23 16:50:13 -07:00
Meghan Denny
ba28eeece6 ci: add update-zstd.yml (#19812)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2025-05-23 16:49:37 -07:00
Meghan Denny
e9f908fcbf cmake: move SetupWebkit early-return so that WEBKIT_NAME is always printed (#19716) 2025-05-23 15:49:57 -07:00
Ashcon Partovi
654472f217 ci: fix triage automation trigger, again 2025-05-23 15:46:39 -07:00
Ashcon Partovi
5dcf99424c ci: fix triage automation trigger 2025-05-23 15:45:34 -07:00
Ashcon Partovi
ae91711010 ci: Fix triage workflow 2025-05-23 15:42:26 -07:00
Ashcon Partovi
ca6ba0fa2d ci: add triage automation (#19873) 2025-05-23 15:38:15 -07:00
Jarred Sumner
3195df8796 Remove leading period 2025-05-22 23:55:57 -07:00
Jarred Sumner
9d1eace981 Add bun pm view command (#19841)
Co-authored-by: Jarred-Sumner <709451+Jarred-Sumner@users.noreply.github.com>
2025-05-22 23:51:31 -07:00
Dylan Conway
8e80afbce1 Replace string runtime flags with enum (#19827) 2025-05-22 22:36:46 -07:00
190n
efb6b823c9 Strongly type GlobalObject::processObject() (#19826) 2025-05-22 21:48:48 -07:00
Jarred Sumner
6d348fa759 Add glob sources workflow (#19860) 2025-05-22 21:48:22 -07:00
Jarred Sumner
69be630aea WebKit Upgrade (#19839)
Co-authored-by: Jarred-Sumner <709451+Jarred-Sumner@users.noreply.github.com>
Co-authored-by: Ben Grant <ben@bun.sh>
Co-authored-by: 190n <7763597+190n@users.noreply.github.com>
2025-05-22 21:12:43 -07:00
Jarred Sumner
bca833ad59 Split lockfile.zig into a more logical directory structure (#19858) 2025-05-22 21:11:54 -07:00
Ciro Spaciari
ef9ea8ae1c fix(fetch) ignore trailers and add trailer tests (#19854) 2025-05-22 20:17:21 -07:00
Dylan Conway
a844957eb3 Use operationMathPow for parser constant folding (#19853) 2025-05-22 20:16:37 -07:00
Jarred Sumner
573927c4bf Add a cursor rule 2025-05-22 12:04:10 -07:00
190n
3e97c1caf3 restore bun bd and make it quiet (#19831) 2025-05-22 00:40:48 -07:00
Meghan Denny
b4450db807 Bump 2025-05-21 16:58:52 -07:00
Alistair Smith
6a363a38da node:net compat: Invalid port test for .listen (#19768) 2025-05-21 11:56:17 -07:00
Jarred Sumner
ffa286ef70 Update docs on workspaces and catalogs (#19815) 2025-05-21 11:38:37 -07:00
Seokho Song, dave@modusign
2fc8785868 Add x25519 elliptic curve cryptography to webcrypto (#19674) 2025-05-21 11:23:23 -07:00
Dylan Conway
8ddb92085b update bun.lock types for catalog(s) (#19814)
Co-authored-by: RiskyMH <git@riskymh.dev>
2025-05-21 00:22:18 -07:00
Jarred Sumner
4ca83be84f Add Zstd decompression to HTTP client (#19800)
Co-authored-by: Jarred-Sumner <709451+Jarred-Sumner@users.noreply.github.com>
Co-authored-by: Dylan Conway <35280289+dylan-conway@users.noreply.github.com>
2025-05-20 23:26:47 -07:00
Jarred Sumner
8aae534270 Fix Node browser fallbacks to have util.inherit and other size improvements (#19783)
Co-authored-by: Jarred-Sumner <709451+Jarred-Sumner@users.noreply.github.com>
2025-05-20 23:25:52 -07:00
Dylan Conway
98ee30eccf Implement catalogs in bun install (#19809)
Co-authored-by: graphite-app[bot] <96075541+graphite-app[bot]@users.noreply.github.com>
Co-authored-by: claude[bot] <209825114+claude[bot]@users.noreply.github.com>
Co-authored-by: Jarred-Sumner <Jarred-Sumner@users.noreply.github.com>
2025-05-20 23:03:21 -07:00
Jarred Sumner
562a65037d Bump zstd version (#19801)
Co-authored-by: Jarred-Sumner <709451+Jarred-Sumner@users.noreply.github.com>
2025-05-20 21:32:18 -07:00
pfg
beb1db967b Fix numeric header in node http server (#19811) 2025-05-20 21:32:07 -07:00
Jarred Sumner
0efbb29581 Do not use TCP cork in proxied HTTPS (#19794)
Co-authored-by: Jarred-Sumner <709451+Jarred-Sumner@users.noreply.github.com>
2025-05-20 21:27:17 -07:00
Ciro Spaciari
0e883c935c fix(install/fetch) proper handle proxy (#19771)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
Co-authored-by: Jarred-Sumner <709451+Jarred-Sumner@users.noreply.github.com>
Co-authored-by: cirospaciari <6379399+cirospaciari@users.noreply.github.com>
Co-authored-by: Meghan Denny <meghan@bun.sh>
2025-05-20 21:11:22 -07:00
Jarred Sumner
497360d543 Fix BroadcastChannel.unref() return value (#19810)
Co-authored-by: Jarred-Sumner <709451+Jarred-Sumner@users.noreply.github.com>
Co-authored-by: claude[bot] <209825114+claude[bot]@users.noreply.github.com>
2025-05-20 21:09:16 -07:00
190n
e23491391b bun run prettier (#19807)
Co-authored-by: 190n <7763597+190n@users.noreply.github.com>
2025-05-20 20:01:38 -07:00
190n
259bf47abd Add sourceMap to launch.json so lldb can find WebKit code (#19263) 2025-05-20 16:50:47 -07:00
190n
d1ac52da2c ci: use ARM EC2 instances for build-zig (#19781) 2025-05-20 12:41:06 -07:00
Ben Grant
1ebec90d6e Revert "Add test from #18287 (#19775)"
This reverts commit f1504c4265.
2025-05-20 12:22:01 -07:00
190n
f1504c4265 Add test from #18287 (#19775) 2025-05-20 11:56:30 -07:00
Ashcon Partovi
21f238a827 cmake: Move sources to their own folder (#19776) 2025-05-20 10:53:57 -07:00
Dylan Conway
33be08bde8 Fix RuntimeError.from return value (#19777)
Co-authored-by: claude[bot] <209825114+claude[bot]@users.noreply.github.com>
Co-authored-by: dylan-conway <dylan-conway@users.noreply.github.com>
2025-05-19 17:05:10 -07:00
Braden Everson
67b64c3334 Update TextDecoder's constructor to Handle Undefined (#19708)
Co-authored-by: Dylan Conway <35280289+dylan-conway@users.noreply.github.com>
2025-05-19 16:44:57 -07:00
Jarred Sumner
bfd12eeeba [bun install] Do not prefetch dns for npm registry if proxy (#19749) 2025-05-19 12:35:16 -07:00
Jarred Sumner
004ee11bed [internal builtins] Small typescript fix 2025-05-19 12:18:50 -07:00
Jarred Sumner
457c15e424 [bun install] Fix race condition when error occurs while extracting tarballs (#19751) 2025-05-19 11:55:34 -07:00
Jarred Sumner
815182799e [bun install] Don't save manifest cache when --no-cache is passed (#19752) 2025-05-19 11:39:39 -07:00
Jarred Sumner
a5cb42c407 Add Claude Code GitHub Workflow (#19769) 2025-05-19 11:28:20 -07:00
Jarred Sumner
0dade44a37 [internal] Add run:linux package.json script to run a command using a Linux debug build of bun 2025-05-19 06:56:16 -07:00
Jarred Sumner
09d3de918f Compress debug symbols in debug builds 2025-05-19 03:47:07 -07:00
Jarred Sumner
9e13a93215 [internal] Fix importing test/harness.ts without running bun install 2025-05-19 02:46:31 -07:00
Shubham Verma
2c5e9e5532 Removedep (#19737) 2025-05-18 06:13:37 -07:00
934 changed files with 36288 additions and 15323 deletions

78
.agent/agent.mjs Normal file
View File

@@ -0,0 +1,78 @@
import { spawnSync } from "node:child_process";
import { readFileSync, existsSync } from "node:fs";
import { parseArgs } from "node:util";

// Runs the `claude` CLI with a prompt assembled from a markdown file in
// `.agent/`, prefixed with the current GITHUB_* environment variables and
// suffixed with any extra CLI arguments.
//
// Usage: node agent.mjs <prompt_name> [extra_args...]
const { positionals, values } = parseArgs({
  allowPositionals: true,
  options: {
    help: {
      type: "boolean",
      short: "h",
      default: false,
    },
    interactive: {
      type: "boolean",
      short: "i",
      default: false,
    },
  },
});

if (values.help || positionals.length === 0) {
  console.log("Usage: node agent.mjs <prompt_name> [extra_args...]");
  console.log("Example: node agent.mjs triage fix bug in authentication");
  console.log("Options:");
  console.log(" -h, --help Show this help message");
  console.log(" -i, --interactive Run in interactive mode");
  process.exit(0);
}

// Prompt files are stored uppercase (the name is uppercased before lookup),
// so `node agent.mjs triage` resolves to `.agent/TRIAGE.md`.
const promptName = positionals[0].toUpperCase();
const promptFile = `.agent/${promptName}.md`;
const extraArgs = positionals.slice(1);

if (!existsSync(promptFile)) {
  console.error(`Error: Prompt file "${promptFile}" not found`);
  // Keep the examples consistent with the uppercase lookup above
  // (previously suggested lowercase names that would not be found on
  // case-sensitive filesystems).
  console.error(`Available prompts should be named like: .agent/TRIAGE.md, .agent/DEBUG.md, etc.`);
  process.exit(1);
}

try {
  let prompt = readFileSync(promptFile, "utf-8");

  // Surface the GitHub Actions context (event name, refs, etc.) to the agent
  // by prepending every GITHUB_* variable, sorted for stable output.
  const githubEnvs = Object.entries(process.env)
    .filter(([key]) => key.startsWith("GITHUB_"))
    .sort(([a], [b]) => a.localeCompare(b));
  if (githubEnvs.length > 0) {
    const githubContext = `## GitHub Environment\n\n${githubEnvs
      .map(([key, value]) => `**${key}**: \`${value}\``)
      .join("\n")}\n\n---\n\n`;
    prompt = githubContext + prompt;
  }

  // Any positionals after the prompt name are appended as free-form context.
  if (extraArgs.length > 0) {
    const extraArgsContext = `\n\n## Additional Arguments\n\n${extraArgs.join(" ")}\n\n---\n\n`;
    prompt = prompt + extraArgsContext;
  }

  const claudeArgs = [prompt, "--allowedTools=Edit,Write,Replace,Search", "--output-format=json"];
  if (!values.interactive) {
    // Non-interactive runs use --print so claude emits output and exits.
    claudeArgs.unshift("--print");
  }

  const { status, error } = spawnSync("claude", claudeArgs, {
    stdio: "inherit",
    encoding: "utf-8",
  });
  if (error) {
    // e.g. ENOENT when the `claude` binary is not installed.
    console.error("Error running claude:", error);
    process.exit(1);
  }
  // `status` is null when the child was killed by a signal; treat that as a
  // failure instead of silently exiting 0 (the old `status || 0` did).
  process.exit(status ?? 1);
} catch (error) {
  console.error(`Error reading prompt file "${promptFile}":`, error);
  process.exit(1);
}

View File

@@ -228,13 +228,7 @@ function getRetry(limit = 0) {
manual: {
permit_on_passed: true,
},
automatic: [
{ exit_status: 1, limit },
{ exit_status: -1, limit: 1 },
{ exit_status: 255, limit: 1 },
{ signal_reason: "cancel", limit: 1 },
{ signal_reason: "agent_stop", limit: 1 },
],
automatic: false,
};
}
@@ -331,16 +325,14 @@ function getZigAgent(platform, options) {
return getEc2Agent(
{
os: "linux",
arch: "x64",
arch: "aarch64",
abi: "musl",
distro: "alpine",
release: "3.21",
},
options,
{
instanceType: "c7i.2xlarge",
cpuCount: 4,
threadsPerCore: 1,
instanceType: "r8g.large",
},
);
}
@@ -574,7 +566,7 @@ function getTestBunStep(platform, options, testOptions = {}) {
retry: getRetry(),
cancel_on_build_failing: isMergeQueue(),
parallelism: unifiedTests ? undefined : os === "darwin" ? 2 : 10,
timeout_in_minutes: profile === "asan" ? 90 : 30,
timeout_in_minutes: profile === "asan" ? 45 : 30,
command:
os === "windows"
? `node .\\scripts\\runner.node.mjs ${args.join(" ")}`

3
.cursor/environment.json Normal file
View File

@@ -0,0 +1,3 @@
{
"terminals": []
}

View File

@@ -1,27 +1,13 @@
---
description: How to build Bun
globs:
globs:
alwaysApply: false
---
# How to build Bun
## CMake
Run:
Bun is built using CMake, which you can find in `CMakeLists.txt` and in the `cmake/` directory.
* `CMakeLists.txt`
* `cmake/`
* `Globals.cmake` - macros and functions used by all the other files
* `Options.cmake` - build options for configuring the build (e.g. debug/release mode)
* `CompilerFlags.cmake` - compiler and linker flags used by all the targets
* `tools/` - setup scripts for various build tools (e.g. llvm, zig, webkit, rust, etc.)
* `targets/` - targets for bun and its dependencies (e.g. brotli, boringssl, libuv, etc.)
## How to
There are `package.json` scripts that make it easy to build Bun without calling CMake directly, for example:
```sh
bun run build # builds a debug build: `build/debug/bun-debug`
bun run build:release # builds a release build: `build/release/bun`
bun run build:assert # builds a release build with debug assertions: `build/assert/bun`
```bash
bun bd
```

View File

@@ -0,0 +1,203 @@
# Registering Functions, Objects, and Modules in Bun
This guide documents the process of adding new functionality to the Bun global object and runtime.
## Overview
Bun's architecture exposes functionality to JavaScript through a set of carefully registered functions, objects, and modules. Most core functionality is implemented in Zig, with JavaScript bindings that make these features accessible to users.
There are several key ways to expose functionality in Bun:
1. **Global Functions**: Direct methods on the `Bun` object (e.g., `Bun.serve()`)
2. **Getter Properties**: Lazily initialized properties on the `Bun` object (e.g., `Bun.sqlite`)
3. **Constructor Classes**: Classes available through the `Bun` object (e.g., `Bun.ValkeyClient`)
4. **Global Modules**: Modules that can be imported directly (e.g., `import {X} from "bun:*"`)
## The Registration Process
Adding new functionality to Bun involves several coordinated steps across multiple files:
### 1. Implement the Core Functionality in Zig
First, implement your feature in Zig, typically in its own directory in `src/`. Examples:
- `src/valkey/` for Redis/Valkey client
- `src/semver/` for SemVer functionality
- `src/smtp/` for SMTP client
### 2. Create JavaScript Bindings
Create bindings that expose your Zig functionality to JavaScript:
- Create a class definition file (e.g., `js_bindings.classes.ts`) to define the JavaScript interface
- Implement `JSYourFeature` struct in a file like `js_your_feature.zig`
Example from a class definition file:
```typescript
// Example from a .classes.ts file
import { define } from "../../codegen/class-definitions";
export default [
define({
name: "YourFeature",
construct: true,
finalize: true,
hasPendingActivity: true,
memoryCost: true,
klass: {},
JSType: "0b11101110",
proto: {
yourMethod: {
fn: "yourZigMethod",
length: 1,
},
property: {
getter: "getProperty",
},
},
values: ["cachedValues"],
}),
];
```
### 3. Register with BunObject in `src/bun.js/bindings/BunObject+exports.h`
Add an entry to the `FOR_EACH_GETTER` macro:
```c
// In BunObject+exports.h
#define FOR_EACH_GETTER(macro) \
macro(CSRF) \
macro(CryptoHasher) \
... \
macro(YourFeature) \
```
### 4. Create a Getter Function in `src/bun.js/api/BunObject.zig`
Implement a getter function in `BunObject.zig` that returns your feature:
```zig
// In BunObject.zig
pub const YourFeature = toJSGetter(Bun.getYourFeatureConstructor);
// In the exportAll() function:
@export(&BunObject.YourFeature, .{ .name = getterName("YourFeature") });
```
### 5. Implement the Getter Function in a Relevant Zig File
Implement the function that creates your object:
```zig
// In your main module file (e.g., src/your_feature/your_feature.zig)
pub fn getYourFeatureConstructor(globalThis: *JSC.JSGlobalObject, _: *JSC.JSObject) JSC.JSValue {
return JSC.API.YourFeature.getConstructor(globalThis);
}
```
### 6. Add to Build System
Ensure your files are included in the build system by adding them to the appropriate targets.
## Example: Adding a New Module
Here's a comprehensive example of adding a hypothetical SMTP module:
1. Create implementation files in `src/smtp/`:
- `index.zig`: Main entry point that exports everything
- `SmtpClient.zig`: Core SMTP client implementation
- `js_smtp.zig`: JavaScript bindings
- `js_bindings.classes.ts`: Class definition
2. Define your JS class in `js_bindings.classes.ts`:
```typescript
import { define } from "../../codegen/class-definitions";
export default [
define({
name: "EmailClient",
construct: true,
finalize: true,
hasPendingActivity: true,
configurable: false,
memoryCost: true,
klass: {},
JSType: "0b11101110",
proto: {
send: {
fn: "send",
length: 1,
},
verify: {
fn: "verify",
length: 0,
},
close: {
fn: "close",
length: 0,
},
},
values: ["connectionPromise"],
}),
];
```
3. Add getter to `BunObject+exports.h`:
```c
#define FOR_EACH_GETTER(macro) \
macro(CSRF) \
... \
macro(SMTP) \
```
4. Add getter function to `BunObject.zig`:
```zig
pub const SMTP = toJSGetter(Bun.getSmtpConstructor);
// In exportAll:
@export(&BunObject.SMTP, .{ .name = getterName("SMTP") });
```
5. Implement getter in your module:
```zig
pub fn getSmtpConstructor(globalThis: *JSC.JSGlobalObject, _: *JSC.JSObject) JSC.JSValue {
return JSC.API.JSEmailClient.getConstructor(globalThis);
}
```
## Best Practices
1. **Follow Naming Conventions**: Align your naming with existing patterns
2. **Reference Existing Modules**: Study similar modules like Valkey or S3Client for guidance
3. **Memory Management**: Be careful with memory management and reference counting
4. **Error Handling**: Use `bun.JSError!JSValue` for proper error propagation
5. **Documentation**: Add JSDoc comments to your JavaScript bindings
6. **Testing**: Add tests for your new functionality
## Common Gotchas
- Be sure to handle reference counting properly with `ref()`/`deref()`
- Always implement proper cleanup in `deinit()` and `finalize()`
- For network operations, manage socket lifetimes correctly
- Use `JSC.Codegen` correctly to generate necessary binding code
## Related Files
- `src/bun.js/bindings/BunObject+exports.h`: Registration of getters and functions
- `src/bun.js/api/BunObject.zig`: Implementation of getters and object creation
- `src/bun.js/api/BunObject.classes.ts`: Class definitions
- `.cursor/rules/zig-javascriptcore-classes.mdc`: More details on class bindings
## Additional Resources
For more detailed information on specific topics:
- See `zig-javascriptcore-classes.mdc` for details on creating JS class bindings
- Review existing modules like `valkey`, `sqlite`, or `s3` for real-world examples

18
.github/CODEOWNERS vendored
View File

@@ -1,18 +1,18 @@
# Project
.github/CODEOWNERS @Jarred-Sumner
/.github/CODEOWNERS @Jarred-Sumner
# Build system
CMakeLists.txt @Electroid
cmake/ @Electroid
scripts/ @Electroid
/CMakeLists.txt @Electroid
/cmake/*.cmake @Electroid
/scripts/ @Electroid
# CI
.buildkite/ @Electroid
.github/workflows/ @Electroid
/.buildkite/ @Electroid
/.github/workflows/ @Electroid
# Debugger protocol
packages/bun-inspector-protocol/ @Electroid
packages/bun-debug-adapter-protocol/ @Electroid
/packages/bun-inspector-protocol/ @Electroid
/packages/bun-debug-adapter-protocol/ @Electroid
# Tests
test/expectations.txt @Jarred-Sumner
/test/expectations.txt @Jarred-Sumner

35
.github/workflows/claude.yml vendored Normal file
View File

@@ -0,0 +1,35 @@
name: Claude Code
on:
issue_comment:
types: [created]
pull_request_review_comment:
types: [created]
issues:
types: [opened, assigned]
pull_request_review:
types: [submitted]
jobs:
claude:
if: |
(github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude')) ||
(github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude')) ||
(github.event_name == 'pull_request_review' && contains(github.event.review.body, '@claude')) ||
(github.event_name == 'issues' && (contains(github.event.issue.body, '@claude') || contains(github.event.issue.title, '@claude')))
runs-on: ubuntu-latest
permissions:
contents: read
id-token: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Run Claude Code
id: claude
uses: anthropics/claude-code-action@beta
with:
anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}

View File

@@ -44,6 +44,7 @@ jobs:
version: 0.14.0
- name: Zig Format
run: |
bun scripts/zig-remove-unreferenced-top-level-decls.ts src/
zig fmt src/**.zig
- name: Commit
uses: stefanzweifel/git-auto-commit-action@v5

41
.github/workflows/glob-sources.yml vendored Normal file
View File

@@ -0,0 +1,41 @@
name: Glob Sources
permissions:
contents: write
on:
workflow_call:
workflow_dispatch:
pull_request:
env:
BUN_VERSION: "1.2.11"
jobs:
glob-sources:
name: Glob Sources
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Configure Git
run: |
git config --global core.autocrlf true
git config --global core.ignorecase true
git config --global core.precomposeUnicode true
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: ${{ env.BUN_VERSION }}
- name: Setup Dependencies
run: |
bun install
- name: Glob sources
run: bun scripts/glob-sources.mjs
- name: Commit
uses: stefanzweifel/git-auto-commit-action@v5
with:
commit_message: "`bun scripts/glob-sources.mjs`"

View File

@@ -50,12 +50,12 @@ jobs:
exit 1
fi
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/c-ares/c-ares/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/c-ares/c-ares/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1
fi
LATEST_SHA=$(curl -sL "https://api.github.com/repos/c-ares/c-ares/git/ref/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
LATEST_SHA=$(curl -sL "https://api.github.com/repos/c-ares/c-ares/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
exit 1

View File

@@ -50,7 +50,7 @@ jobs:
exit 1
fi
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/libarchive/libarchive/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/libarchive/libarchive/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1

View File

@@ -50,7 +50,7 @@ jobs:
exit 1
fi
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/ebiggers/libdeflate/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/ebiggers/libdeflate/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1

View File

@@ -50,7 +50,7 @@ jobs:
exit 1
fi
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1

View File

@@ -50,7 +50,7 @@ jobs:
exit 1
fi
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1

99
.github/workflows/update-zstd.yml vendored Normal file
View File

@@ -0,0 +1,99 @@
name: Update zstd
on:
schedule:
- cron: "0 1 * * 0"
workflow_dispatch:
jobs:
check-update:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- uses: actions/checkout@v4
- name: Check zstd version
id: check-version
run: |
set -euo pipefail
# Extract the commit hash from the line after COMMIT
CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildZstd.cmake)
if [ -z "$CURRENT_VERSION" ]; then
echo "Error: Could not find COMMIT line in BuildZstd.cmake"
exit 1
fi
# Validate that it looks like a git hash
if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
echo "Error: Invalid git hash format in BuildZstd.cmake"
echo "Found: $CURRENT_VERSION"
echo "Expected: 40 character hexadecimal string"
exit 1
fi
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/facebook/zstd/releases/latest)
if [ -z "$LATEST_RELEASE" ]; then
echo "Error: Failed to fetch latest release from GitHub API"
exit 1
fi
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
echo "Error: Could not extract tag name from GitHub API response"
exit 1
fi
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/facebook/zstd/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1
fi
LATEST_SHA=$(curl -sL "https://api.github.com/repos/facebook/zstd/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
exit 1
fi
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
echo "Error: Invalid SHA format received from GitHub"
echo "Found: $LATEST_SHA"
echo "Expected: 40 character hexadecimal string"
exit 1
fi
echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT
- name: Update version if needed
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
run: |
set -euo pipefail
# Handle multi-line format where COMMIT and its value are on separate lines
sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildZstd.cmake
- name: Create Pull Request
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
uses: peter-evans/create-pull-request@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
add-paths: |
cmake/targets/BuildZstd.cmake
commit-message: "deps: update zstd to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update zstd to ${{ steps.check-version.outputs.tag }}"
delete-branch: true
branch: deps/update-zstd-${{ github.run_number }}
body: |
## What does this PR do?
Updates zstd to version ${{ steps.check-version.outputs.tag }}
Compare: https://github.com/facebook/zstd/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-zstd.yml)

327
.vscode/launch.json generated vendored
View File

@@ -5,6 +5,9 @@
// - FORCE_COLOR=1 forces colors in the terminal
// - "${workspaceFolder}/test" is the cwd for `bun test` so it matches CI, we should fix this later
// - "cppvsdbg" is used instead of "lldb" on Windows, because "lldb" is too slow
// - Seeing WebKit files requires `vendor/WebKit` to exist and have code from the right commit.
// Run `bun sync-webkit-source` to ensure that folder is at the right commit. If you haven't
// cloned it at all, that script will suggest how.
"version": "0.2.0",
"configurations": [
// bun test [file]
@@ -13,7 +16,7 @@
"request": "launch",
"name": "bun test [file]",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${file}"],
"args": ["test", "--timeout=3600000", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -21,14 +24,21 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [file] --only",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "--only", "${file}"],
"args": ["test", "--timeout=3600000", "--only", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -36,20 +46,35 @@
"BUN_DEBUG_jest": "1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
},
{
"type": "lldb",
"name": "Attach",
"request": "attach",
"pid": "${command:pickMyProcess}",
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [file] (fast)",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${file}"],
"args": ["test", "--timeout=3600000", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -57,14 +82,21 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [file] (verbose)",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${file}"],
"args": ["test", "--timeout=3600000", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"BUN_DEBUG_QUIET_LOGS": "0",
@@ -72,14 +104,21 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [file] --watch",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "--watch", "${file}"],
"args": ["test", "--timeout=3600000", "--watch", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -87,14 +126,21 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [file] --hot",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "--hot", "${file}"],
"args": ["test", "--timeout=3600000", "--hot", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -102,14 +148,21 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [file] --inspect",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${file}"],
"args": ["test", "--timeout=3600000", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -118,7 +171,14 @@
"BUN_INSPECT": "ws://localhost:0/?wait=1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -130,7 +190,7 @@
"request": "launch",
"name": "bun test [file] --inspect-brk",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${file}"],
"args": ["test", "--timeout=3600000", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -139,7 +199,14 @@
"BUN_INSPECT": "ws://localhost:0/?break=1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -160,7 +227,14 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
},
{
"type": "lldb",
@@ -178,7 +252,14 @@
"GOMAXPROCS": "1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
},
{
"type": "lldb",
@@ -192,7 +273,14 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
},
{
"type": "lldb",
@@ -202,14 +290,18 @@
"args": ["run", "--watch", "${file}"],
"cwd": "${fileDirname}",
"env": {
// "BUN_DEBUG_DEBUGGER": "1",
// "BUN_DEBUG_INTERNAL_DEBUGGER": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
// "BUN_INSPECT": "ws+unix:///var/folders/jk/8fzl9l5119598vsqrmphsw7m0000gn/T/tl15npi7qtf.sock?report=1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
},
{
"type": "lldb",
@@ -223,7 +315,14 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
},
{
"type": "lldb",
@@ -239,7 +338,14 @@
"BUN_INSPECT": "ws://localhost:0/?wait=1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -260,7 +366,14 @@
"BUN_INSPECT": "ws://localhost:0/?break=1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -273,7 +386,7 @@
"request": "launch",
"name": "bun test [...]",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${input:testName}"],
"args": ["test", "--timeout=3600000", "${input:testName}"],
"cwd": "${workspaceFolder}",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -281,14 +394,21 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [...] (fast)",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${input:testName}"],
"args": ["test", "--timeout=3600000", "${input:testName}"],
"cwd": "${workspaceFolder}",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -296,14 +416,21 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [...] (verbose)",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${input:testName}"],
"args": ["test", "--timeout=3600000", "${input:testName}"],
"cwd": "${workspaceFolder}",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -311,14 +438,21 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [...] --watch",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "--watch", "${input:testName}"],
"args": ["test", "--timeout=3600000", "--watch", "${input:testName}"],
"cwd": "${workspaceFolder}",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -326,14 +460,21 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [...] --hot",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "--hot", "${input:testName}"],
"args": ["test", "--timeout=3600000", "--hot", "${input:testName}"],
"cwd": "${workspaceFolder}",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -341,14 +482,21 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [...] --inspect",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${input:testName}"],
"args": ["test", "--timeout=3600000", "${input:testName}"],
"cwd": "${workspaceFolder}",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -357,7 +505,14 @@
"BUN_INSPECT": "ws://localhost:0/?wait=1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -369,7 +524,7 @@
"request": "launch",
"name": "bun test [...] --inspect-brk",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${input:testName}"],
"args": ["test", "--timeout=3600000", "${input:testName}"],
"cwd": "${workspaceFolder}",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -378,7 +533,14 @@
"BUN_INSPECT": "ws://localhost:0/?break=1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -398,7 +560,14 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
},
// bun test [*]
{
@@ -413,7 +582,14 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
},
{
"type": "lldb",
@@ -427,7 +603,14 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
},
{
"type": "lldb",
@@ -442,7 +625,14 @@
"BUN_INSPECT": "ws://localhost:0/",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -461,7 +651,14 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
},
{
"type": "lldb",
@@ -475,7 +672,14 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"sourceMap": {
// macOS
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
// linux
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
},
},
// Windows: bun test [file]
{
@@ -486,7 +690,7 @@
"request": "launch",
"name": "Windows: bun test [file]",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${file}"],
"args": ["test", "--timeout=3600000", "${file}"],
"cwd": "${workspaceFolder}",
"environment": [
{
@@ -511,7 +715,7 @@
"request": "launch",
"name": "Windows: bun test --only [file]",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "--only", "${file}"],
"args": ["test", "--timeout=3600000", "--only", "${file}"],
"cwd": "${workspaceFolder}",
"environment": [
{
@@ -536,7 +740,7 @@
"request": "launch",
"name": "Windows: bun test [file] (fast)",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${file}"],
"args": ["test", "--timeout=3600000", "${file}"],
"cwd": "${workspaceFolder}",
"environment": [
{
@@ -561,7 +765,7 @@
"request": "launch",
"name": "Windows: bun test [file] (verbose)",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${file}"],
"args": ["test", "--timeout=3600000", "${file}"],
"cwd": "${workspaceFolder}",
"environment": [
{
@@ -586,7 +790,7 @@
"request": "launch",
"name": "Windows: bun test [file] --inspect",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${file}"],
"args": ["test", "--timeout=3600000", "${file}"],
"cwd": "${workspaceFolder}",
"environment": [
{
@@ -620,7 +824,7 @@
"request": "launch",
"name": "Windows: bun test [file] --inspect-brk",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${file}"],
"args": ["test", "--timeout=3600000", "${file}"],
"cwd": "${workspaceFolder}",
"environment": [
{
@@ -787,7 +991,7 @@
"request": "launch",
"name": "Windows: bun test [...]",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${input:testName}"],
"args": ["test", "--timeout=3600000", "${input:testName}"],
"cwd": "${workspaceFolder}",
"environment": [
{
@@ -812,7 +1016,7 @@
"request": "launch",
"name": "Windows: bun test [...] (fast)",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${input:testName}"],
"args": ["test", "--timeout=3600000", "${input:testName}"],
"cwd": "${workspaceFolder}",
"environment": [
{
@@ -837,7 +1041,7 @@
"request": "launch",
"name": "Windows: bun test [...] (verbose)",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${input:testName}"],
"args": ["test", "--timeout=3600000", "${input:testName}"],
"cwd": "${workspaceFolder}",
"environment": [
{
@@ -862,7 +1066,7 @@
"request": "launch",
"name": "Windows: bun test [...] --watch",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "--watch", "${input:testName}"],
"args": ["test", "--timeout=3600000", "--watch", "${input:testName}"],
"cwd": "${workspaceFolder}",
"environment": [
{
@@ -887,7 +1091,7 @@
"request": "launch",
"name": "Windows: bun test [...] --hot",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "--hot", "${input:testName}"],
"args": ["test", "--timeout=3600000", "--hot", "${input:testName}"],
"cwd": "${workspaceFolder}",
"environment": [
{
@@ -912,7 +1116,7 @@
"request": "launch",
"name": "Windows: bun test [...] --inspect",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${input:testName}"],
"args": ["test", "--timeout=3600000", "${input:testName}"],
"cwd": "${workspaceFolder}",
"environment": [
{
@@ -946,7 +1150,7 @@
"request": "launch",
"name": "Windows: bun test [...] --inspect-brk",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${input:testName}"],
"args": ["test", "--timeout=3600000", "${input:testName}"],
"cwd": "${workspaceFolder}",
"environment": [
{
@@ -1133,6 +1337,11 @@
"handle SIGPWR nostop noprint pass",
"source ${workspaceFolder}/misctools/gdb/std_gdb_pretty_printers.py",
"source ${workspaceFolder}/misctools/gdb/zig_gdb_pretty_printers.py",
"set substitute-path /webkitbuild/vendor/WebKit ${workspaceFolder}/vendor/WebKit",
"set substitute-path /webkitbuild/.WTF/Headers ${workspaceFolder}/vendor/WebKit/Source/WTF",
// uncomment if you like
// "set disassembly-flavor intel",
"set print asm-demangle",
],
},
],

View File

@@ -1,36 +1,36 @@
## bun tests
**IMPORTANT**: use the `bun agent` command instead of the `bun` command. For example:
**IMPORTANT**: use the `bun bd` command instead of the `bun` command. For example:
✅ Good
```sh
bun agent test internal/ban-words.test.ts
bun agent ./foo.ts
bun bd test internal/ban-words.test.ts
bun bd ./foo.ts
```
The `bun agent` command runs the DEBUG build. If you forget to run the debug build, your changes will not be reflected..
The `bun bd` command runs the DEBUG build. If you forget to run the debug build, your changes will not be reflected..
### Run a file
To run a file, you can use the `bun agent <file-path>` command.
To run a file, you can use the `bun bd <file-path>` command.
```sh
bun agent ./foo.ts
bun bd ./foo.ts
```
### Run tests
To run a single test, you need to use the `bun agent test <test-name>` command.
To run a single test, you need to use the `bun bd test <test-name>` command.
```sh
bun agent test internal/ban-words.test.ts
bun bd test internal/ban-words.test.ts
```
You must ALWAYS make sure to pass a file path to the `bun agent test <file-path>` command. DO NOT try to run ALL the tests at once unless you're in a specific subdirectory.
You must ALWAYS make sure to pass a file path to the `bun bd test <file-path>` command. DO NOT try to run ALL the tests at once unless you're in a specific subdirectory.
### Run a Node.js test
```sh
bun agent --silent node:test test-fs-link
bun bd --silent node:test test-fs-link
```

2
LATEST
View File

@@ -1 +1 @@
1.2.13
1.2.15

View File

@@ -482,7 +482,7 @@ STATIC_MUSL_FLAG ?=
WRAP_SYMBOLS_ON_LINUX =
ifeq ($(OS_NAME), linux)
WRAP_SYMBOLS_ON_LINUX = -Wl,--wrap=fcntl -Wl,--wrap=fcntl64 -Wl,--wrap=stat64 -Wl,--wrap=pow -Wl,--wrap=exp -Wl,--wrap=log -Wl,--wrap=log2 \
WRAP_SYMBOLS_ON_LINUX = -Wl,--wrap=fcntl -Wl,--wrap=fcntl64 -Wl,--wrap=stat64 -Wl,--wrap=pow -Wl,--wrap=exp -Wl,--wrap=exp2 -Wl,--wrap=log -Wl,--wrap=log2 \
-Wl,--wrap=lstat \
-Wl,--wrap=stat \
-Wl,--wrap=fstat \

View File

@@ -0,0 +1,58 @@
# Fix for Node.js HTTP Automatic Headers Issue (BUN-13559)
## Problem
The Node.js test `test-http-automatic-headers.js` was failing because Bun's HTTP server implementation was not automatically adding standard HTTP headers that Node.js adds by default:
- `connection: keep-alive` for HTTP/1.1 connections
- `content-length: 0` when no body is sent
- `date` header with current timestamp
The test was specifically failing on this assertion:
```javascript
assert.strictEqual(res.headers.connection, 'keep-alive');
```
## Root Cause
The issue was in the `NodeHTTPServer__writeHead` function in `src/bun.js/bindings/NodeHTTP.cpp`. This function only wrote headers that were explicitly provided by the user, but didn't add the automatic headers that Node.js adds by default.
## Solution
### Changes Made
1. **Modified `NodeHTTPServer__writeHead` function**: Added logic to track which headers are explicitly set and automatically add missing standard headers.
2. **Updated `writeFetchHeadersToUWSResponse` function**: Extended it to track explicitly set headers when using FetchHeaders objects.
3. **Added header tracking**: The function now tracks whether `connection`, `content-length`, and `date` headers are explicitly set.
4. **Added automatic header logic**: After processing all explicit headers, the function adds:
- `Connection: keep-alive` if not explicitly set
- `Content-Length: 0` if not explicitly set (for responses with no body)
- `Date: <current_timestamp>` if not explicitly set
### Files Modified
- `src/bun.js/bindings/NodeHTTP.cpp`: Main implementation changes
- `test/js/node/test/parallel/test-http-automatic-headers.js`: Copied Node.js test
### Technical Details
The fix ensures Node.js compatibility by:
1. **Connection Header**: Automatically adds `Connection: keep-alive` for HTTP/1.1 unless explicitly overridden
2. **Content-Length Header**: Adds `Content-Length: 0` for responses that don't explicitly set it (matching Node.js behavior for empty responses)
3. **Date Header**: Adds current GMT timestamp in RFC format
4. **Backward Compatibility**: Only adds headers when they're not explicitly set, preserving user-defined values
The implementation handles both regular JavaScript objects and FetchHeaders objects used for header management.
## Testing
The test `test/js/node/test/parallel/test-http-automatic-headers.js` now passes, verifying that:
- Custom headers (x-date, x-connection, x-content-length) are preserved
- Automatic headers (connection, content-length, date) are added when not explicitly set
- The behavior matches Node.js exactly
This fix improves Node.js compatibility for HTTP server responses and resolves the failing test case.

View File

@@ -88,7 +88,7 @@ endif()
if(UNIX)
register_compiler_flags(
DESCRIPTION "Enable debug symbols"
-g3 ${DEBUG}
-g3 -gz=zstd ${DEBUG}
-g1 ${RELEASE}
)

View File

@@ -48,6 +48,7 @@
"src/bun.js/bindings/v8/shim/*.cpp",
"src/bake/*.cpp",
"src/deps/*.cpp",
"src/vm/*.cpp",
"packages/bun-usockets/src/crypto/*.cpp"
]
},

View File

@@ -20,4 +20,4 @@ src/bake/hmr-runtime-client.ts
src/bake/hmr-runtime-error.ts
src/bake/hmr-runtime-server.ts
src/bake/server/stack-trace-stub.ts
src/bake/shared.ts
src/bake/shared.ts

View File

@@ -4,4 +4,4 @@ src/bun.js/api/BunObject.bind.ts
src/bun.js/bindgen_test.bind.ts
src/bun.js/bindings/NodeModuleModule.bind.ts
src/bun.js/node/node_os.bind.ts
src/fmt.bind.ts
src/fmt.bind.ts

View File

@@ -9,4 +9,4 @@ packages/bun-error/package.json
packages/bun-error/runtime-error.ts
packages/bun-error/sourcemap.ts
packages/bun-error/stack-trace-parser.ts
packages/bun-error/tsconfig.json
packages/bun-error/tsconfig.json

View File

@@ -9,4 +9,3 @@ packages/bun-usockets/src/socket.c
packages/bun-usockets/src/udp.c
src/bun.js/bindings/uv-posix-polyfills.c
src/bun.js/bindings/uv-posix-stubs.c
src/asan-config.c

View File

@@ -80,6 +80,9 @@ src/bun.js/bindings/JSEnvironmentVariableMap.cpp
src/bun.js/bindings/JSFFIFunction.cpp
src/bun.js/bindings/JSMockFunction.cpp
src/bun.js/bindings/JSNextTickQueue.cpp
src/bun.js/bindings/JSNodePerformanceHooksHistogram.cpp
src/bun.js/bindings/JSNodePerformanceHooksHistogramConstructor.cpp
src/bun.js/bindings/JSNodePerformanceHooksHistogramPrototype.cpp
src/bun.js/bindings/JSPropertyIterator.cpp
src/bun.js/bindings/JSS3File.cpp
src/bun.js/bindings/JSSocketAddressDTO.cpp
@@ -153,6 +156,9 @@ src/bun.js/bindings/NodeTLS.cpp
src/bun.js/bindings/NodeURL.cpp
src/bun.js/bindings/NodeValidator.cpp
src/bun.js/bindings/NodeVM.cpp
src/bun.js/bindings/NodeVMModule.cpp
src/bun.js/bindings/NodeVMScript.cpp
src/bun.js/bindings/NodeVMSourceTextModule.cpp
src/bun.js/bindings/NoOpForTesting.cpp
src/bun.js/bindings/ObjectBindings.cpp
src/bun.js/bindings/objects.cpp
@@ -409,6 +415,7 @@ src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA224.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA256.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA384.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA512.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmX25519.cpp
src/bun.js/bindings/webcrypto/CryptoDigest.cpp
src/bun.js/bindings/webcrypto/CryptoKey.cpp
src/bun.js/bindings/webcrypto/CryptoKeyAES.cpp
@@ -449,6 +456,7 @@ src/bun.js/bindings/webcrypto/JSRsaOaepParams.cpp
src/bun.js/bindings/webcrypto/JSRsaOtherPrimesInfo.cpp
src/bun.js/bindings/webcrypto/JSRsaPssParams.cpp
src/bun.js/bindings/webcrypto/JSSubtleCrypto.cpp
src/bun.js/bindings/webcrypto/JSX25519Params.cpp
src/bun.js/bindings/webcrypto/OpenSSLUtilities.cpp
src/bun.js/bindings/webcrypto/PhonyWorkQueue.cpp
src/bun.js/bindings/webcrypto/SerializedCryptoKeyWrapOpenSSL.cpp
@@ -463,4 +471,6 @@ src/bun.js/modules/NodeTTYModule.cpp
src/bun.js/modules/NodeUtilTypesModule.cpp
src/bun.js/modules/ObjectModule.cpp
src/deps/libuwsockets.cpp
src/io/io_darwin.cpp
src/io/io_darwin.cpp
src/vm/Semaphore.cpp
src/vm/SigintWatcher.cpp

View File

@@ -15,4 +15,4 @@ src/codegen/generate-jssink.ts
src/codegen/generate-node-errors.ts
src/codegen/helpers.ts
src/codegen/internal-module-registry-scanner.ts
src/codegen/replacements.ts
src/codegen/replacements.ts

View File

@@ -160,4 +160,4 @@ src/js/thirdparty/node-fetch.ts
src/js/thirdparty/undici.js
src/js/thirdparty/vercel_fetch.js
src/js/thirdparty/ws.js
src/js/wasi-runner.js
src/js/wasi-runner.js

View File

@@ -17,7 +17,8 @@ src/node-fallbacks/stream.js
src/node-fallbacks/string_decoder.js
src/node-fallbacks/sys.js
src/node-fallbacks/timers.js
src/node-fallbacks/timers.promises.js
src/node-fallbacks/tty.js
src/node-fallbacks/url.js
src/node-fallbacks/util.js
src/node-fallbacks/zlib.js
src/node-fallbacks/zlib.js

View File

@@ -20,4 +20,4 @@ src/bun.js/node/node.classes.ts
src/bun.js/resolve_message.classes.ts
src/bun.js/test/jest.classes.ts
src/bun.js/webcore/encoding.classes.ts
src/bun.js/webcore/response.classes.ts
src/bun.js/webcore/response.classes.ts

View File

@@ -86,8 +86,6 @@ src/bun.js/bindings/Exception.zig
src/bun.js/bindings/FetchHeaders.zig
src/bun.js/bindings/FFI.zig
src/bun.js/bindings/generated_classes_list.zig
src/bun.js/bindings/GeneratedBindings.zig
src/bun.js/bindings/GeneratedJS2Native.zig
src/bun.js/bindings/GetterSetter.zig
src/bun.js/bindings/HTTPServerAgent.zig
src/bun.js/bindings/JSArray.zig
@@ -222,6 +220,7 @@ src/bun.js/webcore/Response.zig
src/bun.js/webcore/S3Client.zig
src/bun.js/webcore/S3File.zig
src/bun.js/webcore/S3Stat.zig
src/bun.js/webcore/ScriptExecutionContext.zig
src/bun.js/webcore/Sink.zig
src/bun.js/webcore/streams.zig
src/bun.js/webcore/TextDecoder.zig
@@ -237,6 +236,7 @@ src/ci_info.zig
src/cli.zig
src/cli/add_command.zig
src/cli/add_completions.zig
src/cli/audit_command.zig
src/cli/build_command.zig
src/cli/bunx_command.zig
src/cli/colon_list_type.zig
@@ -256,6 +256,7 @@ src/cli/package_manager_command.zig
src/cli/patch_command.zig
src/cli/patch_commit_command.zig
src/cli/pm_trusted_command.zig
src/cli/pm_view_command.zig
src/cli/publish_command.zig
src/cli/remove_command.zig
src/cli/run_command.zig
@@ -432,15 +433,30 @@ src/identity_context.zig
src/import_record.zig
src/ini.zig
src/install/bin.zig
src/install/bun.lock.zig
src/install/dependency.zig
src/install/extract_tarball.zig
src/install/install.zig
src/install/integrity.zig
src/install/lifecycle_script_runner.zig
src/install/lockfile.zig
src/install/lockfile/Buffers.zig
src/install/lockfile/bun.lock.zig
src/install/lockfile/bun.lockb.zig
src/install/lockfile/CatalogMap.zig
src/install/lockfile/lockfile_json_stringify_for_debugging.zig
src/install/lockfile/OverrideMap.zig
src/install/lockfile/Package.zig
src/install/lockfile/Package/Meta.zig
src/install/lockfile/Package/Scripts.zig
src/install/lockfile/Package/WorkspaceMap.zig
src/install/lockfile/printer/tree_printer.zig
src/install/lockfile/printer/Yarn.zig
src/install/lockfile/Tree.zig
src/install/migration.zig
src/install/npm.zig
src/install/PackageManager/CommandLineArguments.zig
src/install/PackageManager/PackageJSONEditor.zig
src/install/PackageManager/PackageManagerOptions.zig
src/install/padding_checker.zig
src/install/patch_install.zig
src/install/repository.zig
@@ -607,4 +623,4 @@ src/windows.zig
src/work_pool.zig
src/workaround_missing_symbols.zig
src/wyhash.zig
src/zlib.zig
src/zlib.zig

View File

@@ -46,7 +46,7 @@ endif()
set(BUN_ERROR_SOURCE ${CWD}/packages/bun-error)
absolute_sources(BUN_ERROR_SOURCES ${CWD}/cmake/BunErrorSources.txt)
absolute_sources(BUN_ERROR_SOURCES ${CWD}/cmake/sources/BunErrorSources.txt)
set(BUN_ERROR_OUTPUT ${CODEGEN_PATH}/bun-error)
set(BUN_ERROR_OUTPUTS
@@ -135,7 +135,7 @@ register_command(
set(BUN_NODE_FALLBACKS_SOURCE ${CWD}/src/node-fallbacks)
absolute_sources(BUN_NODE_FALLBACKS_SOURCES ${CWD}/cmake/NodeFallbacksSources.txt)
absolute_sources(BUN_NODE_FALLBACKS_SOURCES ${CWD}/cmake/sources/NodeFallbacksSources.txt)
set(BUN_NODE_FALLBACKS_OUTPUT ${CODEGEN_PATH}/node-fallbacks)
set(BUN_NODE_FALLBACKS_OUTPUTS)
@@ -161,14 +161,9 @@ register_command(
CWD
${BUN_NODE_FALLBACKS_SOURCE}
COMMAND
${BUN_EXECUTABLE} x
esbuild ${ESBUILD_ARGS}
${BUN_EXECUTABLE} run build-fallbacks
${BUN_NODE_FALLBACKS_OUTPUT}
${BUN_NODE_FALLBACKS_SOURCES}
--outdir=${BUN_NODE_FALLBACKS_OUTPUT}
--format=esm
--minify
--bundle
--platform=browser
SOURCES
${BUN_NODE_FALLBACKS_SOURCES}
${BUN_NODE_FALLBACKS_NODE_MODULES}
@@ -235,7 +230,7 @@ register_command(
set(BUN_ZIG_GENERATED_CLASSES_SCRIPT ${CWD}/src/codegen/generate-classes.ts)
absolute_sources(BUN_ZIG_GENERATED_CLASSES_SOURCES ${CWD}/cmake/ZigGeneratedClassesSources.txt)
absolute_sources(BUN_ZIG_GENERATED_CLASSES_SOURCES ${CWD}/cmake/sources/ZigGeneratedClassesSources.txt)
set(BUN_ZIG_GENERATED_CLASSES_OUTPUTS
${CODEGEN_PATH}/ZigGeneratedClasses.h
@@ -268,8 +263,8 @@ register_command(
set(BUN_JAVASCRIPT_CODEGEN_SCRIPT ${CWD}/src/codegen/bundle-modules.ts)
absolute_sources(BUN_JAVASCRIPT_SOURCES ${CWD}/cmake/JavaScriptSources.txt)
absolute_sources(BUN_JAVASCRIPT_CODEGEN_SOURCES ${CWD}/cmake/JavaScriptCodegenSources.txt)
absolute_sources(BUN_JAVASCRIPT_SOURCES ${CWD}/cmake/sources/JavaScriptSources.txt)
absolute_sources(BUN_JAVASCRIPT_CODEGEN_SOURCES ${CWD}/cmake/sources/JavaScriptCodegenSources.txt)
list(APPEND BUN_JAVASCRIPT_CODEGEN_SOURCES
${CWD}/src/bun.js/bindings/InternalModuleRegistry.cpp
@@ -311,7 +306,7 @@ register_command(
set(BUN_BAKE_RUNTIME_CODEGEN_SCRIPT ${CWD}/src/codegen/bake-codegen.ts)
absolute_sources(BUN_BAKE_RUNTIME_SOURCES ${CWD}/cmake/BakeRuntimeSources.txt)
absolute_sources(BUN_BAKE_RUNTIME_SOURCES ${CWD}/cmake/sources/BakeRuntimeSources.txt)
list(APPEND BUN_BAKE_RUNTIME_CODEGEN_SOURCES
${CWD}/src/bun.js/bindings/InternalModuleRegistry.cpp
@@ -344,7 +339,7 @@ register_command(
set(BUN_BINDGEN_SCRIPT ${CWD}/src/codegen/bindgen.ts)
absolute_sources(BUN_BINDGEN_SOURCES ${CWD}/cmake/BindgenSources.txt)
absolute_sources(BUN_BINDGEN_SOURCES ${CWD}/cmake/sources/BindgenSources.txt)
set(BUN_BINDGEN_CPP_OUTPUTS
${CODEGEN_PATH}/GeneratedBindings.cpp
@@ -501,7 +496,7 @@ WEBKIT_ADD_SOURCE_DEPENDENCIES(
# --- Zig ---
absolute_sources(BUN_ZIG_SOURCES ${CWD}/cmake/ZigSources.txt)
absolute_sources(BUN_ZIG_SOURCES ${CWD}/cmake/sources/ZigSources.txt)
list(APPEND BUN_ZIG_SOURCES
${CWD}/build.zig
@@ -598,8 +593,8 @@ set_property(DIRECTORY APPEND PROPERTY CMAKE_CONFIGURE_DEPENDS "build.zig")
set(BUN_USOCKETS_SOURCE ${CWD}/packages/bun-usockets)
# hand written cpp source files. Full list of "source" code (including codegen) is in BUN_CPP_SOURCES
absolute_sources(BUN_CXX_SOURCES ${CWD}/cmake/CxxSources.txt)
absolute_sources(BUN_C_SOURCES ${CWD}/cmake/CSources.txt)
absolute_sources(BUN_CXX_SOURCES ${CWD}/cmake/sources/CxxSources.txt)
absolute_sources(BUN_C_SOURCES ${CWD}/cmake/sources/CSources.txt)
if(WIN32)
list(APPEND BUN_CXX_SOURCES ${CWD}/src/bun.js/bindings/windows/rescle.cpp)
@@ -749,7 +744,7 @@ target_include_directories(${bun} PRIVATE
${NODEJS_HEADERS_PATH}/include
)
if(NOT WIN32)
if(NOT WIN32)
target_include_directories(${bun} PRIVATE ${CWD}/src/bun.js/bindings/libuv)
endif()
@@ -882,7 +877,7 @@ if(NOT WIN32)
-Wno-nullability-completeness
-Werror
)
if(ENABLE_ASAN)
target_compile_options(${bun} PUBLIC
-fsanitize=address
@@ -940,6 +935,7 @@ if(LINUX)
if(NOT ABI STREQUAL "musl")
target_link_options(${bun} PUBLIC
-Wl,--wrap=exp
-Wl,--wrap=exp2
-Wl,--wrap=expf
-Wl,--wrap=fcntl64
-Wl,--wrap=log
@@ -1062,6 +1058,7 @@ set(BUN_DEPENDENCIES
TinyCC
Zlib
LibArchive # must be loaded after zlib
HdrHistogram # must be loaded after zlib
Zstd
)

View File

@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
c-ares/c-ares
COMMIT
4f4912bce7374f787b10576851b687935f018e17
d3a507e920e7af18a5efb7f9f1d8044ed4750013
)
register_cmake_command(

View File

@@ -0,0 +1,24 @@
register_repository(
NAME
hdrhistogram
REPOSITORY
HdrHistogram/HdrHistogram_c
COMMIT
652d51bcc36744fd1a6debfeb1a8a5f58b14022c
)
register_cmake_command(
TARGET
hdrhistogram
LIBRARIES
hdr_histogram_static
INCLUDES
include
LIB_PATH
src
ARGS
-DHDR_HISTOGRAM_BUILD_SHARED=OFF
-DHDR_HISTOGRAM_BUILD_STATIC=ON
-DHDR_LOG_REQUIRED=DISABLED
-DHDR_HISTOGRAM_BUILD_PROGRAMS=OFF
)

View File

@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
facebook/zstd
COMMIT
794ea1b0afca0f020f4e57b6732332231fb23c70
f8745da6ff1ad1e7bab384bd1f9d742439278e99
)
register_cmake_command(

View File

@@ -2,7 +2,7 @@ option(WEBKIT_VERSION "The version of WebKit to use")
option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading")
if(NOT WEBKIT_VERSION)
set(WEBKIT_VERSION eda8b0fb4fb1aa23db9c2b00933df8b58bcdd289)
set(WEBKIT_VERSION b98e20b11e6ab044f73218bdd05ab064587b9ead)
endif()
string(SUBSTRING ${WEBKIT_VERSION} 0 16 WEBKIT_VERSION_PREFIX)
@@ -41,14 +41,6 @@ if(WEBKIT_LOCAL)
return()
endif()
if(EXISTS ${WEBKIT_PATH}/package.json)
file(READ ${WEBKIT_PATH}/package.json WEBKIT_PACKAGE_JSON)
if(WEBKIT_PACKAGE_JSON MATCHES ${WEBKIT_VERSION})
return()
endif()
endif()
if(WIN32)
set(WEBKIT_OS "windows")
elseif(APPLE)
@@ -86,10 +78,18 @@ if(ENABLE_ASAN)
set(WEBKIT_SUFFIX "${WEBKIT_SUFFIX}-asan")
endif()
set(WEBKIT_NAME bun-webkit-${WEBKIT_OS}-${WEBKIT_ARCH}${WEBKIT_SUFFIX})
setx(WEBKIT_NAME bun-webkit-${WEBKIT_OS}-${WEBKIT_ARCH}${WEBKIT_SUFFIX})
set(WEBKIT_FILENAME ${WEBKIT_NAME}.tar.gz)
setx(WEBKIT_DOWNLOAD_URL https://github.com/oven-sh/WebKit/releases/download/autobuild-${WEBKIT_VERSION}/${WEBKIT_FILENAME})
if(EXISTS ${WEBKIT_PATH}/package.json)
file(READ ${WEBKIT_PATH}/package.json WEBKIT_PACKAGE_JSON)
if(WEBKIT_PACKAGE_JSON MATCHES ${WEBKIT_VERSION})
return()
endif()
endif()
file(DOWNLOAD ${WEBKIT_DOWNLOAD_URL} ${CACHE_PATH}/${WEBKIT_FILENAME} SHOW_PROGRESS)
file(ARCHIVE_EXTRACT INPUT ${CACHE_PATH}/${WEBKIT_FILENAME} DESTINATION ${CACHE_PATH} TOUCH)
file(REMOVE ${CACHE_PATH}/${WEBKIT_FILENAME})

View File

@@ -260,6 +260,7 @@ _bun_pm_completion() {
'hash\:"generate & print the hash of the current lockfile" '
'hash-string\:"print the string used to hash the lockfile" '
'hash-print\:"print the hash stored in the current lockfile" '
'audit\:"run a security audit of dependencies in Bun'\''s lockfile"'
'cache\:"print the path to the cache folder" '
)

View File

@@ -1 +0,0 @@
See the [`bun test`](https://bun.sh/docs/cli/test) documentation.

View File

@@ -206,6 +206,38 @@ Each call to `console.log` or `console.error` will be broadcast to the terminal
Internally, this reuses the existing WebSocket connection from hot module reloading to send the logs.
### Edit files in the browser
Bun's frontend dev server has support for [Automatic Workspace Folders](https://chromium.googlesource.com/devtools/devtools-frontend/+/main/docs/ecosystem/automatic_workspace_folders.md) in Chrome DevTools, which lets you save edits to files in the browser.
{% image src="/images/bun-chromedevtools.gif" alt="Bun's frontend dev server has support for Automatic Workspace Folders in Chrome DevTools, which lets you save edits to files in the browser." /%}
{% details summary="How it works" %}
Bun's dev server automatically adds a `/.well-known/appspecific/com.chrome.devtools.json` route to the server.
This route returns a JSON object with the following shape:
```json
{
"workspace": {
"root": "/path/to/your/project",
"uuid": "a-unique-identifier-for-this-workspace"
}
}
```
For security reasons, this is only enabled when:
1. The request is coming from localhost, 127.0.0.1, or ::1.
2. Hot Module Reloading is enabled.
3. The `chromeDevToolsAutomaticWorkspaceFolders` flag is set to `true` or `undefined`.
4. There are no other routes that match the request.
You can disable this by passing `development: { chromeDevToolsAutomaticWorkspaceFolders: false }` in `Bun.serve`'s options.
{% /details %}
## Keyboard Shortcuts
While the server is running:

View File

@@ -1,6 +1,6 @@
Use `bun publish` to publish a package to the npm registry.
`bun publish` will automatically pack your package into a tarball, strip workspace protocols from the `package.json` (resolving versions if necessary), and publish to the registry specified in your configuration files. Both `bunfig.toml` and `.npmrc` files are supported.
`bun publish` will automatically pack your package into a tarball, strip catalog and workspace protocols from the `package.json` (resolving versions if necessary), and publish to the registry specified in your configuration files. Both `bunfig.toml` and `.npmrc` files are supported.
```sh
## Publishing the package from the current working directory

View File

@@ -1,49 +1,50 @@
---
name: Use React and JSX
name: Build a React app with Bun
---
React just works with Bun. Bun supports `.jsx` and `.tsx` files out of the box.
Bun supports `.jsx` and `.tsx` files out of the box. React just works with Bun.
Remember that JSX is just a special syntax for including HTML-like syntax in JavaScript files. React uses JSX syntax, as do alternatives like [Preact](https://preactjs.com/) and [Solid](https://www.solidjs.com/). Bun's internal transpiler converts JSX syntax into vanilla JavaScript before execution.
---
Bun _assumes_ you're using React (unless you [configure it otherwise](https://bun.sh/docs/runtime/bunfig#jsx)) so a line like this:
```
const element = <h1>Hello, world!</h1>;
```
---
is internally converted into something like this:
```ts
// jsxDEV
import { jsx } from "react/jsx-dev-runtime";
const element = jsx("h1", { children: "Hello, world!" });
```
---
This code requires `react` to run, so make sure you've installed React.
Create a new React app with `bun init --react`. This gives you a template with a simple React app and a simple API server together in one full-stack app.
```bash
$ bun install react
# Create a new React app
$ bun init --react
# Run the app in development mode
$ bun dev
# Build as a static site for production
$ bun run build
# Run the server in production
$ bun start
```
---
Bun implements special logging for JSX components to make debugging easier.
### Hot Reloading
Run `bun dev` to start the app in development mode. This will start the API server and the React app with hot reloading.
### Full-Stack App
Run `bun start` to start the API server and frontend together in one process.
### Static Site
Run `bun run build` to build the app as a static site. This will create a `dist` directory with the built app and all the assets.
```bash
$ bun run log-my-component.tsx
<Component message="Hello world!" />
```
---
As far as "official support" for React goes, that's it. React is a library like any other, and Bun can run that library. Bun is not a framework, so you should use a framework like [Vite](https://vitejs.dev/) to build an app with server-side rendering and hot reloading in the browser.
Refer to [Runtime > JSX](https://bun.sh/docs/runtime/jsx) for complete documentation on configuring JSX.
├── src/
│ ├── index.tsx # Server entry point with API routes
│ ├── frontend.tsx # React app entry point with HMR
│ ├── App.tsx # Main React component
│ ├── APITester.tsx # Component for testing API endpoints
│ ├── index.html # HTML template
│ ├── index.css # Styles
│ └── *.svg # Static assets
├── package.json # Dependencies and scripts
├── tsconfig.json # TypeScript configuration
├── bunfig.toml # Bun configuration
└── bun.lock # Lock file
```

37
docs/install/audit.md Normal file
View File

@@ -0,0 +1,37 @@
`bun audit` checks your installed packages for known security vulnerabilities.
Run the command in a project with a `bun.lock` file:
```bash
$ bun audit
```
Bun sends the list of installed packages and their versions to the npm registry, then prints a report of any known vulnerabilities that were found. Packages installed from registries other than the default registry are skipped.
If no vulnerabilities are found, the command prints:
```
No vulnerabilities found
```
When vulnerabilities are detected, each affected package is listed along with the severity, a short description, and a link to the advisory. At the end of the report, Bun prints a summary and hints for updating:
```
3 vulnerabilities (1 high, 2 moderate)
To update all dependencies to the latest compatible versions:
bun update
To update all dependencies to the latest versions (including breaking changes):
bun update --latest
```
### `--json`
Use the `--json` flag to print the raw JSON response from the registry instead of the formatted report:
```bash
$ bun audit --json
```
### Exit code
`bun audit` exits with code `0` if no vulnerabilities are found and `1` if the report lists any vulnerabilities. The exit code is set the same way even when `--json` is passed.

296
docs/install/catalogs.md Normal file
View File

@@ -0,0 +1,296 @@
Catalogs in Bun provide a straightforward way to share common dependency versions across multiple packages in a monorepo. Rather than specifying the same versions repeatedly in each workspace package, you define them once in the root package.json and reference them consistently throughout your project.
## Overview
Unlike traditional dependency management where each workspace package needs to independently specify versions, catalogs let you:
1. Define version catalogs in the root package.json
2. Reference these versions with a simple `catalog:` protocol
3. Update all packages simultaneously by changing the version in just one place
This is especially useful in large monorepos where dozens of packages need to use the same version of key dependencies.
## How to Use Catalogs
### Directory Structure Example
Consider a monorepo with the following structure:
```
my-monorepo/
├── package.json
├── bun.lock
└── packages/
├── app/
│ └── package.json
├── ui/
│ └── package.json
└── utils/
└── package.json
```
### 1. Define Catalogs in Root package.json
In your root-level `package.json`, add a `catalog` or `catalogs` field within the `workspaces` object:
```json
{
"name": "my-monorepo",
"workspaces": {
"packages": ["packages/*"],
"catalog": {
"react": "^19.0.0",
"react-dom": "^19.0.0"
},
"catalogs": {
"testing": {
"jest": "30.0.0",
"testing-library": "14.0.0"
}
}
}
}
```
### 2. Reference Catalog Versions in Workspace Packages
In your workspace packages, use the `catalog:` protocol to reference versions:
**packages/app/package.json**
```json
{
"name": "app",
"dependencies": {
"react": "catalog:",
"react-dom": "catalog:",
"jest": "catalog:testing"
}
}
```
**packages/ui/package.json**
```json
{
"name": "ui",
"dependencies": {
"react": "catalog:",
"react-dom": "catalog:"
},
"devDependencies": {
"jest": "catalog:testing",
"testing-library": "catalog:testing"
}
}
```
### 3. Run Bun Install
Run `bun install` to install all dependencies according to the catalog versions.
## Catalog vs Catalogs
Bun supports two ways to define catalogs:
1. **`catalog`** (singular): A single default catalog for commonly used dependencies
```json
"catalog": {
"react": "^19.0.0",
"react-dom": "^19.0.0"
}
```
Reference with simply `catalog:`:
```json
"dependencies": {
"react": "catalog:"
}
```
2. **`catalogs`** (plural): Multiple named catalogs for grouping dependencies
```json
"catalogs": {
"testing": {
"jest": "30.0.0"
},
"ui": {
"tailwind": "4.0.0"
}
}
```
Reference with `catalog:<name>`:
```json
"dependencies": {
"jest": "catalog:testing",
"tailwind": "catalog:ui"
}
```
## Benefits of Using Catalogs
- **Consistency**: Ensures all packages use the same version of critical dependencies
- **Maintenance**: Update a dependency version in one place instead of across multiple package.json files
- **Clarity**: Makes it obvious which dependencies are standardized across your monorepo
- **Simplicity**: No need for complex version resolution strategies or external tools
## Real-World Example
Here's a more comprehensive example for a React application:
**Root package.json**
```json
{
"name": "react-monorepo",
"workspaces": {
"packages": ["packages/*"],
"catalog": {
"react": "^19.0.0",
"react-dom": "^19.0.0",
"react-router-dom": "^6.15.0"
},
"catalogs": {
"build": {
"webpack": "5.88.2",
"babel": "7.22.10"
},
"testing": {
"jest": "29.6.2",
"react-testing-library": "14.0.0"
}
}
},
"devDependencies": {
"typescript": "5.1.6"
}
}
```
**packages/app/package.json**
```json
{
"name": "app",
"dependencies": {
"react": "catalog:",
"react-dom": "catalog:",
"react-router-dom": "catalog:",
"@monorepo/ui": "workspace:*",
"@monorepo/utils": "workspace:*"
},
"devDependencies": {
"webpack": "catalog:build",
"babel": "catalog:build",
"jest": "catalog:testing",
"react-testing-library": "catalog:testing"
}
}
```
**packages/ui/package.json**
```json
{
"name": "@monorepo/ui",
"dependencies": {
"react": "catalog:",
"react-dom": "catalog:"
},
"devDependencies": {
"jest": "catalog:testing",
"react-testing-library": "catalog:testing"
}
}
```
**packages/utils/package.json**
```json
{
"name": "@monorepo/utils",
"dependencies": {
"react": "catalog:"
},
"devDependencies": {
"jest": "catalog:testing"
}
}
```
## Updating Versions
To update versions across all packages, simply change the version in the root package.json:
```json
"catalog": {
"react": "^19.1.0", // Updated from ^19.0.0
"react-dom": "^19.1.0" // Updated from ^19.0.0
}
```
Then run `bun install` to update all packages.
## Lockfile Integration
Bun's lockfile tracks catalog versions, making it easy to ensure consistent installations across different environments. The lockfile includes:
- The catalog definitions from your package.json
- The resolution of each cataloged dependency
```
// bun.lock (excerpt)
{
"lockfileVersion": 1,
"workspaces": {
"": {
"name": "react-monorepo",
},
"packages/app": {
"name": "app",
"dependencies": {
"react": "catalog:",
"react-dom": "catalog:",
...
},
},
...
},
"catalog": {
"react": "^19.0.0",
"react-dom": "^19.0.0",
...
},
"catalogs": {
"build": {
"webpack": "5.88.2",
...
},
...
},
"packages": {
...
}
}
```
## Limitations and Edge Cases
- Catalog references must match a dependency defined in either `catalog` or one of the named `catalogs`
- Empty strings and whitespace in catalog names are ignored (treated as the default catalog)
- Invalid dependency versions in catalogs will fail to resolve during `bun install`
- Catalogs are only available within workspaces; they cannot be used outside the monorepo
Bun's catalog system provides a powerful yet simple way to maintain consistency across your monorepo without introducing additional complexity to your workflow.
## Publishing
When you run `bun publish` or `bun pm pack`, Bun automatically replaces
`catalog:` references in your `package.json` with the resolved version numbers.
The published package includes regular semver strings and no longer depends on
your catalog definitions.

View File

@@ -83,6 +83,14 @@ Workspaces have a couple major benefits.
- **Dependencies can be de-duplicated.** If `a` and `b` share a common dependency, it will be _hoisted_ to the root `node_modules` directory. This reduces redundant disk usage and minimizes "dependency hell" issues associated with having multiple versions of a package installed simultaneously.
- **Run scripts in multiple packages.** You can use the [`--filter` flag](https://bun.sh/docs/cli/filter) to easily run `package.json` scripts in multiple packages in your workspace.
## Share versions with Catalogs
When many packages need the same dependency versions, catalogs let you define
those versions once in the root `package.json` and reference them from your
workspaces using the `catalog:` protocol. Updating the catalog automatically
updates every package that references it. See
[Catalogs](https://bun.sh/docs/install/catalogs) for details.
{% callout %}
⚡️ **Speed** — Installs are fast, even for big monorepos. Bun installs the [Remix](https://github.com/remix-run/remix) monorepo in about `500ms` on Linux.

View File

@@ -206,7 +206,7 @@ $ iex "& {$(irm https://bun.sh/install.ps1)} -Version $BUN_LATEST_VERSION"
## Downloading Bun binaries directly
To download Bun binaries directly, you can visit the [releases page](https://github.com/oven-sh/bun/releases) page on GitHub.
To download Bun binaries directly, you can visit the [releases page](https://github.com/oven-sh/bun/releases) on GitHub.
For convenience, here are download links for the latest version:

View File

@@ -183,6 +183,9 @@ export default {
page("install/workspaces", "Workspaces", {
description: "Bun's package manager supports workspaces and monorepo development workflows.",
}),
page("install/catalogs", "Catalogs", {
description: "Use catalogs to share dependency versions between packages in a monorepo.",
}),
page("install/lifecycle", "Lifecycle scripts", {
description: "How Bun handles package lifecycle scripts with trustedDependencies",
}),
@@ -204,6 +207,9 @@ export default {
description:
"Patch dependencies in your project to fix bugs or add features without vendoring the entire package.",
}),
page("install/audit", "Audit dependencies", {
description: "Check installed packages for vulnerabilities.",
}),
page("install/npmrc", ".npmrc support", {
description: "Bun supports loading some configuration options from .npmrc",
}),
@@ -389,7 +395,7 @@ export default {
page("api/cc", "C Compiler", {
description: `Build & run native C from JavaScript with Bun's native C compiler API`,
}), // "`bun:ffi`"),
page("api/test", "Testing", {
page("cli/test", "Testing", {
description: `Bun's built-in test runner is fast and uses Jest-compatible syntax.`,
}), // "`bun:test`"),
page("api/utils", "Utils", {

View File

@@ -19,31 +19,43 @@ Click the link in the right column to jump to the associated documentation.
---
- HTTP server
- HTTP Server
- [`Bun.serve`](https://bun.sh/docs/api/http#bun-serve)
---
- Shell
- [`$`](https://bun.sh/docs/runtime/shell)
---
- Bundler
- [`Bun.build`](https://bun.sh/docs/bundler)
---
- File I/O
- [`Bun.file`](https://bun.sh/docs/api/file-io#reading-files-bun-file)
[`Bun.write`](https://bun.sh/docs/api/file-io#writing-files-bun-write)
- [`Bun.file`](https://bun.sh/docs/api/file-io#reading-files-bun-file), [`Bun.write`](https://bun.sh/docs/api/file-io#writing-files-bun-write), `Bun.stdin`, `Bun.stdout`, `Bun.stderr`
---
- Child processes
- [`Bun.spawn`](https://bun.sh/docs/api/spawn#spawn-a-process-bun-spawn)
[`Bun.spawnSync`](https://bun.sh/docs/api/spawn#blocking-api-bun-spawnsync)
- Child Processes
- [`Bun.spawn`](https://bun.sh/docs/api/spawn#spawn-a-process-bun-spawn), [`Bun.spawnSync`](https://bun.sh/docs/api/spawn#blocking-api-bun-spawnsync)
---
- TCP
- [`Bun.listen`](https://bun.sh/docs/api/tcp#start-a-server-bun-listen)
[`Bun.connect`](https://bun.sh/docs/api/tcp#start-a-server-bun-listen)
- TCP Sockets
- [`Bun.listen`](https://bun.sh/docs/api/tcp#start-a-server-bun-listen), [`Bun.connect`](https://bun.sh/docs/api/tcp#start-a-server-bun-listen)
---
- UDP Sockets
- [`Bun.udpSocket`](https://bun.sh/docs/api/udp)
---
- WebSockets
- `new WebSocket()` (client), [`Bun.serve`](https://bun.sh/docs/api/websockets) (server)
---
@@ -57,44 +69,53 @@ Click the link in the right column to jump to the associated documentation.
---
- Streaming HTML Transformations
- Streaming HTML
- [`HTMLRewriter`](https://bun.sh/docs/api/html-rewriter)
---
- Hashing
- [`Bun.hash`](https://bun.sh/docs/api/hashing#bun-hash)
[`Bun.CryptoHasher`](https://bun.sh/docs/api/hashing#bun-cryptohasher)
- [`Bun.password`](https://bun.sh/docs/api/hashing#bun-password), [`Bun.hash`](https://bun.sh/docs/api/hashing#bun-hash), [`Bun.CryptoHasher`](https://bun.sh/docs/api/hashing#bun-cryptohasher), `Bun.sha`
---
- import.meta
- [`import.meta`](https://bun.sh/docs/api/import-meta)
---
<!-- - [DNS](https://bun.sh/docs/api/dns)
- `Bun.dns`
--- -->
- SQLite
- [`bun:sqlite`](https://bun.sh/docs/api/sqlite)
---
- FFI
- PostgreSQL Client
- [`Bun.SQL`](https://bun.sh/docs/api/sql), `Bun.sql`
---
- Redis (Valkey) Client
- [`Bun.RedisClient`](https://bun.sh/docs/api/redis), `Bun.redis`
---
- FFI (Foreign Function Interface)
- [`bun:ffi`](https://bun.sh/docs/api/ffi)
---
- DNS
- [`Bun.dns.lookup`](https://bun.sh/docs/api/dns), `Bun.dns.prefetch`, `Bun.dns.getCacheStats`
---
- Testing
- [`bun:test`](https://bun.sh/docs/cli/test)
---
- Node-API
- [`Node-API`](https://bun.sh/docs/api/node-api)
- Workers
- [`new Worker()`](https://bun.sh/docs/api/workers)
---
- Module Loaders
- [`Bun.plugin`](https://bun.sh/docs/bundler/plugins)
---
@@ -103,27 +124,84 @@ Click the link in the right column to jump to the associated documentation.
---
- Cookies
- [`Bun.Cookie`](https://bun.sh/docs/api/cookie), [`Bun.CookieMap`](https://bun.sh/docs/api/cookie)
---
- Node-API
- [`Node-API`](https://bun.sh/docs/api/node-api)
---
- `import.meta`
- [`import.meta`](https://bun.sh/docs/api/import-meta)
---
- Utilities
- [`Bun.version`](https://bun.sh/docs/api/utils#bun-version)
[`Bun.revision`](https://bun.sh/docs/api/utils#bun-revision)
[`Bun.env`](https://bun.sh/docs/api/utils#bun-env)
[`Bun.main`](https://bun.sh/docs/api/utils#bun-main)
[`Bun.sleep()`](https://bun.sh/docs/api/utils#bun-sleep)
[`Bun.sleepSync()`](https://bun.sh/docs/api/utils#bun-sleepsync)
[`Bun.which()`](https://bun.sh/docs/api/utils#bun-which)
[`Bun.peek()`](https://bun.sh/docs/api/utils#bun-peek)
[`Bun.openInEditor()`](https://bun.sh/docs/api/utils#bun-openineditor)
[`Bun.deepEquals()`](https://bun.sh/docs/api/utils#bun-deepequals)
[`Bun.escapeHTML()`](https://bun.sh/docs/api/utils#bun-escapehtml)
[`Bun.fileURLToPath()`](https://bun.sh/docs/api/utils#bun-fileurltopath)
[`Bun.pathToFileURL()`](https://bun.sh/docs/api/utils#bun-pathtofileurl)
[`Bun.gzipSync()`](https://bun.sh/docs/api/utils#bun-gzipsync)
[`Bun.gunzipSync()`](https://bun.sh/docs/api/utils#bun-gunzipsync)
[`Bun.deflateSync()`](https://bun.sh/docs/api/utils#bun-deflatesync)
[`Bun.inflateSync()`](https://bun.sh/docs/api/utils#bun-inflatesync)
[`Bun.inspect()`](https://bun.sh/docs/api/utils#bun-inspect)
[`Bun.nanoseconds()`](https://bun.sh/docs/api/utils#bun-nanoseconds)
[`Bun.readableStreamTo*()`](https://bun.sh/docs/api/utils#bun-readablestreamto)
[`Bun.resolveSync()`](https://bun.sh/docs/api/utils#bun-resolvesync)
- [`Bun.version`](https://bun.sh/docs/api/utils#bun-version), [`Bun.revision`](https://bun.sh/docs/api/utils#bun-revision), [`Bun.env`](https://bun.sh/docs/api/utils#bun-env), [`Bun.main`](https://bun.sh/docs/api/utils#bun-main)
---
- Sleep & Timing
- [`Bun.sleep()`](https://bun.sh/docs/api/utils#bun-sleep), [`Bun.sleepSync()`](https://bun.sh/docs/api/utils#bun-sleepsync), [`Bun.nanoseconds()`](https://bun.sh/docs/api/utils#bun-nanoseconds)
---
- Random & UUID
- [`Bun.randomUUIDv7()`](https://bun.sh/docs/api/utils#bun-randomuuidv7)
---
- System & Environment
- [`Bun.which()`](https://bun.sh/docs/api/utils#bun-which)
---
- Comparison & Inspection
- [`Bun.peek()`](https://bun.sh/docs/api/utils#bun-peek), [`Bun.deepEquals()`](https://bun.sh/docs/api/utils#bun-deepequals), `Bun.deepMatch`, [`Bun.inspect()`](https://bun.sh/docs/api/utils#bun-inspect)
---
- String & Text Processing
- [`Bun.escapeHTML()`](https://bun.sh/docs/api/utils#bun-escapehtml), [`Bun.stringWidth()`](https://bun.sh/docs/api/utils#bun-stringwidth), `Bun.indexOfLine`
---
- URL & Path Utilities
- [`Bun.fileURLToPath()`](https://bun.sh/docs/api/utils#bun-fileurltopath), [`Bun.pathToFileURL()`](https://bun.sh/docs/api/utils#bun-pathtofileurl)
---
- Compression
- [`Bun.gzipSync()`](https://bun.sh/docs/api/utils#bun-gzipsync), [`Bun.gunzipSync()`](https://bun.sh/docs/api/utils#bun-gunzipsync), [`Bun.deflateSync()`](https://bun.sh/docs/api/utils#bun-deflatesync), [`Bun.inflateSync()`](https://bun.sh/docs/api/utils#bun-inflatesync), `Bun.zstdCompressSync()`, `Bun.zstdDecompressSync()`, `Bun.zstdCompress()`, `Bun.zstdDecompress()`
---
- Stream Processing
- [`Bun.readableStreamTo*()`](https://bun.sh/docs/api/utils#bun-readablestreamto), `Bun.readableStreamToBytes()`, `Bun.readableStreamToBlob()`, `Bun.readableStreamToFormData()`, `Bun.readableStreamToJSON()`, `Bun.readableStreamToArray()`
---
- Memory & Buffer Management
- `Bun.ArrayBufferSink`, `Bun.allocUnsafe`, `Bun.concatArrayBuffers`
---
- Module Resolution
- [`Bun.resolveSync()`](https://bun.sh/docs/api/utils#bun-resolvesync)
---
- Parsing & Formatting
- [`Bun.semver`](https://bun.sh/docs/api/semver), `Bun.TOML.parse`, [`Bun.color`](https://bun.sh/docs/api/color)
---
- Low-level / Internals
- `Bun.mmap`, `Bun.gc`, `Bun.generateHeapSnapshot`, [`bun:jsc`](https://bun.sh/docs/api/bun-jsc)
---
{% /table %}

View File

@@ -120,7 +120,7 @@ This page is updated regularly to reflect compatibility status of the latest ver
### [`node:net`](https://nodejs.org/api/net.html)
🟡 `SocketAddress` class not exposed (but implemented). `BlockList` exists but is a no-op.
🟢 Fully implemented.
### [`node:perf_hooks`](https://nodejs.org/api/perf_hooks.html)

View File

@@ -1,7 +1,7 @@
{
"private": true,
"name": "bun",
"version": "1.2.14",
"version": "1.2.16",
"workspaces": [
"./packages/bun-types",
"./packages/@types/bun"
@@ -26,7 +26,8 @@
"build": "bun run build:debug",
"watch": "zig build check --watch -fincremental --prominent-compile-errors --global-cache-dir build/debug/zig-check-cache --zig-lib-dir vendor/zig/lib",
"watch-windows": "zig build check-windows --watch -fincremental --prominent-compile-errors --global-cache-dir build/debug/zig-check-cache --zig-lib-dir vendor/zig/lib",
"agent": "(bun run --silent build:debug &> /tmp/bun.debug.build.log || (cat /tmp/bun.debug.build.log && rm -rf /tmp/bun.debug.build.log && exit 1)) && rm -f /tmp/bun.debug.build.log && ./build/debug/bun-debug",
"bd:v": "(bun run --silent build:debug &> /tmp/bun.debug.build.log || (cat /tmp/bun.debug.build.log && rm -rf /tmp/bun.debug.build.log && exit 1)) && rm -f /tmp/bun.debug.build.log && ./build/debug/bun-debug",
"bd": "BUN_DEBUG_QUIET_LOGS=1 bun bd:v",
"build:debug": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -B build/debug",
"build:debug:asan": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -DENABLE_ASAN=ON -B build/debug-asan",
"build:valgrind": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -DENABLE_BASELINE=ON -ENABLE_VALGRIND=ON -B build/debug-valgrind",
@@ -41,6 +42,7 @@
"build:release:local": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -DWEBKIT_LOCAL=ON -B build/release-local",
"build:release:with_logs": "cmake . -DCMAKE_BUILD_TYPE=Release -DENABLE_LOGS=true -GNinja -Bbuild-release && ninja -Cbuild-release",
"build:debug-zig-release": "cmake . -DCMAKE_BUILD_TYPE=Release -DZIG_OPTIMIZE=Debug -GNinja -Bbuild-debug-zig-release && ninja -Cbuild-debug-zig-release",
"run:linux": "docker run --rm -v \"$PWD:/root/bun/\" -w /root/bun ghcr.io/oven-sh/bun-development-docker-image",
"css-properties": "bun run src/css/properties/generate_properties.ts",
"uv-posix-stubs": "bun run src/bun.js/bindings/libuv/generate_uv_posix_stubs.ts",
"bump": "bun ./scripts/bump.ts",
@@ -74,6 +76,7 @@
"zig-format:check": "bun run analysis:no-llvm --target zig-format-check",
"prettier": "bunx prettier@latest --plugin=prettier-plugin-organize-imports --config .prettierrc --write scripts packages src docs 'test/**/*.{test,spec}.{ts,tsx,js,jsx,mts,mjs,cjs,cts}' '!test/**/*fixture*.*'",
"node:test": "node ./scripts/runner.node.mjs --quiet --exec-path=$npm_execpath --node-tests ",
"clean:zig": "rm -rf build/debug/cache/zig build/debug/CMakeCache.txt 'build/debug/*.o' .zig-cache zig-out || true"
"clean:zig": "rm -rf build/debug/cache/zig build/debug/CMakeCache.txt 'build/debug/*.o' .zig-cache zig-out || true",
"sync-webkit-source": "bun ./scripts/sync-webkit-source.ts"
}
}

View File

@@ -42,11 +42,11 @@ export default class RuntimeError {
original: Error;
stack: StackFrame[];
static from(error: Error) {
static from(error: Error): RuntimeError {
const runtime = new RuntimeError();
runtime.original = error;
runtime.stack = this.parseStack(error);
return RuntimeError;
return runtime;
}
/**

View File

@@ -3304,6 +3304,8 @@ declare module "bun" {
interface BunRequest<T extends string = string> extends Request {
params: RouterTypes.ExtractRouteParams<T>;
readonly cookies: CookieMap;
clone(): BunRequest<T>;
}
interface GenericServeOptions {
@@ -3355,6 +3357,30 @@ declare module "bun" {
* @default false
*/
console?: boolean;
/**
* Enable automatic workspace folders for Chrome DevTools
*
* This lets you persistently edit files in the browser. It works by adding the following route to the server:
* `/.well-known/appspecific/com.chrome.devtools.json`
*
* The response is a JSON object with the following shape:
* ```json
* {
* "workspace": {
* "root": "<cwd>",
* "uuid": "<uuid>"
* }
* }
* ```
*
* The `root` field is the current working directory of the server.
* The `"uuid"` field is a hash of the file that started the server and a hash of the current working directory.
*
* For security reasons, if the remote socket address is not from localhost, 127.0.0.1, or ::1, the request is ignored.
* @default true
*/
chromeDevToolsAutomaticWorkspaceFolders?: boolean;
};
error?: (this: Server, error: ErrorLike) => Response | Promise<Response> | void | Promise<void>;
@@ -3660,7 +3686,7 @@ declare module "bun" {
* the well-known CAs curated by Mozilla. Mozilla's CAs are completely
* replaced when CAs are explicitly specified using this option.
*/
ca?: string | Buffer | BunFile | Array<string | Buffer | BunFile> | undefined;
ca?: string | BufferSource | BunFile | Array<string | BufferSource | BunFile> | undefined;
/**
* Cert chains in PEM format. One cert chain should be provided per
* private key. Each cert chain should consist of the PEM formatted
@@ -3672,7 +3698,7 @@ declare module "bun" {
* intermediate certificates are not provided, the peer will not be
* able to validate the certificate, and the handshake will fail.
*/
cert?: string | Buffer | BunFile | Array<string | Buffer | BunFile> | undefined;
cert?: string | BufferSource | BunFile | Array<string | BufferSource | BunFile> | undefined;
/**
* Private keys in PEM format. PEM allows the option of private keys
* being encrypted. Encrypted keys will be decrypted with
@@ -3683,13 +3709,25 @@ declare module "bun" {
* object.passphrase is optional. Encrypted keys will be decrypted with
* object.passphrase if provided, or options.passphrase if it is not.
*/
key?: string | Buffer | BunFile | Array<string | Buffer | BunFile> | undefined;
key?: string | BufferSource | BunFile | Array<string | BufferSource | BunFile> | undefined;
/**
* Optionally affect the OpenSSL protocol behavior, which is not
* usually necessary. This should be used carefully if at all! Value is
* a numeric bitmask of the SSL_OP_* options from OpenSSL Options
*/
secureOptions?: number | undefined; // Value is a numeric bitmask of the `SSL_OP_*` options
keyFile?: string;
certFile?: string;
ALPNProtocols?: string | BufferSource;
ciphers?: string;
clientRenegotiationLimit?: number;
clientRenegotiationWindow?: number;
}
// Note for contributors: TLSOptionsAsDeprecated should be considered immutable
@@ -5403,6 +5441,42 @@ declare module "bun" {
options?: ZlibCompressionOptions | LibdeflateCompressionOptions,
): Uint8Array;
/**
* Compresses a chunk of data with the Zstandard (zstd) compression algorithm.
* @param data The buffer of data to compress
* @param options Compression options to use
* @returns The output buffer with the compressed data
*/
function zstdCompressSync(
data: NodeJS.TypedArray | Buffer | string | ArrayBuffer,
options?: { level?: number },
): Buffer;
/**
* Compresses a chunk of data with the Zstandard (zstd) compression algorithm.
* @param data The buffer of data to compress
* @param options Compression options to use
* @returns A promise that resolves to the output buffer with the compressed data
*/
function zstdCompress(
data: NodeJS.TypedArray | Buffer | string | ArrayBuffer,
options?: { level?: number },
): Promise<Buffer>;
/**
* Decompresses a chunk of data with the Zstandard (zstd) decompression algorithm.
* @param data The buffer of data to decompress
* @returns The output buffer with the decompressed data
*/
function zstdDecompressSync(data: NodeJS.TypedArray | Buffer | string | ArrayBuffer): Buffer;
/**
* Decompresses a chunk of data with the Zstandard (zstd) decompression algorithm.
* @param data The buffer of data to decompress
* @returns A promise that resolves to the output buffer with the decompressed data
*/
function zstdDecompress(data: NodeJS.TypedArray | Buffer | string | ArrayBuffer): Promise<Buffer>;
type Target =
/**
* For generating bundles that are intended to be run by the Bun runtime. In many cases,
@@ -6022,7 +6096,7 @@ declare module "bun" {
* certificate.
* @return A certificate object.
*/
getPeerCertificate(): import("tls").PeerCertificate;
getPeerCertificate(): import("node:tls").PeerCertificate;
getPeerX509Certificate(): import("node:crypto").X509Certificate;
/**
@@ -6127,6 +6201,34 @@ declare module "bun" {
* The number of bytes written to the socket.
*/
readonly bytesWritten: number;
resume(): void;
pause(): void;
renegotiate(): void;
setVerifyMode(requestCert: boolean, rejectUnauthorized: boolean): void;
getSession(): void;
setSession(session: string | Buffer | BufferSource): void;
exportKeyingMaterial(length: number, label: string, context?: string | BufferSource): void;
upgradeTLS<Data>(options: TLSUpgradeOptions<Data>): [raw: Socket<Data>, tls: Socket<Data>];
close(): void;
getServername(): string;
setServername(name: string): void;
}
interface TLSUpgradeOptions<Data> {
data?: Data;
tls: TLSOptions | boolean;
socket: SocketHandler<Data>;
}
interface SocketListener<Data = undefined> extends Disposable {
@@ -6227,6 +6329,22 @@ declare module "bun" {
* The per-instance data context
*/
data?: Data;
/**
* Whether to allow half-open connections.
*
* A half-open connection occurs when one end of the connection has called `close()`
* or sent a FIN packet, while the other end remains open. When set to `true`:
*
* - The socket won't automatically send FIN when the remote side closes its end
* - The local side can continue sending data even after the remote side has closed
* - The application must explicitly call `end()` to fully close the connection
*
* When `false`, the socket automatically closes both ends of the connection when
* either side closes.
*
* @default false
*/
allowHalfOpen?: boolean;
}
interface TCPSocketListenOptions<Data = undefined> extends SocketOptions<Data> {
@@ -6241,7 +6359,7 @@ declare module "bun" {
/**
* The TLS configuration object with which to create the server
*/
tls?: TLSOptions;
tls?: TLSOptions | boolean;
/**
* Whether to use exclusive mode.
*
@@ -6287,7 +6405,7 @@ declare module "bun" {
/**
* TLS Configuration with which to create the socket
*/
tls?: boolean;
tls?: TLSOptions | boolean;
/**
* Whether to use exclusive mode.
*
@@ -6303,22 +6421,8 @@ declare module "bun" {
* @default false
*/
exclusive?: boolean;
/**
* Whether to allow half-open connections.
*
* A half-open connection occurs when one end of the connection has called `close()`
* or sent a FIN packet, while the other end remains open. When set to `true`:
*
* - The socket won't automatically send FIN when the remote side closes its end
* - The local side can continue sending data even after the remote side has closed
* - The application must explicitly call `end()` to fully close the connection
*
* When `false` (default), the socket automatically closes both ends of the connection
* when either side closes.
*
* @default false
*/
allowHalfOpen?: boolean;
reusePort?: boolean;
ipv6Only?: boolean;
}
interface UnixSocketOptions<Data = undefined> extends SocketOptions<Data> {
@@ -6329,14 +6433,14 @@ declare module "bun" {
/**
* TLS Configuration with which to create the socket
*/
tls?: TLSOptions;
tls?: TLSOptions | boolean;
}
interface FdSocketOptions<Data = undefined> extends SocketOptions<Data> {
/**
* TLS Configuration with which to create the socket
*/
tls?: TLSOptions;
tls?: TLSOptions | boolean;
/**
* The file descriptor to connect to
*/
@@ -7443,9 +7547,16 @@ declare module "bun" {
workspaces: {
[workspace: string]: BunLockFileWorkspacePackage;
};
/** @see https://bun.sh/docs/install/overrides */
overrides?: Record<string, string>;
/** @see https://bun.sh/docs/install/patch */
patchedDependencies?: Record<string, string>;
/** @see https://bun.sh/docs/install/lifecycle#trusteddependencies */
trustedDependencies?: string[];
/** @see https://bun.sh/docs/install/catalogs */
catalog?: Record<string, string>;
/** @see https://bun.sh/docs/install/catalogs */
catalogs?: Record<string, Record<string, string>>;
/**
* ```

View File

@@ -10,6 +10,7 @@ declare module "bun" {
type NodeCryptoWebcryptoSubtleCrypto = import("crypto").webcrypto.SubtleCrypto;
type NodeCryptoWebcryptoCryptoKey = import("crypto").webcrypto.CryptoKey;
type NodeCryptoWebcryptoCryptoKeyPair = import("crypto").webcrypto.CryptoKeyPair;
type LibEmptyOrBunWebSocket = LibDomIsLoaded extends true ? {} : Bun.WebSocket;
@@ -884,6 +885,8 @@ declare var CryptoKey: {
new (): CryptoKey;
};
interface CryptoKeyPair extends Bun.__internal.NodeCryptoWebcryptoCryptoKeyPair {}
interface Position {
lineText: string;
file: string;

View File

@@ -146,7 +146,7 @@ declare module "bun:test" {
export function spyOn<T extends object, K extends keyof T>(
obj: T,
methodOrPropertyValue: K,
): Mock<T[K] extends (...args: any[]) => any ? T[K] : never>;
): Mock<Extract<T[K], (...args: any[]) => any>>;
interface FunctionLike {
readonly name: string;

View File

@@ -440,7 +440,7 @@ struct us_socket_t* us_socket_context_connect_resolved_dns(struct us_socket_cont
socket->flags.is_paused = 0;
socket->flags.is_ipc = 0;
socket->connect_state = NULL;
socket->connect_next = NULL;
us_internal_socket_context_link_socket(context, socket);
@@ -459,7 +459,7 @@ static void init_addr_with_port(struct addrinfo* info, int port, struct sockaddr
}
}
static int try_parse_ip(const char *ip_str, int port, struct sockaddr_storage *storage) {
static bool try_parse_ip(const char *ip_str, int port, struct sockaddr_storage *storage) {
memset(storage, 0, sizeof(struct sockaddr_storage));
// Try to parse as IPv4
struct sockaddr_in *addr4 = (struct sockaddr_in *)storage;
@@ -469,7 +469,7 @@ static int try_parse_ip(const char *ip_str, int port, struct sockaddr_storage *s
#ifdef __APPLE__
addr4->sin_len = sizeof(struct sockaddr_in);
#endif
return 0;
return 1;
}
// Try to parse as IPv6
@@ -480,17 +480,17 @@ static int try_parse_ip(const char *ip_str, int port, struct sockaddr_storage *s
#ifdef __APPLE__
addr6->sin6_len = sizeof(struct sockaddr_in6);
#endif
return 0;
return 1;
}
// If we reach here, the input is neither IPv4 nor IPv6
return 1;
return 0;
}
void *us_socket_context_connect(int ssl, struct us_socket_context_t *context, const char *host, int port, int options, int socket_ext_size, int* is_connecting) {
void *us_socket_context_connect(int ssl, struct us_socket_context_t *context, const char *host, int port, int options, int socket_ext_size, int* has_dns_resolved) {
#ifndef LIBUS_NO_SSL
if (ssl == 1) {
return us_internal_ssl_socket_context_connect((struct us_internal_ssl_socket_context_t *) context, host, port, options, socket_ext_size, is_connecting);
return us_internal_ssl_socket_context_connect((struct us_internal_ssl_socket_context_t *) context, host, port, options, socket_ext_size, has_dns_resolved);
}
#endif
@@ -498,8 +498,8 @@ void *us_socket_context_connect(int ssl, struct us_socket_context_t *context, co
// fast path for IP addresses in text form
struct sockaddr_storage addr;
if (try_parse_ip(host, port, &addr) == 0) {
*is_connecting = 1;
if (try_parse_ip(host, port, &addr)) {
*has_dns_resolved = 1;
return us_socket_context_connect_resolved_dns(context, &addr, options, socket_ext_size);
}
@@ -518,7 +518,7 @@ void *us_socket_context_connect(int ssl, struct us_socket_context_t *context, co
if (result->entries && result->entries->info.ai_next == NULL) {
struct sockaddr_storage addr;
init_addr_with_port(&result->entries->info, port, &addr);
*is_connecting = 1;
*has_dns_resolved = 1;
struct us_socket_t *s = us_socket_context_connect_resolved_dns(context, &addr, options, socket_ext_size);
Bun__addrinfo_freeRequest(ai_req, s == NULL);
return s;

View File

@@ -213,7 +213,7 @@ struct us_internal_ssl_socket_t *ssl_on_open(struct us_internal_ssl_socket_t *s,
s->ssl_read_wants_write = 0;
s->fatal_error = 0;
s->handshake_state = HANDSHAKE_PENDING;
SSL_set_bio(s->ssl, loop_ssl_data->shared_rbio, loop_ssl_data->shared_wbio);
// if we allow renegotiation, we need to set the mode here
@@ -255,7 +255,7 @@ struct us_internal_ssl_socket_t *ssl_on_open(struct us_internal_ssl_socket_t *s,
}
/// @brief Complete the shutdown or do a fast shutdown when needed, this should only be called before closing the socket
/// @param s
/// @param s
int us_internal_handle_shutdown(struct us_internal_ssl_socket_t *s, int force_fast_shutdown) {
// if we are already shutdown or in the middle of a handshake we dont need to do anything
// Scenarios:
@@ -265,7 +265,7 @@ int us_internal_handle_shutdown(struct us_internal_ssl_socket_t *s, int force_fa
// 4 - we are in the middle of a handshake
// 5 - we received a fatal error
if(us_internal_ssl_socket_is_shut_down(s) || s->fatal_error || !SSL_is_init_finished(s->ssl)) return 1;
// we are closing the socket but did not sent a shutdown yet
int state = SSL_get_shutdown(s->ssl);
int sent_shutdown = state & SSL_SENT_SHUTDOWN;
@@ -277,7 +277,7 @@ int us_internal_handle_shutdown(struct us_internal_ssl_socket_t *s, int force_fa
// Zero means that we should wait for the peer to close the connection
// but we are already closing the connection so we do a fast shutdown here
int ret = SSL_shutdown(s->ssl);
if(ret == 0 && force_fast_shutdown) {
if(ret == 0 && force_fast_shutdown) {
// do a fast shutdown (dont wait for peer)
ret = SSL_shutdown(s->ssl);
}
@@ -397,7 +397,7 @@ void us_internal_update_handshake(struct us_internal_ssl_socket_t *s) {
// nothing todo here, renegotiation must be handled in SSL_read
if (s->handshake_state != HANDSHAKE_PENDING)
return;
if (us_internal_ssl_socket_is_closed(s) || us_internal_ssl_socket_is_shut_down(s) ||
(s->ssl && SSL_get_shutdown(s->ssl) & SSL_RECEIVED_SHUTDOWN)) {
@@ -422,7 +422,7 @@ void us_internal_update_handshake(struct us_internal_ssl_socket_t *s) {
s->fatal_error = 1;
}
us_internal_trigger_handshake_callback(s, 0);
return;
}
s->handshake_state = HANDSHAKE_PENDING;
@@ -504,7 +504,7 @@ restart:
loop_ssl_data->ssl_read_output +
LIBUS_RECV_BUFFER_PADDING + read,
LIBUS_RECV_BUFFER_LENGTH - read);
if (just_read <= 0) {
int err = SSL_get_error(s->ssl, just_read);
// as far as I know these are the only errors we want to handle
@@ -603,7 +603,7 @@ restart:
goto restart;
}
}
// Trigger writable if we failed last SSL_write with SSL_ERROR_WANT_READ
// Trigger writable if we failed last SSL_write with SSL_ERROR_WANT_READ
// If we failed SSL_read because we need to write more data (SSL_ERROR_WANT_WRITE) we are not going to trigger on_writable, we will wait until the next on_data or on_writable event
// SSL_read will try to flush the write buffer and if fails with SSL_ERROR_WANT_WRITE means the socket is not in a writable state anymore and only makes sense to trigger on_writable if we can write more data
// Otherwise we possible would trigger on_writable -> on_data event in a recursive loop
@@ -1133,7 +1133,7 @@ int us_verify_callback(int preverify_ok, X509_STORE_CTX *ctx) {
}
SSL_CTX *create_ssl_context_from_bun_options(
struct us_bun_socket_context_options_t options,
struct us_bun_socket_context_options_t options,
enum create_bun_socket_error_t *err) {
ERR_clear_error();
@@ -1250,8 +1250,8 @@ SSL_CTX *create_ssl_context_from_bun_options(
return NULL;
}
// It may return spurious errors here.
ERR_clear_error();
// It may return spurious errors here.
ERR_clear_error();
if (options.reject_unauthorized) {
SSL_CTX_set_verify(ssl_context,
@@ -1755,7 +1755,7 @@ int us_internal_ssl_socket_raw_write(struct us_internal_ssl_socket_t *s,
int us_internal_ssl_socket_write(struct us_internal_ssl_socket_t *s,
const char *data, int length, int msg_more) {
if (us_socket_is_closed(0, &s->s) || us_internal_ssl_socket_is_shut_down(s) || length == 0) {
return 0;
}
@@ -1989,7 +1989,7 @@ ssl_wrapped_context_on_end(struct us_internal_ssl_socket_t *s) {
if (wrapped_context->events.on_end) {
wrapped_context->events.on_end((struct us_socket_t *)s);
}
return s;
}
@@ -2082,7 +2082,7 @@ struct us_internal_ssl_socket_t *us_internal_ssl_socket_wrap_with_tls(
struct us_socket_context_t *context = us_create_bun_ssl_socket_context(
old_context->loop, sizeof(struct us_wrapped_socket_context_t),
options, &err);
// Handle SSL context creation failure
if (UNLIKELY(!context)) {
return NULL;
@@ -2186,4 +2186,4 @@ us_socket_context_on_socket_connect_error(
return socket;
}
#endif
#endif

View File

@@ -785,6 +785,23 @@
"items": {
"type": "string"
}
},
"catalog": {
"type": "object",
"description": "A single default catalog for commonly used dependencies. Referenced with 'catalog:' in workspace package dependencies.",
"additionalProperties": {
"type": "string"
}
},
"catalogs": {
"type": "object",
"description": "Multiple named catalogs for grouping dependencies. Referenced with 'catalog:catalogName' in workspace package dependencies.",
"additionalProperties": {
"type": "object",
"additionalProperties": {
"type": "string"
}
}
}
}
}

View File

@@ -18,12 +18,14 @@ async function globSources(output, patterns, excludes = []) {
}
total += paths.length;
const sources = paths
.map(path => normalize(relative(root, path)))
.sort((a, b) => a.localeCompare(b))
.join("\n");
const sources =
paths
.map(path => normalize(relative(root, path)))
.sort((a, b) => a.localeCompare(b))
.join("\n")
.trim() + "\n";
await write(join(root, "cmake", output), sources);
await write(join(root, "cmake", "sources", output), sources);
}
const input = await file(join(root, "cmake", "Sources.json")).json();

View File

@@ -0,0 +1,26 @@
import { existsSync } from "node:fs";
import { dirname, join } from "node:path";

// Sync the vendor/WebKit checkout to the commit pinned in cmake/tools/SetupWebKit.cmake.
const bunRepo = dirname(import.meta.dir);
const webkitRepo = join(bunRepo, "vendor/WebKit");

if (!existsSync(webkitRepo)) {
  console.log("could not find WebKit clone");
  console.log("clone https://github.com/oven-sh/WebKit.git to vendor/WebKit");
  console.log("or create a symlink/worktree to an existing clone");
  process.exit(1);
}

process.chdir(webkitRepo);

const checkedOutCommit = (await Bun.$`git rev-parse HEAD`.text()).trim();
const cmakeContents = await Bun.file(join(bunRepo, "cmake/tools/SetupWebKit.cmake")).text();
const match = cmakeContents.match(/set\(WEBKIT_VERSION ([0-9a-f]{40})\)/);
if (!match) {
  // Previously a non-null assertion here would crash with an opaque TypeError
  // if the WEBKIT_VERSION line is missing or reformatted; fail with a clear message.
  console.log("could not find WEBKIT_VERSION in cmake/tools/SetupWebKit.cmake");
  process.exit(1);
}
const expectedCommit = match[1];

if (checkedOutCommit === expectedCommit) {
  console.log(`already at commit ${expectedCommit}`);
} else {
  console.log(`changing from ${checkedOutCommit} to ${expectedCommit}`);
  await Bun.$`git checkout main`;
  await Bun.$`git pull`;
  // it is OK that this leaves you with a detached HEAD
  await Bun.$`git checkout ${expectedCommit}`;
}

View File

@@ -0,0 +1,155 @@
import * as fs from "fs";
import * as path from "path";
/**
 * Removes unreferenced top-level const declarations from a Zig file.
 * Handles patterns like: const <IDENTIFIER> = @import(...); or const <IDENTIFIER> = ...;
 *
 * Runs to a fixed point: deleting one declaration can make another
 * unreferenced, so passes repeat until a pass makes no change.
 *
 * @param content - Full text of the Zig source file.
 * @returns The source text with unreferenced top-level consts removed.
 */
export function removeUnreferencedImports(content: string): string {
  let modified = true;
  let result = content;

  // Keep iterating until no more changes are made
  while (modified) {
    modified = false;
    const lines = result.split("\n");
    const newLines: string[] = [];

    for (let i = 0; i < lines.length; i++) {
      const line = lines[i];

      // Match top-level const declarations: const <IDENTIFIER> = ...
      const constMatch = line.match(/^const\s+([a-zA-Z_][a-zA-Z0-9_]*)\s*=(.*)$/);

      if (constMatch) {
        const identifier = constMatch[1];
        const assignmentPart = constMatch[2];

        // Skip lines that contain '{' in the assignment (likely structs/objects)
        if (assignmentPart.includes("{")) {
          newLines.push(line);
          continue;
        }

        // Only remove single-line declarations. A declaration whose
        // initializer continues onto following lines does not end with ';'
        // here, and deleting just its first line would orphan the rest
        // and corrupt the file.
        if (!assignmentPart.trimEnd().endsWith(";")) {
          newLines.push(line);
          continue;
        }

        // Check if this identifier is referenced anywhere else in the file
        const isReferenced = isIdentifierReferenced(identifier, lines, i);

        if (!isReferenced) {
          // Skip this line (delete it)
          modified = true;
          console.log(`Removing unreferenced import: ${identifier}`);
          continue;
        }
      }

      newLines.push(line);
    }

    result = newLines.join("\n");
  }

  return result;
}

/**
 * Check if an identifier is referenced anywhere in the file except at the declaration line.
 *
 * @param identifier - Name declared by the candidate const.
 * @param lines - All lines of the file being scanned.
 * @param declarationLineIndex - Index of the declaration line to ignore.
 * @returns true if any other line mentions the identifier as a whole word.
 */
function isIdentifierReferenced(identifier: string, lines: string[], declarationLineIndex: number): boolean {
  // Create a regex that matches the identifier as a whole word
  // This prevents matching partial words (e.g. "std" shouldn't match "stdx")
  const identifierRegex = new RegExp(`\\b${escapeRegex(identifier)}\\b`);

  for (let i = 0; i < lines.length; i++) {
    // Skip the declaration line itself
    if (i === declarationLineIndex) {
      continue;
    }

    const line = lines[i];

    // Check if the identifier appears in this line
    if (identifierRegex.test(line)) {
      return true;
    }
  }

  return false;
}

/**
 * Escape special regex characters in a string so it can be embedded
 * literally inside a RegExp pattern.
 */
function escapeRegex(string: string): string {
  return string.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
}
/**
 * Process a single Zig file: read it, strip unreferenced top-level
 * consts, and write it back only when the contents actually changed.
 * Errors (unreadable/unwritable file) are logged, never thrown.
 */
export function processZigFile(filePath: string): void {
  try {
    const original = fs.readFileSync(filePath, "utf-8");
    const cleaned = removeUnreferencedImports(original);

    if (cleaned === original) {
      console.log(`No changes: ${filePath}`);
      return;
    }

    fs.writeFileSync(filePath, cleaned);
    console.log(`Cleaned: ${filePath}`);
  } catch (error) {
    console.error(`Error processing ${filePath}:`, error);
  }
}
/**
 * Process multiple Zig files or directories.
 * Directories are walked recursively; non-Zig files are skipped with a warning.
 */
export function processFiles(paths: string[]): void {
  for (const inputPath of paths) {
    if (fs.statSync(inputPath).isDirectory()) {
      // Process all .zig files in directory recursively
      processDirectory(inputPath);
      continue;
    }

    if (!inputPath.endsWith(".zig")) {
      console.warn(`Skipping non-Zig file: ${inputPath}`);
      continue;
    }

    processZigFile(inputPath);
  }
}
/**
 * Recursively process every .zig file under a directory,
 * descending into subdirectories depth-first.
 */
function processDirectory(dirPath: string): void {
  for (const entry of fs.readdirSync(dirPath, { withFileTypes: true })) {
    const fullPath = path.join(dirPath, entry.name);

    if (entry.isDirectory()) {
      processDirectory(fullPath);
    } else if (entry.name.endsWith(".zig")) {
      processZigFile(fullPath);
    }
  }
}
// CLI usage: run directly (not imported) with one or more files/directories.
if (require.main === module) {
  const args = process.argv.slice(2);

  if (args.length > 0) {
    processFiles(args);
  } else {
    console.log("Usage: bun zig-remove-unreferenced-top-level-decls.ts <file1.zig> [file2.zig] [directory]...");
    console.log("");
    console.log("Examples:");
    console.log("  bun zig-remove-unreferenced-top-level-decls.ts file.zig");
    console.log("  bun zig-remove-unreferenced-top-level-decls.ts src/");
    console.log("  bun zig-remove-unreferenced-top-level-decls.ts file1.zig file2.zig src/");
    process.exit(1);
  }
}

View File

@@ -3,7 +3,6 @@ const Environment = @import("./env.zig");
const Output = @import("output.zig");
const use_mimalloc = bun.use_mimalloc;
const StringTypes = @import("./string_types.zig");
const Mimalloc = bun.Mimalloc;
const bun = @import("bun");

View File

@@ -1,6 +1,5 @@
const std = @import("std");
const bun = @import("bun");
const string = bun.string;
const ImportRecord = @import("./import_record.zig").ImportRecord;
const ImportKind = @import("./import_record.zig").ImportKind;
const lol = @import("./deps/lol-html.zig");

View File

@@ -26,7 +26,6 @@ const std = @import("std");
const builtin = @import("builtin");
const bun = @import("bun");
const assert = bun.assert;
const testing = std.testing;
const Thread = std.Thread;
const Futex = bun.Futex;

View File

@@ -17,7 +17,6 @@
const std = @import("std");
const builtin = @import("builtin");
const windows = std.os.windows;
const testing = std.testing;
const assert = bun.assert;
const Progress = @This();
const bun = @import("bun");

View File

@@ -670,12 +670,9 @@ const std = @import("std");
const bun = @import("bun");
const string = bun.string;
const Output = bun.Output;
const Global = bun.Global;
const Environment = bun.Environment;
const strings = bun.strings;
const stringZ = bun.stringZ;
const FeatureFlags = bun.FeatureFlags;
const options = @import("./options.zig");
const Mutex = bun.Mutex;
const Futex = @import("./futex.zig");
const PackageJSON = @import("./resolver/package_json.zig").PackageJSON;

View File

@@ -1,8 +1,6 @@
const std = @import("std");
const FeatureFlags = @import("./feature_flags.zig");
const Environment = @import("./env.zig");
const FixedBufferAllocator = std.heap.FixedBufferAllocator;
const bun = @import("bun");
const OOM = bun.OOM;

View File

@@ -1,12 +1,10 @@
const mem = @import("std").mem;
const builtin = @import("std").builtin;
const std = @import("std");
const bun = @import("bun");
const log = bun.Output.scoped(.mimalloc, true);
const assert = bun.assert;
const Allocator = mem.Allocator;
const mimalloc = @import("./mimalloc.zig");
const FeatureFlags = @import("../feature_flags.zig");
const Environment = @import("../env.zig");
fn mimalloc_free(

View File

@@ -1,5 +1,4 @@
const mem = @import("std").mem;
const builtin = @import("std").builtin;
const std = @import("std");
const mimalloc = @import("./mimalloc.zig");

View File

@@ -1,24 +1,9 @@
const bun = @import("bun");
const string = bun.string;
const Output = bun.Output;
const Global = bun.Global;
const Environment = bun.Environment;
const strings = bun.strings;
const MutableString = bun.MutableString;
const stringZ = bun.stringZ;
const default_allocator = bun.default_allocator;
const FeatureFlags = bun.FeatureFlags;
const sync = @import("../sync.zig");
const std = @import("std");
const HTTP = bun.http;
const URL = @import("../url.zig").URL;
const Fs = @import("../fs.zig");
const Analytics = @import("./analytics_schema.zig").analytics;
const Writer = @import("./analytics_schema.zig").Writer;
const Headers = bun.http.Headers;
const Futex = @import("../futex.zig");
const Semver = bun.Semver;
/// Enables analytics. This is used by:
@@ -274,7 +259,6 @@ pub const EventName = enum(u8) {
};
var random: std.rand.DefaultPrng = undefined;
const DotEnv = @import("../env_loader.zig");
const platform_arch = if (Environment.isAarch64) Analytics.Architecture.arm else Analytics.Architecture.x64;

View File

@@ -1,6 +1,5 @@
const std = @import("std");
const bun = @import("bun");
const unicode = std.unicode;
const js_ast = bun.JSAst;

View File

@@ -664,7 +664,7 @@ pub const FilePoll = struct {
/// Only intended to be used from EventLoop.Pollable
fn deactivate(this: *FilePoll, loop: *Loop) void {
loop.num_polls -= @as(i32, @intFromBool(this.flags.contains(.has_incremented_poll_count)));
if (this.flags.contains(.has_incremented_poll_count)) loop.dec();
this.flags.remove(.has_incremented_poll_count);
loop.subActive(@as(u32, @intFromBool(this.flags.contains(.has_incremented_active_count))));
@@ -676,7 +676,7 @@ pub const FilePoll = struct {
fn activate(this: *FilePoll, loop: *Loop) void {
this.flags.remove(.closed);
loop.num_polls += @as(i32, @intFromBool(!this.flags.contains(.has_incremented_poll_count)));
if (!this.flags.contains(.has_incremented_poll_count)) loop.inc();
this.flags.insert(.has_incremented_poll_count);
if (this.flags.contains(.keeps_event_loop_alive)) {

View File

@@ -104,11 +104,11 @@ JSC::JSInternalPromise* bakeModuleLoaderFetch(JSC::JSGlobalObject* globalObject,
auto& vm = JSC::getVM(globalObject);
auto scope = DECLARE_THROW_SCOPE(vm);
auto moduleKey = key.toWTFString(globalObject);
if (UNLIKELY(scope.exception()))
if (scope.exception()) [[unlikely]]
return rejectedInternalPromise(globalObject, scope.exception()->value());
if (moduleKey.startsWith("bake:/"_s)) {
if (LIKELY(global->m_perThreadData)) {
if (global->m_perThreadData) [[likely]] {
BunString source = BakeProdLoad(global->m_perThreadData, Bun::toString(moduleKey));
if (source.tag != BunStringTag::Dead) {
JSC::SourceOrigin origin = JSC::SourceOrigin(WTF::URL(moduleKey));

View File

@@ -40,7 +40,7 @@ extern "C" JSC::JSPromise* BakeRenderRoutesForProdStatic(
NakedPtr<JSC::Exception> returnedException = nullptr;
auto result = JSC::profiledCall(global, JSC::ProfilingReason::API, cb, callData, JSC::jsUndefined(), args, returnedException);
if (UNLIKELY(returnedException)) {
if (returnedException) [[unlikely]] {
// This should be impossible because it returns a promise.
return JSC::JSPromise::rejectedPromise(global, returnedException->value());
}

View File

@@ -11,7 +11,6 @@ const DevServer = @This();
pub const debug = bun.Output.Scoped(.DevServer, false);
pub const igLog = bun.Output.scoped(.IncrementalGraph, false);
pub const mapLog = bun.Output.scoped(.SourceMapStore, false);
const DebugHTTPServer = @import("../bun.js/api/server.zig").DebugHTTPServer;
pub const Options = struct {
/// Arena must live until DevServer.deinit()
@@ -441,7 +440,7 @@ pub fn init(options: Options) bun.JSOOM!*DevServer {
.memory_visualizer_timer = .initPaused(.DevServerMemoryVisualizerTick),
.has_pre_crash_handler = bun.FeatureFlags.bake_debugging_features and
options.dump_state_on_crash orelse
bun.getRuntimeFeatureFlag("BUN_DUMP_STATE_ON_CRASH"),
bun.getRuntimeFeatureFlag(.BUN_DUMP_STATE_ON_CRASH),
.frontend_only = options.framework.file_system_router_types.len == 0,
.client_graph = .empty,
.server_graph = .empty,
@@ -471,7 +470,7 @@ pub fn init(options: Options) bun.JSOOM!*DevServer {
else
true
else
bun.getRuntimeFeatureFlag("BUN_ASSUME_PERFECT_INCREMENTAL"),
bun.getRuntimeFeatureFlag(.BUN_ASSUME_PERFECT_INCREMENTAL),
.relative_path_buf_lock = .unlocked,
.testing_batch_events = .disabled,
.broadcast_console_log_from_browser_to_server = options.broadcast_console_log_from_browser_to_server,
@@ -8497,7 +8496,6 @@ const BundleV2 = bun.bundle_v2.BundleV2;
const Chunk = bun.bundle_v2.Chunk;
const ContentHasher = bun.bundle_v2.ContentHasher;
const Define = bun.options.Define;
const uws = bun.uws;
const AnyWebSocket = uws.AnyWebSocket;
@@ -8509,8 +8507,6 @@ const MimeType = bun.http.MimeType;
const JSC = bun.JSC;
const JSValue = JSC.JSValue;
const VirtualMachine = JSC.VirtualMachine;
const JSModuleLoader = JSC.JSModuleLoader;
const EventLoopHandle = JSC.EventLoopHandle;
const HTMLBundle = JSC.API.HTMLBundle;
const Plugin = JSC.API.JSBundler.Plugin;
const EventLoopTimer = bun.api.Timer.EventLoopTimer;

View File

@@ -1,12 +1,10 @@
const bun = @import("bun");
const logger = bun.logger;
const std = @import("std");
const Fs = bun.fs;
const string = bun.string;
const Resolver = @import("../resolver//resolver.zig").Resolver;
const JSC = bun.JSC;
const JSGlobalObject = JSC.JSGlobalObject;
const strings = bun.strings;
const default_allocator = bun.default_allocator;
const ZigString = JSC.ZigString;
const JSValue = JSC.JSValue;

View File

@@ -2030,7 +2030,7 @@ fn dumpSourceString(vm: *VirtualMachine, specifier: string, written: []const u8)
fn dumpSourceStringFailiable(vm: *VirtualMachine, specifier: string, written: []const u8) !void {
if (!Environment.isDebug) return;
if (bun.getRuntimeFeatureFlag("BUN_DEBUG_NO_DUMP")) return;
if (bun.getRuntimeFeatureFlag(.BUN_DEBUG_NO_DUMP)) return;
const BunDebugHolder = struct {
pub var dir: ?std.fs.Dir = null;
@@ -2632,7 +2632,6 @@ pub const FetchFlags = enum {
}
};
const SavedSourceMap = JSC.SavedSourceMap;
pub const HardcodedModule = enum {
bun,
@@ -3061,22 +3060,16 @@ export fn ModuleLoader__isBuiltin(data: [*]const u8, len: usize) bool {
}
const std = @import("std");
const StaticExport = @import("./bindings/static_export.zig");
const bun = @import("bun");
const string = bun.string;
const Output = bun.Output;
const Global = bun.Global;
const Environment = bun.Environment;
const strings = bun.strings;
const MutableString = bun.MutableString;
const stringZ = bun.stringZ;
const StoredFileDescriptorType = bun.StoredFileDescriptorType;
const Arena = @import("../allocators/mimalloc_arena.zig").Arena;
const Allocator = std.mem.Allocator;
const IdentityContext = @import("../identity_context.zig").IdentityContext;
const Fs = @import("../fs.zig");
const Resolver = @import("../resolver/resolver.zig");
const ast = @import("../import_record.zig");
const MacroEntryPoint = bun.transpiler.EntryPoints.MacroEntryPoint;
const ParseResult = bun.transpiler.ParseResult;
@@ -3086,15 +3079,11 @@ const options = @import("../options.zig");
const Transpiler = bun.Transpiler;
const PluginRunner = bun.transpiler.PluginRunner;
const js_printer = bun.js_printer;
const js_parser = bun.js_parser;
const js_ast = bun.JSAst;
const ImportKind = ast.ImportKind;
const Analytics = @import("../analytics/analytics_thread.zig");
const ZigString = bun.JSC.ZigString;
const Runtime = @import("../runtime.zig");
const Router = @import("./api/filesystem_router.zig");
const ImportRecord = ast.ImportRecord;
const DotEnv = @import("../env_loader.zig");
const PackageJSON = @import("../resolver/package_json.zig").PackageJSON;
const MacroRemap = @import("../resolver/package_json.zig").MacroMap;
const JSC = bun.JSC;
@@ -3102,22 +3091,8 @@ const JSValue = bun.JSC.JSValue;
const node_module_module = @import("./bindings/NodeModuleModule.zig");
const JSGlobalObject = bun.JSC.JSGlobalObject;
const ConsoleObject = bun.JSC.ConsoleObject;
const ZigException = bun.JSC.ZigException;
const ZigStackTrace = bun.JSC.ZigStackTrace;
const ResolvedSource = bun.JSC.ResolvedSource;
const JSPromise = bun.JSC.JSPromise;
const JSModuleLoader = bun.JSC.JSModuleLoader;
const JSPromiseRejectionOperation = bun.JSC.JSPromiseRejectionOperation;
const ErrorableZigString = bun.JSC.ErrorableZigString;
const VM = bun.JSC.VM;
const JSFunction = bun.JSC.JSFunction;
const Config = @import("./config.zig");
const URL = @import("../url.zig").URL;
const Bun = JSC.API.Bun;
const EventLoop = JSC.EventLoop;
const PendingResolution = @import("../resolver/resolver.zig").PendingResolution;
const ThreadSafeFunction = bun.api.napi.ThreadSafeFunction;
const PackageManager = @import("../install/install.zig").PackageManager;
const Install = @import("../install/install.zig");
const VirtualMachine = bun.JSC.VirtualMachine;

View File

@@ -15,7 +15,6 @@ const expected_version = 13;
const bun = @import("bun");
const std = @import("std");
const Output = bun.Output;
const JSC = bun.JSC;
const debug = Output.scoped(.cache, false);
const MINIMUM_CACHE_SIZE = 50 * 1024;

View File

@@ -438,7 +438,7 @@ pub fn loadExtraEnvAndSourceCodePrinter(this: *VirtualMachine) void {
this.hide_bun_stackframes = false;
}
if (bun.getRuntimeFeatureFlag("BUN_FEATURE_FLAG_DISABLE_ASYNC_TRANSPILER")) {
if (bun.getRuntimeFeatureFlag(.BUN_FEATURE_FLAG_DISABLE_ASYNC_TRANSPILER)) {
this.transpiler_store.enabled = false;
}
@@ -2484,8 +2484,8 @@ pub fn remapZigException(
) void {
error_instance.toZigException(this.global, exception);
const enable_source_code_preview = allow_source_code_preview and
!(bun.getRuntimeFeatureFlag("BUN_DISABLE_SOURCE_CODE_PREVIEW") or
bun.getRuntimeFeatureFlag("BUN_DISABLE_TRANSPILED_SOURCE_CODE_PREVIEW"));
!(bun.getRuntimeFeatureFlag(.BUN_DISABLE_SOURCE_CODE_PREVIEW) or
bun.getRuntimeFeatureFlag(.BUN_DISABLE_TRANSPILED_SOURCE_CODE_PREVIEW));
defer {
if (Environment.isDebug) {
@@ -3513,22 +3513,16 @@ const Async = bun.Async;
const Transpiler = bun.Transpiler;
const ImportWatcher = JSC.hot_reloader.ImportWatcher;
const MutableString = bun.MutableString;
const stringZ = bun.stringZ;
const default_allocator = bun.default_allocator;
const StoredFileDescriptorType = bun.StoredFileDescriptorType;
const ErrorableString = JSC.ErrorableString;
const Arena = @import("../allocators/mimalloc_arena.zig").Arena;
const Exception = JSC.Exception;
const Allocator = std.mem.Allocator;
const IdentityContext = @import("../identity_context.zig").IdentityContext;
const Fs = @import("../fs.zig");
const Resolver = @import("../resolver/resolver.zig");
const ast = @import("../import_record.zig");
const MacroEntryPoint = bun.transpiler.EntryPoints.MacroEntryPoint;
const ParseResult = bun.transpiler.ParseResult;
const logger = bun.logger;
const Api = @import("../api/schema.zig").Api;
const JSPrivateDataPtr = JSC.JSPrivateDataPtr;
const ConsoleObject = JSC.ConsoleObject;
const Node = JSC.Node;
const ZigException = JSC.ZigException;
@@ -3537,16 +3531,11 @@ const ErrorableResolvedSource = JSC.ErrorableResolvedSource;
const ResolvedSource = JSC.ResolvedSource;
const JSInternalPromise = JSC.JSInternalPromise;
const JSModuleLoader = JSC.JSModuleLoader;
const JSPromiseRejectionOperation = JSC.JSPromiseRejectionOperation;
const ErrorableZigString = JSC.ErrorableZigString;
const VM = JSC.VM;
const JSFunction = JSC.JSFunction;
const Config = @import("./config.zig");
const URL = @import("../url.zig").URL;
const Bun = JSC.API.Bun;
const EventLoop = JSC.EventLoop;
const PendingResolution = @import("../resolver/resolver.zig").PendingResolution;
const ThreadSafeFunction = bun.api.napi.ThreadSafeFunction;
const PackageManager = @import("../install/install.zig").PackageManager;
const IPC = @import("ipc.zig");
const DNSResolver = @import("api/bun/dns_resolver.zig").DNSResolver;

View File

@@ -38,6 +38,10 @@ pub const BunObject = struct {
pub const udpSocket = toJSCallback(host_fn.wrapStaticMethod(api.UDPSocket, "udpSocket", false));
pub const which = toJSCallback(Bun.which);
pub const write = toJSCallback(JSC.WebCore.Blob.writeFile);
pub const zstdCompressSync = toJSCallback(JSZstd.compressSync);
pub const zstdDecompressSync = toJSCallback(JSZstd.decompressSync);
pub const zstdCompress = toJSCallback(JSZstd.compress);
pub const zstdDecompress = toJSCallback(JSZstd.decompress);
// --- Callbacks ---
@@ -168,7 +172,10 @@ pub const BunObject = struct {
@export(&BunObject.udpSocket, .{ .name = callbackName("udpSocket") });
@export(&BunObject.which, .{ .name = callbackName("which") });
@export(&BunObject.write, .{ .name = callbackName("write") });
@export(&BunObject.zstdCompressSync, .{ .name = callbackName("zstdCompressSync") });
@export(&BunObject.zstdDecompressSync, .{ .name = callbackName("zstdDecompressSync") });
@export(&BunObject.zstdCompress, .{ .name = callbackName("zstdCompress") });
@export(&BunObject.zstdDecompress, .{ .name = callbackName("zstdDecompress") });
// -- Callbacks --
}
};
@@ -1716,6 +1723,287 @@ pub const JSZlib = struct {
}
};
pub const JSZstd = struct {
export fn deallocator(_: ?*anyopaque, ctx: ?*anyopaque) void {
comptime assert(bun.use_mimalloc);
bun.Mimalloc.mi_free(ctx);
}
inline fn getOptions(globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!struct { JSC.Node.StringOrBuffer, ?JSValue } {
const arguments = callframe.arguments();
const buffer_value = if (arguments.len > 0) arguments[0] else .undefined;
const options_val: ?JSValue =
if (arguments.len > 1 and arguments[1].isObject())
arguments[1]
else if (arguments.len > 1 and !arguments[1].isUndefined()) {
return globalThis.throwInvalidArguments("Expected options to be an object", .{});
} else null;
if (try JSC.Node.StringOrBuffer.fromJS(globalThis, bun.default_allocator, buffer_value)) |buffer| {
return .{ buffer, options_val };
}
return globalThis.throwInvalidArguments("Expected buffer to be a string or buffer", .{});
}
fn getLevel(globalThis: *JSGlobalObject, options_val: ?JSValue) bun.JSError!i32 {
if (options_val) |option_obj| {
if (try option_obj.get(globalThis, "level")) |level_val| {
const value = level_val.coerce(i32, globalThis);
if (globalThis.hasException()) return error.JSError;
if (value < 1 or value > 22) {
return globalThis.throwInvalidArguments("Compression level must be between 1 and 22", .{});
}
return value;
}
}
return 3;
}
inline fn getOptionsAsync(globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!struct { JSC.Node.StringOrBuffer, ?JSValue, i32 } {
const arguments = callframe.arguments();
const buffer_value = if (arguments.len > 0) arguments[0] else .undefined;
const options_val: ?JSValue =
if (arguments.len > 1 and arguments[1].isObject())
arguments[1]
else if (arguments.len > 1 and !arguments[1].isUndefined()) {
return globalThis.throwInvalidArguments("Expected options to be an object", .{});
} else null;
const level = try getLevel(globalThis, options_val);
const allow_string_object = true;
if (try JSC.Node.StringOrBuffer.fromJSMaybeAsync(globalThis, bun.default_allocator, buffer_value, true, allow_string_object)) |buffer| {
return .{ buffer, options_val, level };
}
return globalThis.throwInvalidArguments("Expected buffer to be a string or buffer", .{});
}
pub fn compressSync(globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue {
const buffer, const options_val = try getOptions(globalThis, callframe);
defer buffer.deinit();
const level = try getLevel(globalThis, options_val);
const input = buffer.slice();
const allocator = bun.default_allocator;
// Calculate max compressed size
const max_size = bun.zstd.compressBound(input.len);
var output = try allocator.alloc(u8, max_size);
// Perform compression with context
const compressed_size = switch (bun.zstd.compress(output, input, level)) {
.success => |size| size,
.err => |err| {
allocator.free(output);
return globalThis.ERR(.ZSTD, "{s}", .{err}).throw();
},
};
// Resize to actual compressed size
if (compressed_size < output.len) {
output = try allocator.realloc(output, compressed_size);
}
return JSC.JSValue.createBuffer(globalThis, output, bun.default_allocator);
}
pub fn decompressSync(globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue {
const buffer, _ = try getOptions(globalThis, callframe);
defer buffer.deinit();
const input = buffer.slice();
const allocator = bun.default_allocator;
// Try to get the decompressed size
const decompressed_size = bun.zstd.getDecompressedSize(input);
if (decompressed_size == std.math.maxInt(c_ulonglong) - 1 or decompressed_size == std.math.maxInt(c_ulonglong) - 2) {
// If size is unknown, we'll need to decompress in chunks
return globalThis.ERR(.ZSTD, "Decompressed size is unknown. Either the input is not a valid zstd compressed buffer or the decompressed size is too large. If you run into this error with a valid input, please file an issue at https://github.com/oven-sh/bun/issues", .{}).throw();
}
// Allocate output buffer based on decompressed size
var output = try allocator.alloc(u8, decompressed_size);
// Perform decompression
const actual_size = switch (bun.zstd.decompress(output, input)) {
.success => |actual_size| actual_size,
.err => |err| {
allocator.free(output);
return globalThis.ERR(.ZSTD, "{s}", .{err}).throw();
},
};
bun.debugAssert(actual_size <= output.len);
// mimalloc doesn't care about the self-reported size of the slice.
output.len = actual_size;
return JSC.JSValue.createBuffer(globalThis, output, bun.default_allocator);
}
// --- Async versions ---
pub const ZstdJob = struct {
buffer: JSC.Node.StringOrBuffer = JSC.Node.StringOrBuffer.empty,
is_compress: bool = true,
level: i32 = 3,
task: JSC.WorkPoolTask = .{ .callback = &runTask },
promise: JSC.JSPromise.Strong = .{},
vm: *JSC.VirtualMachine,
output: []u8 = &[_]u8{},
error_message: ?[]const u8 = null,
any_task: JSC.AnyTask = undefined,
poll: Async.KeepAlive = .{},
pub const new = bun.TrivialNew(@This());
pub fn runTask(task: *JSC.WorkPoolTask) void {
const job: *ZstdJob = @fieldParentPtr("task", task);
defer job.vm.enqueueTaskConcurrent(JSC.ConcurrentTask.create(job.any_task.task()));
const input = job.buffer.slice();
const allocator = bun.default_allocator;
if (job.is_compress) {
// Compression path
// Calculate max compressed size
const max_size = bun.zstd.compressBound(input.len);
job.output = allocator.alloc(u8, max_size) catch {
job.error_message = "Out of memory";
return;
};
// Perform compression
job.output = switch (bun.zstd.compress(job.output, input, job.level)) {
.success => |size| blk: {
// Resize to actual compressed size
if (size < job.output.len) {
break :blk allocator.realloc(job.output, size) catch {
job.error_message = "Out of memory";
return;
};
}
break :blk job.output;
},
.err => |err| {
allocator.free(job.output);
job.output = &[_]u8{};
job.error_message = err;
return;
},
};
} else {
// Decompression path
// Try to get the decompressed size
const decompressed_size = bun.zstd.getDecompressedSize(input);
if (decompressed_size == std.math.maxInt(c_ulonglong) - 1 or decompressed_size == std.math.maxInt(c_ulonglong) - 2) {
job.error_message = "Decompressed size is unknown. Either the input is not a valid zstd compressed buffer or the decompressed size is too large";
return;
}
// Allocate output buffer based on decompressed size
job.output = allocator.alloc(u8, decompressed_size) catch {
job.error_message = "Out of memory";
return;
};
// Perform decompression
switch (bun.zstd.decompress(job.output, input)) {
.success => |actual_size| {
if (actual_size < job.output.len) {
job.output.len = actual_size;
}
},
.err => |err| {
allocator.free(job.output);
job.output = &[_]u8{};
job.error_message = err;
return;
},
}
}
}
pub fn runFromJS(this: *ZstdJob) void {
defer this.deinit();
if (this.vm.isShuttingDown()) {
return;
}
const globalThis = this.vm.global;
const promise = this.promise.swap();
if (this.error_message) |err_msg| {
promise.reject(globalThis, globalThis.ERR(.ZSTD, "{s}", .{err_msg}).toJS());
return;
}
const output_slice = this.output;
const buffer_value = JSC.JSValue.createBuffer(globalThis, output_slice, bun.default_allocator);
if (globalThis.hasException()) {
promise.reject(globalThis, error.JSError);
return;
}
if (buffer_value == .zero) {
promise.reject(globalThis, ZigString.init("Failed to create buffer").toErrorInstance(globalThis));
return;
}
this.output = &[_]u8{};
promise.resolve(globalThis, buffer_value);
}
pub fn deinit(this: *ZstdJob) void {
this.poll.unref(this.vm);
this.buffer.deinitAndUnprotect();
this.promise.deinit();
bun.default_allocator.free(this.output);
bun.destroy(this);
}
pub fn create(vm: *JSC.VirtualMachine, globalThis: *JSC.JSGlobalObject, buffer: JSC.Node.StringOrBuffer, is_compress: bool, level: i32) *ZstdJob {
var job = ZstdJob.new(.{
.buffer = buffer,
.is_compress = is_compress,
.level = level,
.vm = vm,
.any_task = undefined,
});
job.promise = JSC.JSPromise.Strong.init(globalThis);
job.any_task = JSC.AnyTask.New(@This(), &runFromJS).init(job);
job.poll.ref(vm);
JSC.WorkPool.schedule(&job.task);
return job;
}
};
pub fn compress(globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue {
const buffer, _, const level = try getOptionsAsync(globalThis, callframe);
const vm = globalThis.bunVM();
var job = ZstdJob.create(vm, globalThis, buffer, true, level);
return job.promise.value();
}
pub fn decompress(globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue {
const buffer, _, _ = try getOptionsAsync(globalThis, callframe);
const vm = globalThis.bunVM();
var job = ZstdJob.create(vm, globalThis, buffer, false, 0); // level is ignored for decompression
return job.promise.value();
}
};
// const InternalTestingAPIs = struct {
// pub fn BunInternalFunction__syntaxHighlighter(globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue {
// const args = callframe.arguments_old(1);
@@ -1753,84 +2041,39 @@ comptime {
const assert = bun.assert;
const conv = std.builtin.CallingConvention.Unspecified;
const S3File = @import("../webcore/S3File.zig");
const Bun = @This();
const default_allocator = bun.default_allocator;
const bun = @import("bun");
const uv = bun.windows.libuv;
const Environment = bun.Environment;
const Global = bun.Global;
const strings = bun.strings;
const string = bun.string;
const Output = bun.Output;
const MutableString = bun.MutableString;
const std = @import("std");
const Allocator = std.mem.Allocator;
const IdentityContext = @import("../../identity_context.zig").IdentityContext;
const Fs = @import("../../fs.zig");
const Resolver = @import("../../resolver/resolver.zig");
const ast = @import("../../import_record.zig");
const MacroEntryPoint = bun.transpiler.MacroEntryPoint;
const logger = bun.logger;
const Api = @import("../../api/schema.zig").Api;
const options = @import("../../options.zig");
const js_printer = bun.js_printer;
const js_parser = bun.js_parser;
const js_ast = bun.JSAst;
const NodeFallbackModules = @import("../../node_fallbacks.zig");
const ImportKind = ast.ImportKind;
const Analytics = @import("../../analytics/analytics_thread.zig");
const ZigString = bun.JSC.ZigString;
const Runtime = @import("../../runtime.zig");
const Router = @import("./filesystem_router.zig");
const ImportRecord = ast.ImportRecord;
const DotEnv = @import("../../env_loader.zig");
const ParseResult = bun.transpiler.ParseResult;
const PackageJSON = @import("../../resolver/package_json.zig").PackageJSON;
const MacroRemap = @import("../../resolver/package_json.zig").MacroMap;
const WebCore = bun.JSC.WebCore;
const Request = WebCore.Request;
const Response = WebCore.Response;
const Headers = WebCore.Headers;
const Fetch = WebCore.Fetch;
const JSC = bun.JSC;
const JSValue = bun.JSC.JSValue;
const JSGlobalObject = bun.JSC.JSGlobalObject;
const JSPrivateDataPtr = bun.JSC.JSPrivateDataPtr;
const ConsoleObject = bun.JSC.ConsoleObject;
const api = bun.api;
const node = bun.api.node;
const host_fn = bun.jsc.host_fn;
const ZigException = bun.JSC.ZigException;
const ZigStackTrace = bun.JSC.ZigStackTrace;
const ErrorableResolvedSource = bun.JSC.ErrorableResolvedSource;
const ResolvedSource = bun.JSC.ResolvedSource;
const JSPromise = bun.JSC.JSPromise;
const JSInternalPromise = bun.JSC.JSInternalPromise;
const JSModuleLoader = bun.JSC.JSModuleLoader;
const JSPromiseRejectionOperation = bun.JSC.JSPromiseRejectionOperation;
const ErrorableZigString = bun.JSC.ErrorableZigString;
const VM = bun.JSC.VM;
const JSFunction = bun.JSC.JSFunction;
const Config = @import("../config.zig");
const URL = @import("../../url.zig").URL;
const Transpiler = bun.JSC.API.JSTranspiler;
const JSBundler = bun.JSC.API.JSBundler;
const VirtualMachine = JSC.VirtualMachine;
const IOTask = JSC.IOTask;
const zlib = @import("../../zlib.zig");
const Which = @import("../../which.zig");
const ErrorableString = JSC.ErrorableString;
const glob = @import("../../glob.zig");
const Async = bun.Async;
const SemverObject = bun.Semver.SemverObject;
const Braces = @import("../../shell/braces.zig");
const Shell = @import("../../shell/shell.zig");
const Debugger = JSC.Debugger;
const HashObject = bun.api.HashObject;
const UnsafeObject = bun.api.UnsafeObject;
const TOMLObject = bun.api.TOMLObject;
const Timer = bun.api.Timer;
const FFIObject = bun.api.FFIObject;

View File

@@ -138,7 +138,6 @@ const HashObject = @This();
const JSC = bun.JSC;
const JSValue = JSC.JSValue;
const JSGlobalObject = JSC.JSGlobalObject;
const JSObject = JSC.JSObject;
const std = @import("std");
const bun = @import("bun");
const ZigString = JSC.ZigString;

View File

@@ -1,7 +1,5 @@
const std = @import("std");
const Api = @import("../../api/schema.zig").Api;
const QueryStringMap = @import("../../url.zig").QueryStringMap;
const CombinedScanner = @import("../../url.zig").CombinedScanner;
const bun = @import("bun");
const string = bun.string;
const JSC = bun.JSC;
@@ -9,33 +7,17 @@ const WebCore = bun.webcore;
const Transpiler = bun.transpiler;
const options = @import("../../options.zig");
const resolve_path = @import("../../resolver/resolve_path.zig");
const ScriptSrcStream = std.io.FixedBufferStream([]u8);
const ZigString = JSC.ZigString;
const Fs = @import("../../fs.zig");
const JSObject = JSC.JSObject;
const JSValue = bun.JSC.JSValue;
const JSGlobalObject = JSC.JSGlobalObject;
const strings = bun.strings;
const JSError = bun.JSError;
const OOM = bun.OOM;
const Request = WebCore.Request;
const String = bun.String;
const FetchEvent = WebCore.FetchEvent;
const MacroMap = @import("../../resolver/package_json.zig").MacroMap;
const TSConfigJSON = @import("../../resolver/tsconfig_json.zig").TSConfigJSON;
const PackageJSON = @import("../../resolver/package_json.zig").PackageJSON;
const logger = bun.logger;
const Loader = options.Loader;
const Target = options.Target;
const JSAst = bun.JSAst;
const JSParser = bun.js_parser;
const JSPrinter = bun.js_printer;
const ScanPassResult = JSParser.ScanPassResult;
const Mimalloc = @import("../../allocators/mimalloc_arena.zig");
const Runtime = @import("../../runtime.zig").Runtime;
const JSLexer = bun.js_lexer;
const Expr = JSAst.Expr;
const Index = @import("../../ast/base.zig").Index;
const debug = bun.Output.scoped(.Transpiler, false);

View File

@@ -1,21 +1,15 @@
const std = @import("std");
const Api = @import("../../api/schema.zig").Api;
const QueryStringMap = @import("../../url.zig").QueryStringMap;
const CombinedScanner = @import("../../url.zig").CombinedScanner;
const bun = @import("bun");
const string = bun.string;
const JSC = bun.JSC;
const Transpiler = bun.transpiler;
const options = @import("../../options.zig");
const ScriptSrcStream = std.io.FixedBufferStream([]u8);
const ZigString = JSC.ZigString;
const Fs = @import("../../fs.zig");
const JSObject = JSC.JSObject;
const JSValue = bun.JSC.JSValue;
const JSGlobalObject = JSC.JSGlobalObject;
const strings = bun.strings;
const Request = bun.webcore.Request;
const FetchEvent = bun.webcore.FetchEvent;
const MacroMap = @import("../../resolver/package_json.zig").MacroMap;
const TSConfigJSON = @import("../../resolver/tsconfig_json.zig").TSConfigJSON;
const PackageJSON = @import("../../resolver/package_json.zig").PackageJSON;

View File

@@ -56,12 +56,9 @@ pub fn parse(
return out.toJSByParseJSON(globalThis);
}
const TOMLObject = @This();
const JSC = bun.JSC;
const JSValue = JSC.JSValue;
const JSGlobalObject = JSC.JSGlobalObject;
const JSObject = JSC.JSObject;
const std = @import("std");
const ZigString = JSC.ZigString;
const logger = bun.logger;
const bun = @import("bun");

View File

@@ -70,7 +70,6 @@ fn dump_mimalloc(globalObject: *JSC.JSGlobalObject, _: *JSC.CallFrame) bun.JSErr
const JSC = bun.JSC;
const JSValue = JSC.JSValue;
const JSGlobalObject = JSC.JSGlobalObject;
const JSObject = JSC.JSObject;
const std = @import("std");
const bun = @import("bun");
const ZigString = JSC.ZigString;

View File

@@ -1,4 +1,3 @@
const Bun = @This();
const default_allocator = bun.default_allocator;
const bun = @import("bun");
const Environment = bun.Environment;
@@ -7,7 +6,6 @@ const Global = bun.Global;
const strings = bun.strings;
const string = bun.string;
const Output = bun.Output;
const MutableString = bun.MutableString;
const std = @import("std");
const Allocator = std.mem.Allocator;
const JSC = bun.JSC;
@@ -1629,7 +1627,7 @@ pub const InternalDNS = struct {
getaddrinfo_calls += 1;
var timestamp_to_store: u32 = 0;
// is there a cache hit?
if (!bun.getRuntimeFeatureFlag("BUN_FEATURE_FLAG_DISABLE_DNS_CACHE")) {
if (!bun.getRuntimeFeatureFlag(.BUN_FEATURE_FLAG_DISABLE_DNS_CACHE)) {
if (global_cache.get(key, &timestamp_to_store)) |entry| {
if (preload) {
global_cache.lock.unlock();
@@ -1668,7 +1666,7 @@ pub const InternalDNS = struct {
global_cache.lock.unlock();
if (comptime Environment.isMac) {
if (!bun.getRuntimeFeatureFlag("BUN_FEATURE_FLAG_DISABLE_DNS_CACHE_LIBINFO")) {
if (!bun.getRuntimeFeatureFlag(.BUN_FEATURE_FLAG_DISABLE_DNS_CACHE_LIBINFO)) {
const res = lookupLibinfo(req, loop.internal_loop_data.getParent());
log("getaddrinfo({s}) = cache miss (libinfo)", .{host orelse ""});
if (res) return req;

View File

@@ -36,7 +36,6 @@ const MAX_WINDOW_SIZE = std.math.maxInt(i32);
const MAX_HEADER_TABLE_SIZE = std.math.maxInt(u32);
const MAX_STREAM_ID = std.math.maxInt(i32);
const WINDOW_INCREMENT_SIZE = std.math.maxInt(u16);
const MAX_HPACK_HEADER_SIZE = std.math.maxInt(u16);
const MAX_FRAME_SIZE = std.math.maxInt(u24);
const PaddingStrategy = enum {
@@ -658,7 +657,7 @@ pub const H2FrameParser = struct {
const RefCount = bun.ptr.RefCount(@This(), "ref_count", deinit, .{});
pub const ref = RefCount.ref;
pub const deref = RefCount.deref;
const ENABLE_AUTO_CORK = true; // ENABLE CORK OPTIMIZATION
const ENABLE_AUTO_CORK = false; // ENABLE CORK OPTIMIZATION
const ENABLE_ALLOCATOR_POOL = true; // ENABLE HIVE ALLOCATOR OPTIMIZATION
const MAX_BUFFER_SIZE = 32768;
@@ -1678,6 +1677,7 @@ pub const H2FrameParser = struct {
JSC.markBinding(@src());
log("write {}", .{bytes.len});
if (comptime ENABLE_AUTO_CORK) {
// TODO: make this use AutoFlusher
this.cork();
const available = CORK_BUFFER[CORK_OFFSET..];
if (bytes.len > available.len) {

View File

@@ -2,17 +2,13 @@ const default_allocator = bun.default_allocator;
const bun = @import("bun");
const Environment = bun.Environment;
const Global = bun.Global;
const strings = bun.strings;
const string = bun.string;
const Output = bun.Output;
const MutableString = bun.MutableString;
const std = @import("std");
const Allocator = std.mem.Allocator;
const JSC = bun.JSC;
const JSValue = JSC.JSValue;
const JSGlobalObject = JSC.JSGlobalObject;
const Which = @import("../../../which.zig");
const uws = bun.uws;
const ZigString = JSC.ZigString;
const BoringSSL = bun.BoringSSL.c;
@@ -1066,6 +1062,10 @@ pub const Listener = struct {
}
pub fn connect(globalObject: *JSC.JSGlobalObject, opts: JSValue) bun.JSError!JSValue {
return connectInner(globalObject, null, null, opts);
}
pub fn connectInner(globalObject: *JSC.JSGlobalObject, prev_maybe_tcp: ?*TCPSocket, prev_maybe_tls: ?*TLSSocket, opts: JSValue) bun.JSError!JSValue {
if (opts.isEmptyOrUndefinedOrNull() or opts.isBoolean() or !opts.isObject()) {
return globalObject.throwInvalidArguments("Expected options object", .{});
}
@@ -1133,14 +1133,23 @@ pub const Listener = struct {
var handlers_ptr = handlers.vm.allocator.create(Handlers) catch bun.outOfMemory();
handlers_ptr.* = handlers;
handlers_ptr.is_server = false;
var promise = JSC.JSPromise.create(globalObject);
const promise_value = promise.toJS();
handlers_ptr.promise.set(globalObject, promise_value);
if (ssl_enabled) {
var tls = TLSSocket.new(.{
var tls = if (prev_maybe_tls) |prev| blk: {
bun.destroy(prev.handlers);
bun.assert(prev.this_value != .zero);
prev.handlers = handlers_ptr;
bun.assert(prev.socket.socket == .detached);
prev.connection = connection;
prev.protos = if (protos) |p| (bun.default_allocator.dupe(u8, p) catch bun.outOfMemory()) else null;
prev.server_name = server_name;
prev.socket_context = null;
break :blk prev;
} else TLSSocket.new(.{
.ref_count = .init(),
.handlers = handlers_ptr,
.this_value = .zero,
@@ -1166,7 +1175,16 @@ pub const Listener = struct {
tls.socket = TLSSocket.Socket.fromNamedPipe(named_pipe);
}
} else {
var tcp = TCPSocket.new(.{
var tcp = if (prev_maybe_tcp) |prev| blk: {
bun.assert(prev.this_value != .zero);
prev.handlers = handlers_ptr;
bun.assert(prev.socket.socket == .detached);
bun.assert(prev.connection == null);
bun.assert(prev.protos == null);
bun.assert(prev.server_name == null);
prev.socket_context = null;
break :blk prev;
} else TCPSocket.new(.{
.ref_count = .init(),
.handlers = handlers_ptr,
.this_value = .zero,
@@ -1242,7 +1260,18 @@ pub const Listener = struct {
switch (ssl_enabled) {
inline else => |is_ssl_enabled| {
const SocketType = NewSocket(is_ssl_enabled);
const socket = bun.new(SocketType, .{
const maybe_previous: ?*SocketType = if (is_ssl_enabled) prev_maybe_tls else prev_maybe_tcp;
const socket = if (maybe_previous) |prev| blk: {
bun.assert(prev.this_value != .zero);
prev.handlers = handlers_ptr;
bun.assert(prev.socket.socket == .detached);
prev.connection = connection;
prev.protos = if (protos) |p| (bun.default_allocator.dupe(u8, p) catch bun.outOfMemory()) else null;
prev.server_name = server_name;
prev.socket_context = socket_context;
break :blk prev;
} else bun.new(SocketType, .{
.ref_count = .init(),
.handlers = handlers_ptr,
.this_value = .zero,
@@ -1252,7 +1281,7 @@ pub const Listener = struct {
.server_name = server_name,
.socket_context = socket_context, // owns the socket context
});
socket.ref();
SocketType.js.dataSetCached(socket.getThisValue(globalObject), globalObject, default_data);
socket.flags.allow_half_open = socket_config.allowHalfOpen;
socket.doConnect(connection) catch {
@@ -1260,7 +1289,9 @@ pub const Listener = struct {
return promise_value;
};
socket.poll_ref.ref(handlers.vm);
// if this is from node:net there's surface where the user can .ref() and .deref() before the connection starts. make sure we honor that here.
// in the Bun.connect path, this will always be true at this point in time.
if (socket.ref_pollref_on_connect) socket.poll_ref.ref(handlers.vm);
return promise_value;
},
@@ -1346,9 +1377,11 @@ fn NewSocket(comptime ssl: bool) type {
flags: Flags = .{},
ref_count: RefCount,
wrapped: WrappedType = .none,
// TODO: make this optional
handlers: *Handlers,
this_value: JSC.JSValue = .zero,
poll_ref: Async.KeepAlive = Async.KeepAlive.init(),
ref_pollref_on_connect: bool = true,
connection: ?Listener.UnixOrHost = null,
protos: ?[]const u8,
server_name: ?[]const u8 = null,
@@ -1445,9 +1478,7 @@ fn NewSocket(comptime ssl: bool) type {
pub fn doConnect(this: *This, connection: Listener.UnixOrHost) !void {
bun.assert(this.socket_context != null);
this.ref();
errdefer {
this.deref();
}
errdefer this.deref();
switch (connection) {
.host => |c| {
@@ -1480,6 +1511,7 @@ fn NewSocket(comptime ssl: bool) type {
pub fn resumeFromJS(this: *This, _: *JSC.JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSValue {
JSC.markBinding(@src());
if (this.socket.isDetached()) return .undefined;
log("resume", .{});
// we should not allow pausing/resuming a wrapped socket because a wrapped socket is 2 sockets and this can cause issues
@@ -1491,6 +1523,7 @@ fn NewSocket(comptime ssl: bool) type {
pub fn pauseFromJS(this: *This, _: *JSC.JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSValue {
JSC.markBinding(@src());
if (this.socket.isDetached()) return .undefined;
log("pause", .{});
// we should not allow pausing/resuming a wrapped socket because a wrapped socket is 2 sockets and this can cause issues
@@ -1612,7 +1645,7 @@ fn NewSocket(comptime ssl: bool) type {
}
fn handleConnectError(this: *This, errno: c_int) void {
log("onConnectError {s} ({d}, {})", .{ if (this.handlers.is_server) "S" else "C", errno, this.ref_count });
log("onConnectError {s} ({d}, {d})", .{ if (this.handlers.is_server) "S" else "C", errno, this.ref_count.active_counts });
// Ensure the socket is still alive for any defer's we have
this.ref();
defer this.deref();
@@ -1630,17 +1663,19 @@ fn NewSocket(comptime ssl: bool) type {
return;
}
bun.assert(errno >= 0);
var errno_: c_int = if (errno == @intFromEnum(bun.sys.SystemErrno.ENOENT)) @intFromEnum(bun.sys.SystemErrno.ENOENT) else @intFromEnum(bun.sys.SystemErrno.ECONNREFUSED);
const code_ = if (errno == @intFromEnum(bun.sys.SystemErrno.ENOENT)) bun.String.static("ENOENT") else bun.String.static("ECONNREFUSED");
if (Environment.isWindows and errno_ == @intFromEnum(bun.sys.SystemErrno.ENOENT)) errno_ = @intFromEnum(bun.sys.SystemErrno.UV_ENOENT);
if (Environment.isWindows and errno_ == @intFromEnum(bun.sys.SystemErrno.ECONNREFUSED)) errno_ = @intFromEnum(bun.sys.SystemErrno.UV_ECONNREFUSED);
const callback = handlers.onConnectError;
const globalObject = handlers.globalObject;
const err = JSC.SystemError{
.errno = errno,
.errno = -errno_,
.message = bun.String.static("Failed to connect"),
.syscall = bun.String.static("connect"),
// For some reason errno is 0 which causes this to be success.
// Unix socket emits ENOENT
.code = if (errno == @intFromEnum(bun.sys.SystemErrno.ENOENT)) bun.String.static("ENOENT") else bun.String.static("ECONNREFUSED"),
// .code = bun.String.static(@tagName(bun.sys.getErrno(errno))),
// .code = bun.String.static(@tagName(@as(bun.sys.E, @enumFromInt(errno)))),
.code = code_,
};
vm.eventLoop().enter();
defer {
@@ -1728,11 +1763,11 @@ fn NewSocket(comptime ssl: bool) type {
}
pub fn onOpen(this: *This, socket: Socket) void {
log("onOpen {s} {*} {} {}", .{ if (this.handlers.is_server) "S" else "C", this, this.socket.isDetached(), this.ref_count.active_counts });
// Ensure the socket remains alive until this is finished
this.ref();
defer this.deref();
log("onOpen {s} {} {}", .{ if (this.handlers.is_server) "S" else "C", this.socket.isDetached(), this.ref_count });
// update the internal socket instance to the one that was just connected
// This socket must be replaced because the previous one is a connecting socket not a uSockets socket
this.socket = socket;
@@ -1798,9 +1833,7 @@ fn NewSocket(comptime ssl: bool) type {
const vm = handlers.vm;
vm.eventLoop().enter();
defer vm.eventLoop().exit();
const result = callback.call(globalObject, this_value, &[_]JSValue{
this_value,
}) catch |err| globalObject.takeException(err);
const result = callback.call(globalObject, this_value, &[_]JSValue{this_value}) catch |err| globalObject.takeException(err);
if (result.toError()) |err| {
defer this.markInactive();
@@ -1811,7 +1844,7 @@ fn NewSocket(comptime ssl: bool) type {
}
if (handlers.rejectPromise(err)) return;
_ = handlers.callErrorHandler(this_value, &[_]JSC.JSValue{ this_value, err });
_ = handlers.callErrorHandler(this_value, &.{ this_value, err });
}
}
@@ -2022,8 +2055,6 @@ fn NewSocket(comptime ssl: bool) type {
}
pub fn getReadyState(this: *This, _: *JSC.JSGlobalObject) JSValue {
log("getReadyState()", .{});
if (this.socket.isDetached()) {
return JSValue.jsNumber(@as(i32, -1));
} else if (this.socket.isClosed()) {
@@ -2560,6 +2591,15 @@ fn NewSocket(comptime ssl: bool) type {
return JSValue.jsUndefined();
}
pub fn close(this: *This, globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue {
JSC.markBinding(@src());
_ = callframe;
this.socket.close(.normal);
this.socket.detach();
this.poll_ref.unref(globalObject.bunVM());
return .jsUndefined();
}
pub fn end(this: *This, globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue {
JSC.markBinding(@src());
@@ -2585,14 +2625,18 @@ fn NewSocket(comptime ssl: bool) type {
}
pub fn jsRef(this: *This, globalObject: *JSC.JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSValue {
log("ref {s}", .{if (this.handlers.is_server) "S" else "C"});
JSC.markBinding(@src());
if (this.socket.isDetached()) this.ref_pollref_on_connect = true;
if (this.socket.isDetached()) return JSValue.jsUndefined();
this.poll_ref.ref(globalObject.bunVM());
return JSValue.jsUndefined();
}
pub fn jsUnref(this: *This, globalObject: *JSC.JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSValue {
log("unref {s}", .{if (this.handlers.is_server) "S" else "C"});
JSC.markBinding(@src());
if (this.socket.isDetached()) this.ref_pollref_on_connect = false;
this.poll_ref.unref(globalObject.bunVM());
return JSValue.jsUndefined();
}
@@ -3347,7 +3391,7 @@ fn NewSocket(comptime ssl: bool) type {
return .zero;
}
const handlers = try Handlers.fromJS(globalObject, socket_obj, false);
const handlers = try Handlers.fromJS(globalObject, socket_obj, this.handlers.is_server);
if (globalObject.hasException()) {
return .zero;
@@ -3606,6 +3650,8 @@ pub fn NewWrappedHandler(comptime tls: bool) type {
}
}
pub const onFd = null;
pub fn onWritable(this: WrappedSocket, socket: Socket) void {
if (comptime tls) {
TLSSocket.onWritable(this.tls, socket);

View File

@@ -5,7 +5,6 @@
//! TODO: add a inspect method (under `Symbol.for("nodejs.util.inspect.custom")`).
//! Requires updating bindgen.
const SocketAddress = @This();
const validators = @import("./../../../node/util/validators.zig");
pub const js = JSC.Codegen.JSSocketAddress;
pub const toJS = js.toJS;
pub const fromJS = js.fromJS;
@@ -658,15 +657,12 @@ const ares = bun.c_ares;
const net = std.net;
const Environment = bun.Environment;
const string = bun.string;
const Output = bun.Output;
const JSC = bun.JSC;
const ZigString = JSC.ZigString;
const CallFrame = JSC.CallFrame;
const JSValue = JSC.JSValue;
const isDebug = bun.Environment.isDebug;
const allow_assert = bun.Environment.allow_assert;
pub const inet = if (bun.Environment.isWindows)
win: {

View File

@@ -1,8 +1,6 @@
const JSC = bun.JSC;
const bun = @import("bun");
const string = bun.string;
const std = @import("std");
const Output = bun.Output;
const Environment = bun.Environment;
const system = std.posix.system;

View File

@@ -1,16 +1,11 @@
const Allocator = std.mem.Allocator;
const uws = bun.uws;
const std = @import("std");
const default_allocator = bun.default_allocator;
const bun = @import("bun");
const Environment = bun.Environment;
const Async = bun.Async;
const JSC = bun.JSC;
const JSValue = JSC.JSValue;
const JSGlobalObject = JSC.JSGlobalObject;
const posix = std.posix;
const Output = bun.Output;
const os = std.os;
const uv = bun.windows.libuv;
pub const Stdio = union(enum) {

View File

@@ -1,9 +1,9 @@
const bun = @import("bun");
const BoringSSL = bun.BoringSSL.c;
const X509 = @import("./x509.zig");
const JSC = bun.JSC;
const uws = bun.uws;
const log = bun.Output.scoped(.SSLWrapper, true);
/// Mimics the behavior of openssl.c in uSockets, wrapping data that can be received from any where (network, DuplexStream, etc)
pub fn SSLWrapper(comptime T: type) type {
@@ -25,7 +25,9 @@ pub fn SSLWrapper(comptime T: type) type {
return struct {
const This = @This();
const BUFFER_SIZE = 16384;
// 64kb nice buffer size for SSL reads and writes, should be enough for most cases
// in reads we loop until we have no more data to read and in writes we loop until we have no more data to write/backpressure
const BUFFER_SIZE = 65536;
handlers: Handlers,
ssl: ?*BoringSSL.SSL,
@@ -110,6 +112,12 @@ pub fn SSLWrapper(comptime T: type) type {
// start the handshake
this.handleTraffic();
}
pub fn startWithPayload(this: *This, payload: []const u8) void {
this.handlers.onOpen(this.handlers.ctx);
this.receiveData(payload);
// start the handshake
this.handleTraffic();
}
/// Shutdown the read direction of the SSL (fake it just for convenience)
pub fn shutdownRead(this: *This) void {
@@ -180,10 +188,15 @@ pub fn SSLWrapper(comptime T: type) type {
// Return if we have pending data to be read or write
pub fn hasPendingData(this: *const This) bool {
const ssl = this.ssl orelse return false;
return BoringSSL.BIO_ctrl_pending(BoringSSL.SSL_get_wbio(ssl)) > 0 or BoringSSL.BIO_ctrl_pending(BoringSSL.SSL_get_rbio(ssl)) > 0;
}
/// Return if we buffered data inside the BIO read buffer, not necessarily will return data to read
/// this dont reflect SSL_pending()
fn hasPendingRead(this: *const This) bool {
const ssl = this.ssl orelse return false;
return BoringSSL.BIO_ctrl_pending(BoringSSL.SSL_get_rbio(ssl)) > 0;
}
// We sent or received a shutdown (closing or closed)
pub fn isShutdown(this: *const This) bool {
return this.flags.closed_notified or this.flags.received_ssl_shutdown or this.flags.sent_ssl_shutdown;
@@ -382,18 +395,12 @@ pub fn SSLWrapper(comptime T: type) type {
// read data from the input BIO
while (true) {
log("handleReading", .{});
const ssl = this.ssl orelse return false;
const input = BoringSSL.SSL_get_rbio(ssl) orelse return true;
const pending = BoringSSL.BIO_ctrl_pending(input);
if (pending <= 0) {
// no data to write
break;
}
const available = buffer[read..];
const just_read = BoringSSL.SSL_read(ssl, available.ptr, @intCast(available.len));
log("just read {d}", .{just_read});
if (just_read <= 0) {
const err = BoringSSL.SSL_get_error(ssl, just_read);
BoringSSL.ERR_clear_error();
@@ -424,11 +431,13 @@ pub fn SSLWrapper(comptime T: type) type {
// flush the reading
if (read > 0) {
log("triggering data callback (read {d})", .{read});
this.triggerDataCallback(buffer[0..read]);
}
this.triggerCloseCallback();
return false;
} else {
log("wanna read/write just break", .{});
// we wanna read/write just break
break;
}
@@ -438,6 +447,7 @@ pub fn SSLWrapper(comptime T: type) type {
read += @intCast(just_read);
if (read == buffer.len) {
log("triggering data callback (read {d}) and resetting read buffer", .{read});
// we filled the buffer
this.triggerDataCallback(buffer[0..read]);
read = 0;
@@ -445,41 +455,45 @@ pub fn SSLWrapper(comptime T: type) type {
}
// we finished reading
if (read > 0) {
log("triggering data callback (read {d})", .{read});
this.triggerDataCallback(buffer[0..read]);
}
return true;
}
fn handleWriting(this: *This, buffer: *[BUFFER_SIZE]u8) void {
var read: usize = 0;
while (true) {
const ssl = this.ssl orelse return;
const output = BoringSSL.SSL_get_wbio(ssl) orelse return;
// read data from the output BIO
const pending = BoringSSL.BIO_ctrl_pending(output);
if (pending <= 0) {
// no data to write
const available = buffer[read..];
const just_read = BoringSSL.BIO_read(output, available.ptr, @intCast(available.len));
if (just_read > 0) {
read += @intCast(just_read);
if (read == buffer.len) {
this.triggerWannaWriteCallback(buffer[0..read]);
read = 0;
}
} else {
break;
}
// limit the read to the buffer size
const len = @min(pending, buffer.len);
const pending_buffer = buffer[0..len];
const read = BoringSSL.BIO_read(output, pending_buffer.ptr, len);
if (read > 0) {
this.triggerWannaWriteCallback(buffer[0..@intCast(read)]);
}
}
if (read > 0) {
this.triggerWannaWriteCallback(buffer[0..read]);
}
}
fn handleTraffic(this: *This) void {
// always handle the handshake first
if (this.updateHandshakeState()) {
// shared stack buffer for reading and writing
var buffer: [BUFFER_SIZE]u8 = undefined;
// drain the input BIO first
this.handleWriting(&buffer);
// drain the output BIO
if (this.handleReading(&buffer)) {
// drain the output BIO in loop, because read can trigger writing and vice versa
while (this.hasPendingRead() and this.handleReading(&buffer)) {
// read data can trigger writing so we need to handle it
this.handleWriting(&buffer);
}

View File

@@ -2242,7 +2242,7 @@ pub fn spawnMaybeSync(
!jsc_vm.auto_killer.enabled and
!jsc_vm.jsc.hasExecutionTimeLimit() and
!jsc_vm.isInspectorEnabled() and
!bun.getRuntimeFeatureFlag("BUN_FEATURE_FLAG_DISABLE_SPAWNSYNC_FAST_PATH");
!bun.getRuntimeFeatureFlag(.BUN_FEATURE_FLAG_DISABLE_SPAWNSYNC_FAST_PATH);
const spawn_options = bun.spawn.SpawnOptions{
.cwd = cwd,
@@ -2680,12 +2680,10 @@ const default_allocator = bun.default_allocator;
const bun = @import("bun");
const Environment = bun.Environment;
const Global = bun.Global;
const strings = bun.strings;
const string = bun.string;
const Output = bun.Output;
const CowString = bun.ptr.CowString;
const MutableString = bun.MutableString;
const std = @import("std");
const Allocator = std.mem.Allocator;
const JSC = bun.JSC;
@@ -2698,14 +2696,11 @@ const IPC = @import("../../ipc.zig");
const uws = bun.uws;
const windows = bun.windows;
const uv = windows.libuv;
const LifecycleScriptSubprocess = bun.install.LifecycleScriptSubprocess;
const Body = JSC.WebCore.Body;
const IPClog = Output.scoped(.IPC, false);
const PosixSpawn = bun.spawn;
const Rusage = bun.spawn.Rusage;
const Process = bun.spawn.Process;
const WaiterThread = bun.spawn.WaiterThread;
const Stdio = bun.spawn.Stdio;
const StdioResult = if (Environment.isWindows) bun.spawn.WindowsSpawnResult.StdioResult else ?bun.FileDescriptor;

View File

@@ -2,7 +2,6 @@ const std = @import("std");
const uws = @import("../../../deps/uws.zig");
const bun = @import("bun");
const strings = bun.strings;
const default_allocator = bun.default_allocator;
const Output = bun.Output;
const Async = bun.Async;

View File

@@ -1,7 +1,5 @@
const BoringSSL = bun.BoringSSL.c;
const bun = @import("bun");
const ZigString = JSC.ZigString;
const std = @import("std");
const JSC = bun.JSC;
const JSValue = JSC.JSValue;
const JSGlobalObject = JSC.JSGlobalObject;

View File

@@ -21,12 +21,7 @@ comptime {
CryptoHasher.Extern.@"export"();
}
const std = @import("std");
const bun = @import("bun");
const string = bun.string;
const strings = bun.strings;
const MutableString = bun.MutableString;
const stringZ = bun.stringZ;
const JSC = bun.JSC;
const JSValue = JSC.JSValue;
const JSGlobalObject = JSC.JSGlobalObject;

View File

@@ -883,16 +883,12 @@ const std = @import("std");
const bun = @import("bun");
const string = bun.string;
const strings = bun.strings;
const MutableString = bun.MutableString;
const stringZ = bun.stringZ;
const default_allocator = bun.default_allocator;
const JSC = bun.JSC;
const Async = bun.Async;
const ZigString = JSC.ZigString;
const JSValue = JSC.JSValue;
const JSGlobalObject = JSC.JSGlobalObject;
const CallFrame = JSC.CallFrame;
const assert = bun.assert;
const HMAC = Crypto.HMAC;
const EVP = Crypto.EVP;
const BoringSSL = bun.BoringSSL.c;

View File

@@ -205,17 +205,9 @@ pub const pbkdf2 = PBKDF2.pbkdf2;
const std = @import("std");
const bun = @import("bun");
const string = bun.string;
const strings = bun.strings;
const MutableString = bun.MutableString;
const stringZ = bun.stringZ;
const default_allocator = bun.default_allocator;
const JSC = bun.JSC;
const Async = bun.Async;
const ZigString = JSC.ZigString;
const JSValue = JSC.JSValue;
const JSGlobalObject = JSC.JSGlobalObject;
const CallFrame = JSC.CallFrame;
const assert = bun.assert;
const EVP = @This();
const BoringSSL = bun.BoringSSL.c;

View File

@@ -246,9 +246,6 @@ pub fn pbkdf2(
const std = @import("std");
const bun = @import("bun");
const string = bun.string;
const strings = bun.strings;
const MutableString = bun.MutableString;
const stringZ = bun.stringZ;
const default_allocator = bun.default_allocator;
const JSC = bun.JSC;
const Async = bun.Async;

View File

@@ -758,8 +758,6 @@ const std = @import("std");
const bun = @import("bun");
const string = bun.string;
const strings = bun.strings;
const MutableString = bun.MutableString;
const stringZ = bun.stringZ;
const default_allocator = bun.default_allocator;
const JSC = bun.JSC;
const Async = bun.Async;

View File

@@ -1,8 +1,6 @@
const Bun = @This();
const bun = @import("bun");
const Environment = bun.Environment;
const Global = bun.Global;
const strings = bun.strings;
const string = bun.string;
const Output = bun.Output;

Some files were not shown because too many files have changed in this diff Show More