mirror of https://github.com/oven-sh/bun
synced 2026-02-08 09:58:55 +00:00

Compare commits: patch-1 ... claude/imp (126 commits)
Commits (SHA1):
ed00f377cd, a0e4bd541d, 02e46e65eb, ef8148f7f2, 52c41ed931, 1f06b43d30, 93910f34da, 8e27087853, 3c5565184e, 7d10e57422,
562b79c57f, 625e537f5d, d3ff6a5e35, 3143c9216c, c0660674fb, 5f1ca176cd, de6ea7375a, ed0d932a6d, 95ebe828fa, b289828de2,
92ec83a92a, 5e8feca98b, bcbba97807, 6c6849cbf5, 420d51985b, 3077081646, 0b7aed1d0d, 680e668efd, d273f7fdde, d92d2e5770,
85f89a100e, 5b51d421da, 5fca74a979, 79ac412323, 13c6a945e4, 90c0c72212, fc9db832dc, b22e19baed, 3c232b0fb4, 166c8ff4f0,
639d998055, ec050e6d6e, 08cee69ff4, b81018707d, f7da0ac6fd, 1c363f0ad0, d292dcad26, a9c0ec63e8, dd08a707e2, bf26d725ab,
fcbd57ac48, f0295ce0a5, 67647c3522, 83060e4b3e, f0eb0472e6, 624911180f, db37c36d31, 13a3c4de60, 3c96c8a63d, 46e7a3b3c5,
6c8635da63, 2e86f74764, 3c9433f9af, 4424c5ed08, 9cab1fbfe0, 578a47ce4a, 9993e12050, 02d0586da5, 46d6e0885b, 8d28289407,
d8350c2c59, e3bd03628a, f1204ea2fd, 2aa373ab63, f14f3b03bb, ddfc3f7fbc, a9b383bac5, c8cb7713fc, 666180d7fc, 693e7995bb,
79e0aa9bcf, d99d622472, 55f8e8add3, 84f94ca6dd, 86924f36e8, 4a86d070cf, 6ab3d931c9, 76545140af, 2caa5dc8f2, d7eebef6f8,
861fdacebc, 9e6ba35ff7, 613aea1787, 1fb9be3880, f19a1cc3a5, eac82e2184, dac1ee73c6, 9aa3c7863d, b88cecfe66, b613790451,
5fe3e3774c, c5005a37d7, a89e61fcaa, 2b7fc18092, e3a1ae09f3, 25e156c95b, badcfe8a14, 8d7ca660ef, 933c6fd260, f9a69773ab,
e88d151241, debd9cc35d, e0b6183571, 8d2953c097, 057fa31a75, 9c2590ca07, b5a56c183b, 41be6aeb3c, 8025fa4046, a37b00e477,
621066d0c4, 51a05ae2e3, 52629145ca, f4218ed40b, 9c75db45fa, f6e722b594
.claude/hooks/post-edit-zig-format.js (new executable file, 88 lines)
@@ -0,0 +1,88 @@
#!/usr/bin/env bun
import { extname } from "path";
import { spawnSync } from "child_process";

const input = await Bun.stdin.json();

const toolName = input.tool_name;
const toolInput = input.tool_input || {};
const filePath = toolInput.file_path;

// Only process Write, Edit, and MultiEdit tools
if (!["Write", "Edit", "MultiEdit"].includes(toolName)) {
  process.exit(0);
}

// Skip when the tool input carries no file path (extname would throw on undefined)
if (!filePath) {
  process.exit(0);
}

const ext = extname(filePath);

function formatZigFile() {
  try {
    // Format the Zig file
    const result = spawnSync("vendor/zig/zig.exe", ["fmt", filePath], {
      cwd: process.env.CLAUDE_PROJECT_DIR || process.cwd(),
      encoding: "utf-8",
    });

    if (result.error) {
      console.error(`Failed to format ${filePath}: ${result.error.message}`);
      process.exit(0);
    }

    if (result.status !== 0) {
      console.error(`zig fmt failed for ${filePath}:`);
      if (result.stderr) {
        console.error(result.stderr);
      }
      process.exit(0);
    }
  } catch (error) {
    // Formatting failures are non-fatal; the edit itself already succeeded.
  }
}

function formatTypeScriptFile() {
  try {
    // Format the TypeScript file
    const result = spawnSync(
      "./node_modules/.bin/prettier",
      ["--plugin=prettier-plugin-organize-imports", "--config", ".prettierrc", "--write", filePath],
      {
        cwd: process.env.CLAUDE_PROJECT_DIR || process.cwd(),
        encoding: "utf-8",
      },
    );
  } catch (error) {
    // Formatting failures are non-fatal; the edit itself already succeeded.
  }
}

if (ext === ".zig") {
  formatZigFile();
} else if (
  [
    ".cjs",
    ".css",
    ".html",
    ".js",
    ".json",
    ".jsonc",
    ".jsx",
    ".less",
    ".mjs",
    ".pcss",
    ".postcss",
    ".sass",
    ".scss",
    ".styl",
    ".stylus",
    ".toml",
    ".ts",
    ".tsx",
    ".yaml",
  ].includes(ext)
) {
  formatTypeScriptFile();
}

process.exit(0);
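Since the hook reads its input as JSON from stdin, it can be exercised outside Claude Code by piping a hand-written payload — a minimal sketch, assuming the `tool_name`/`tool_input` shape used above and a hypothetical Zig file path:

```bash
# Should run `zig fmt` on the given file via the hook
echo '{"tool_name":"Edit","tool_input":{"file_path":"src/example.zig"}}' \
  | bun .claude/hooks/post-edit-zig-format.js
```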
.claude/hooks/pre-bash-zig-build.js (new executable file, 175 lines)
@@ -0,0 +1,175 @@
#!/usr/bin/env bun
import { basename, extname } from "path";

const input = await Bun.stdin.json();

const toolName = input.tool_name;
const toolInput = input.tool_input || {};
const command = toolInput.command || "";
const timeout = toolInput.timeout;
const cwd = input.cwd || "";

// Get environment variables from the hook context
// Note: We check process.env directly as env vars are inherited
let useSystemBun = process.env.USE_SYSTEM_BUN;

if (toolName !== "Bash" || !command) {
  process.exit(0);
}

function denyWithReason(reason) {
  const output = {
    hookSpecificOutput: {
      hookEventName: "PreToolUse",
      permissionDecision: "deny",
      permissionDecisionReason: reason,
    },
  };
  console.log(JSON.stringify(output));
  process.exit(0);
}

// Parse the command to extract argv0 and positional args
let tokens;
try {
  // Simple shell parsing - split on spaces but respect quotes (both single and double)
  tokens = command.match(/(?:[^\s"']+|"[^"]*"|'[^']*')+/g)?.map(t => t.replace(/^['"]|['"]$/g, "")) || [];
} catch {
  process.exit(0);
}

if (tokens.length === 0) {
  process.exit(0);
}

// Strip inline environment variable assignments (e.g., FOO=1 bun test)
const inlineEnv = new Map();
let commandStart = 0;
while (
  commandStart < tokens.length &&
  /^[A-Za-z_][A-Za-z0-9_]*=/.test(tokens[commandStart]) &&
  !tokens[commandStart].includes("/")
) {
  const [name, value = ""] = tokens[commandStart].split("=", 2);
  inlineEnv.set(name, value);
  commandStart++;
}
if (commandStart >= tokens.length) {
  process.exit(0);
}
tokens = tokens.slice(commandStart);
useSystemBun = inlineEnv.get("USE_SYSTEM_BUN") ?? useSystemBun;

// Get the executable name (argv0)
const argv0 = basename(tokens[0], extname(tokens[0]));

// Check if it's zig or zig.exe
if (argv0 === "zig") {
  // Filter out flags (starting with -) to get positional arguments
  const positionalArgs = tokens.slice(1).filter(arg => !arg.startsWith("-"));

  // Check if the positional args contain "build" followed by "obj"
  if (positionalArgs.length >= 2 && positionalArgs[0] === "build" && positionalArgs[1] === "obj") {
    denyWithReason("error: Use `bun bd` to build Bun and wait patiently");
  }
}

// Check if argv0 is timeout and the command is "bun bd"
if (argv0 === "timeout") {
  // Find the actual command after timeout and its arguments
  const timeoutArgEndIndex = tokens.slice(1).findIndex(t => !t.startsWith("-") && !/^\d/.test(t));
  if (timeoutArgEndIndex === -1) {
    process.exit(0);
  }

  const actualCommandIndex = timeoutArgEndIndex + 1;
  if (actualCommandIndex >= tokens.length) {
    process.exit(0);
  }

  const actualCommand = basename(tokens[actualCommandIndex]);
  const restArgs = tokens.slice(actualCommandIndex + 1);

  // Check if it's "bun bd" or "bun-debug bd" without other positional args
  if (actualCommand === "bun" || actualCommand.includes("bun-debug")) {
    const positionalArgs = restArgs.filter(arg => !arg.startsWith("-"));
    if (positionalArgs.length === 1 && positionalArgs[0] === "bd") {
      denyWithReason("error: Run `bun bd` without a timeout");
    }
  }
}

// Check if command is "bun .* test" or "bun-debug test" with -u/--update-snapshots AND -t/--test-name-pattern
if (argv0 === "bun" || argv0.includes("bun-debug")) {
  const allArgs = tokens.slice(1);

  // Check if "test" is in positional args or "bd" followed by "test"
  const positionalArgs = allArgs.filter(arg => !arg.startsWith("-"));
  const hasTest = positionalArgs.includes("test") || (positionalArgs[0] === "bd" && positionalArgs[1] === "test");

  if (hasTest) {
    const hasUpdateSnapshots = allArgs.some(arg => arg === "-u" || arg === "--update-snapshots");
    const hasTestNamePattern = allArgs.some(arg => arg === "-t" || arg === "--test-name-pattern");

    if (hasUpdateSnapshots && hasTestNamePattern) {
      denyWithReason("error: Cannot use -u/--update-snapshots with -t/--test-name-pattern");
    }
  }
}

// Check if timeout option is set for "bun bd" command
if (timeout !== undefined && (argv0 === "bun" || argv0.includes("bun-debug"))) {
  const positionalArgs = tokens.slice(1).filter(arg => !arg.startsWith("-"));
  if (positionalArgs.length === 1 && positionalArgs[0] === "bd") {
    denyWithReason("error: Run `bun bd` without a timeout");
  }
}

// Check if running "bun test <file>" without USE_SYSTEM_BUN=1
if ((argv0 === "bun" || argv0.includes("bun-debug")) && useSystemBun !== "1") {
  const allArgs = tokens.slice(1);
  const positionalArgs = allArgs.filter(arg => !arg.startsWith("-"));

  // Check if it's "test" (not "bd test")
  if (positionalArgs.length >= 1 && positionalArgs[0] === "test") {
    denyWithReason(
      "error: In development, use `bun bd test <file>` to test your changes. If you meant to use a release version, set USE_SYSTEM_BUN=1",
    );
  }
}

// Check if running "bun bd test" from bun repo root or test folder without a file path
if (argv0 === "bun" || argv0.includes("bun-debug")) {
  const allArgs = tokens.slice(1);
  const positionalArgs = allArgs.filter(arg => !arg.startsWith("-"));

  // Check if it's "bd test"
  if (positionalArgs.length >= 2 && positionalArgs[0] === "bd" && positionalArgs[1] === "test") {
    // Check if cwd is the bun repo root or test folder
    const isBunRepoRoot = cwd === "/workspace/bun" || cwd.endsWith("/bun");
    const isTestFolder = cwd.endsWith("/bun/test");

    if (isBunRepoRoot || isTestFolder) {
      // Check if there's a file path argument (looks like a path: contains / or has test extension)
      const hasFilePath = positionalArgs
        .slice(2)
        .some(
          arg =>
            arg.includes("/") ||
            arg.endsWith(".test.ts") ||
            arg.endsWith(".test.js") ||
            arg.endsWith(".test.tsx") ||
            arg.endsWith(".test.jsx"),
        );

      if (!hasFilePath) {
        denyWithReason(
          "error: `bun bd test` from repo root or test folder will run all tests. Use `bun bd test <path>` with a specific test file.",
        );
      }
    }
  }
}

// Allow the command to proceed
process.exit(0);
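The deny decision is plain JSON on stdout, so the guard can be tried by hand as well — a sketch with a hypothetical payload that should trip the `zig build obj` rule above:

```bash
echo '{"tool_name":"Bash","tool_input":{"command":"zig build obj"},"cwd":"/workspace/bun"}' \
  | bun .claude/hooks/pre-bash-zig-build.js
# Expected (per denyWithReason above): a PreToolUse "deny" decision with the reason
# "error: Use `bun bd` to build Bun and wait patiently"
```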
.claude/settings.json (new file, 26 lines)
@@ -0,0 +1,26 @@
{
  "hooks": {
    "PreToolUse": [
      {
        "matcher": "Bash",
        "hooks": [
          {
            "type": "command",
            "command": "\"$CLAUDE_PROJECT_DIR\"/.claude/hooks/pre-bash-zig-build.js"
          }
        ]
      }
    ],
    "PostToolUse": [
      {
        "matcher": "Write|Edit|MultiEdit",
        "hooks": [
          {
            "type": "command",
            "command": "\"$CLAUDE_PROJECT_DIR\"/.claude/hooks/post-edit-zig-format.js"
          }
        ]
      }
    ]
  }
}
@@ -30,7 +30,7 @@ bun bd <file> <...args>
 Debug logs look like this:

 ```zig
-const log = bun.Output.scoped(.${SCOPE}, false);
+const log = bun.Output.scoped(.${SCOPE}, .hidden);

 // ...later
 log("MY DEBUG LOG", .{})
.github/workflows/update-sqlite3.yml (vendored, 19 lines changed)
@@ -70,24 +70,7 @@ jobs:
       - name: Update SQLite if needed
         if: success() && steps.check-version.outputs.current_num < steps.check-version.outputs.latest_num
         run: |
-          set -euo pipefail
-
-          TEMP_DIR=$(mktemp -d)
-          cd $TEMP_DIR
-
-          echo "Downloading from: https://sqlite.org/${{ steps.check-version.outputs.latest_year }}/sqlite-amalgamation-${{ steps.check-version.outputs.latest_num }}.zip"
-
-          # Download and extract latest version
-          wget "https://sqlite.org/${{ steps.check-version.outputs.latest_year }}/sqlite-amalgamation-${{ steps.check-version.outputs.latest_num }}.zip"
-          unzip "sqlite-amalgamation-${{ steps.check-version.outputs.latest_num }}.zip"
-          cd "sqlite-amalgamation-${{ steps.check-version.outputs.latest_num }}"
-
-          # Add header comment and copy files
-          echo "// clang-format off" > $GITHUB_WORKSPACE/src/bun.js/bindings/sqlite/sqlite3.c
-          cat sqlite3.c >> $GITHUB_WORKSPACE/src/bun.js/bindings/sqlite/sqlite3.c
-
-          echo "// clang-format off" > $GITHUB_WORKSPACE/src/bun.js/bindings/sqlite/sqlite3_local.h
-          cat sqlite3.h >> $GITHUB_WORKSPACE/src/bun.js/bindings/sqlite/sqlite3_local.h
+          ./scripts/update-sqlite-amalgamation.sh ${{ steps.check-version.outputs.latest_num }} ${{ steps.check-version.outputs.latest_year }}

       - name: Create Pull Request
         if: success() && steps.check-version.outputs.current_num < steps.check-version.outputs.latest_num
.gitignore (vendored, 4 lines changed)
@@ -1,7 +1,9 @@
.claude/settings.local.json
.DS_Store
.env
.envrc
.eslintcache
.gdb_history
.idea
.next
.ninja_deps

@@ -189,4 +191,4 @@ scratch*.{js,ts,tsx,cjs,mjs}
scripts/lldb-inline

# We regenerate these in all the build scripts
cmake/sources/*.txt
@@ -19,6 +19,12 @@
      "options": {
        "printWidth": 80
      }
    },
    {
      "files": ["src/codegen/bindgenv2/**/*.ts", "*.bindv2.ts"],
      "options": {
        "printWidth": 100
      }
    }
  ]
}
CLAUDE.md (69 lines changed)
@@ -143,19 +143,6 @@ When implementing JavaScript classes in C++:
 3. Add iso subspaces for classes with C++ fields
 4. Cache structures in ZigGlobalObject

-## Development Workflow
-
-### Code Formatting
-
-- `bun run prettier` - Format JS/TS files
-- `bun run zig-format` - Format Zig files
-- `bun run clang-format` - Format C++ files
-
-### Watching for Changes
-
-- `bun run watch` - Incremental Zig compilation with error checking
-- `bun run watch-windows` - Windows-specific watch mode
-
 ### Code Generation

 Code generation happens automatically as part of the build process. The main scripts are:

@@ -177,47 +164,6 @@ Built-in JavaScript modules use special syntax and are organized as:
 - `internal/` - Internal modules not exposed to users
 - `builtins/` - Core JavaScript builtins (streams, console, etc.)

-### Special Syntax in Built-in Modules
-
-1. **`$` prefix** - Access to private properties and JSC intrinsics:
-
-   ```js
-   const arr = $Array.from(...); // Private global
-   map.$set(...); // Private method
-   const arr2 = $newArrayWithSize(5); // JSC intrinsic
-   ```
-
-2. **`require()`** - Must use string literals, resolved at compile time:
-
-   ```js
-   const fs = require("fs"); // Directly loads by numeric ID
-   ```
-
-3. **Debug helpers**:
-   - `$debug()` - Like console.log but stripped in release builds
-   - `$assert()` - Assertions stripped in release builds
-   - `if($debug) {}` - Check if debug env var is set
-
-4. **Platform detection**: `process.platform` and `process.arch` are inlined and dead-code eliminated
-
-5. **Export syntax**: Use `export default` which gets converted to a return statement:
-
-   ```js
-   export default {
-     readFile,
-     writeFile,
-   };
-   ```
-
-Note: These are NOT ES modules. The preprocessor converts `$` to `@` (JSC's actual syntax) and handles the special functions.
-
-## CI
-
-Bun uses BuildKite for CI. To get the status of a PR, you can use the following command:
-
-```bash
-bun ci
-```
-
 ## Important Development Notes

 1. **Never use `bun test` or `bun <file>` directly** - always use `bun bd test` or `bun bd <command>`. `bun bd` compiles & runs the debug build.

@@ -229,19 +175,6 @@ bun ci
 7. **Avoid shell commands** - Don't use `find` or `grep` in tests; use Bun's Glob and built-in tools
 8. **Memory management** - In Zig code, be careful with allocators and use defer for cleanup
 9. **Cross-platform** - Run `bun run zig:check-all` to compile the Zig code on all platforms when making platform-specific changes
-10. **Debug builds** - Use `BUN_DEBUG_QUIET_LOGS=1` to disable debug logging, or `BUN_DEBUG_<scope>=1` to enable specific scopes
+10. **Debug builds** - Use `BUN_DEBUG_QUIET_LOGS=1` to disable debug logging, or `BUN_DEBUG_<scopeName>=1` to enable specific `Output.scoped(.${scopeName}, .visible)`s
 11. **Be humble & honest** - NEVER overstate what you got done or what actually works in commits, PRs or in messages to the user.
 12. **Branch names must start with `claude/`** - This is a requirement for the CI to work.
-
-## Key APIs and Features
-
-### Bun-Specific APIs
-
-- **Bun.serve()** - High-performance HTTP server
-- **Bun.spawn()** - Process spawning with better performance than Node.js
-- **Bun.file()** - Fast file I/O operations
-- **Bun.write()** - Unified API for writing to files, stdout, etc.
-- **Bun.$ (Shell)** - Cross-platform shell scripting
-- **Bun.SQLite** - Native SQLite integration
-- **Bun.FFI** - Call native libraries from JavaScript
-- **Bun.Glob** - Fast file pattern matching

@@ -149,7 +149,7 @@ Bun generally takes about 2.5 minutes to compile a debug build when there are Zi
 - Batch up your changes
 - Ensure zls is running with incremental watching for LSP errors (if you use VSCode and install Zig and run `bun run build` once to download Zig, this should just work)
 - Prefer using the debugger ("CodeLLDB" in VSCode) to step through the code.
-- Use debug logs. `BUN_DEBUG_<scope>=1` will enable debug logging for the corresponding `Output.scoped(.<scope>, false)` logs. You can also set `BUN_DEBUG_QUIET_LOGS=1` to disable all debug logging that isn't explicitly enabled. To dump debug logs into a file, `BUN_DEBUG=<path-to-file>.log`. Debug logs are aggressively removed in release builds.
+- Use debug logs. `BUN_DEBUG_<scope>=1` will enable debug logging for the corresponding `Output.scoped(.<scope>, .hidden)` logs. You can also set `BUN_DEBUG_QUIET_LOGS=1` to disable all debug logging that isn't explicitly enabled. To dump debug logs into a file, `BUN_DEBUG=<path-to-file>.log`. Debug logs are aggressively removed in release builds.
 - src/js/\*\*.ts changes are pretty much instant to rebuild. C++ changes are a bit slower, but still much faster than the Zig code (Zig is one compilation unit, C++ is many).

 ## Code generation scripts
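Putting the debug-log controls above together — a sketch using a hypothetical scope name and test path, not ones confirmed by this diff:

```bash
# Silence all scopes except a hypothetical "fetch" scope, and dump the logs to a file
BUN_DEBUG_QUIET_LOGS=1 BUN_DEBUG_fetch=1 BUN_DEBUG=/tmp/bun-debug.log \
  bun bd test test/js/fetch.test.ts
```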
build.zig (17 lines changed)
@@ -49,6 +49,7 @@ const BunBuildOptions = struct {
    enable_logs: bool = false,
    enable_asan: bool,
    enable_valgrind: bool,
    use_mimalloc: bool,
    tracy_callstack_depth: u16,
    reported_nodejs_version: Version,
    /// To make iterating on some '@embedFile's faster, we load them at runtime
@@ -68,6 +69,7 @@ const BunBuildOptions = struct {

    cached_options_module: ?*Module = null,
    windows_shim: ?WindowsShim = null,
    llvm_codegen_threads: ?u32 = null,

    pub fn isBaseline(this: *const BunBuildOptions) bool {
        return this.arch.isX86() and
@@ -96,6 +98,7 @@ const BunBuildOptions = struct {
    opts.addOption(bool, "enable_logs", this.enable_logs);
    opts.addOption(bool, "enable_asan", this.enable_asan);
    opts.addOption(bool, "enable_valgrind", this.enable_valgrind);
    opts.addOption(bool, "use_mimalloc", this.use_mimalloc);
    opts.addOption([]const u8, "reported_nodejs_version", b.fmt("{}", .{this.reported_nodejs_version}));
    opts.addOption(bool, "zig_self_hosted_backend", this.no_llvm);
    opts.addOption(bool, "override_no_export_cpp_apis", this.override_no_export_cpp_apis);
@@ -269,6 +272,8 @@ pub fn build(b: *Build) !void {
    .enable_logs = b.option(bool, "enable_logs", "Enable logs in release") orelse false,
    .enable_asan = b.option(bool, "enable_asan", "Enable asan") orelse false,
    .enable_valgrind = b.option(bool, "enable_valgrind", "Enable valgrind") orelse false,
    .use_mimalloc = b.option(bool, "use_mimalloc", "Use mimalloc as default allocator") orelse false,
    .llvm_codegen_threads = b.option(u32, "llvm_codegen_threads", "Number of threads to use for LLVM codegen") orelse 1,
};

// zig build obj
@@ -498,6 +503,7 @@ fn addMultiCheck(
    .no_llvm = root_build_options.no_llvm,
    .enable_asan = root_build_options.enable_asan,
    .enable_valgrind = root_build_options.enable_valgrind,
    .use_mimalloc = root_build_options.use_mimalloc,
    .override_no_export_cpp_apis = root_build_options.override_no_export_cpp_apis,
};

@@ -603,7 +609,15 @@ fn configureObj(b: *Build, opts: *BunBuildOptions, obj: *Compile) void {

 // Object options
 obj.use_llvm = !opts.no_llvm;
-obj.use_lld = if (opts.os == .mac) false else !opts.no_llvm;
+obj.use_lld = if (opts.os == .mac or opts.os == .linux) false else !opts.no_llvm;

 if (opts.optimize == .Debug) {
     if (@hasField(std.meta.Child(@TypeOf(obj)), "llvm_codegen_threads"))
         obj.llvm_codegen_threads = opts.llvm_codegen_threads orelse 0;
 }

 obj.no_link_obj = true;

 if (opts.enable_asan and !enableFastBuild(b)) {
     if (@hasField(Build.Module, "sanitize_address")) {
         obj.root_module.sanitize_address = true;
@@ -710,6 +724,7 @@ fn addInternalImports(b: *Build, mod: *Module, opts: *BunBuildOptions) void {
// Generated code exposed as individual modules.
inline for (.{
    .{ .file = "ZigGeneratedClasses.zig", .import = "ZigGeneratedClasses" },
    .{ .file = "bindgen_generated.zig", .import = "bindgen_generated" },
    .{ .file = "ResolvedSourceTag.zig", .import = "ResolvedSourceTag" },
    .{ .file = "ErrorCode.zig", .import = "ErrorCode" },
    .{ .file = "runtime.out.js", .enable = opts.shouldEmbedCode() },
bun.lock (12 lines changed)
@@ -8,14 +8,14 @@
     "@lezer/cpp": "^1.1.3",
     "@types/bun": "workspace:*",
     "bun-tracestrings": "github:oven-sh/bun.report#912ca63e26c51429d3e6799aa2a6ab079b188fd8",
-    "esbuild": "^0.21.4",
-    "mitata": "^0.1.11",
+    "esbuild": "^0.21.5",
+    "mitata": "^0.1.14",
     "peechy": "0.4.34",
-    "prettier": "^3.5.3",
-    "prettier-plugin-organize-imports": "^4.0.0",
+    "prettier": "^3.6.2",
+    "prettier-plugin-organize-imports": "^4.3.0",
     "react": "^18.3.1",
     "react-dom": "^18.3.1",
-    "source-map-js": "^1.2.0",
+    "source-map-js": "^1.2.1",
     "typescript": "5.9.2",
   },
 },

@@ -284,7 +284,7 @@
 "prettier": ["prettier@3.6.2", "", { "bin": { "prettier": "bin/prettier.cjs" } }, "sha512-I7AIg5boAr5R0FFtJ6rCfD+LFsWHp81dolrFD8S79U9tb8Az2nGrJncnMSnys+bpQJfRUzqs9hnA81OAA3hCuQ=="],

-"prettier-plugin-organize-imports": ["prettier-plugin-organize-imports@4.2.0", "", { "peerDependencies": { "prettier": ">=2.0", "typescript": ">=2.9", "vue-tsc": "^2.1.0 || 3" }, "optionalPeers": ["vue-tsc"] }, "sha512-Zdy27UhlmyvATZi67BTnLcKTo8fm6Oik59Sz6H64PgZJVs6NJpPD1mT240mmJn62c98/QaL+r3kx9Q3gRpDajg=="],
+"prettier-plugin-organize-imports": ["prettier-plugin-organize-imports@4.3.0", "", { "peerDependencies": { "prettier": ">=2.0", "typescript": ">=2.9", "vue-tsc": "^2.1.0 || 3" }, "optionalPeers": ["vue-tsc"] }, "sha512-FxFz0qFhyBsGdIsb697f/EkvHzi5SZOhWAjxcx2dLt+Q532bAlhswcXGYB1yzjZ69kW8UoadFBw7TyNwlq96Iw=="],

 "react": ["react@18.3.1", "", { "dependencies": { "loose-envify": "^1.1.0" } }, "sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ=="],
@@ -10,3 +10,4 @@ preload = "./test/preload.ts"

[install]
linker = "isolated"
minimumReleaseAge = 1
@@ -202,4 +202,9 @@ optionx(USE_WEBKIT_ICU BOOL "Use the ICU libraries from WebKit" DEFAULT ${DEFAUL

optionx(ERROR_LIMIT STRING "Maximum number of errors to show when compiling C++ code" DEFAULT "100")

# This is not an `option` because setting this variable to OFF is experimental
# and unsupported. This replaces the `use_mimalloc` variable previously in
# bun.zig, and enables C++ code to also be aware of the option.
set(USE_MIMALLOC_AS_DEFAULT_ALLOCATOR ON)

list(APPEND CMAKE_ARGS -DCMAKE_EXPORT_COMPILE_COMMANDS=ON)
@@ -31,6 +31,14 @@
    "output": "BindgenSources.txt",
    "paths": ["src/**/*.bind.ts"]
  },
  {
    "output": "BindgenV2Sources.txt",
    "paths": ["src/**/*.bindv2.ts"]
  },
  {
    "output": "BindgenV2InternalSources.txt",
    "paths": ["src/codegen/bindgenv2/**/*.ts"]
  },
  {
    "output": "ZigSources.txt",
    "paths": ["src/**/*.zig"]
@@ -44,6 +44,14 @@ else()
  set(CONFIGURE_DEPENDS "")
endif()

set(LLVM_ZIG_CODEGEN_THREADS 0)
# This makes the build slower, so we turn it off for now.
# if (DEBUG)
#   include(ProcessorCount)
#   ProcessorCount(CPU_COUNT)
#   set(LLVM_ZIG_CODEGEN_THREADS ${CPU_COUNT})
# endif()

# --- Dependencies ---

set(BUN_DEPENDENCIES
@@ -387,6 +395,54 @@ register_command(
  ${BUN_BAKE_RUNTIME_OUTPUTS}
)

set(BUN_BINDGENV2_SCRIPT ${CWD}/src/codegen/bindgenv2/script.ts)

absolute_sources(BUN_BINDGENV2_SOURCES ${CWD}/cmake/sources/BindgenV2Sources.txt)
# These sources include the script itself.
absolute_sources(BUN_BINDGENV2_INTERNAL_SOURCES
  ${CWD}/cmake/sources/BindgenV2InternalSources.txt)
string(REPLACE ";" "," BUN_BINDGENV2_SOURCES_COMMA_SEPARATED
  "${BUN_BINDGENV2_SOURCES}")

execute_process(
  COMMAND ${BUN_EXECUTABLE} run ${BUN_BINDGENV2_SCRIPT}
    --command=list-outputs
    --sources=${BUN_BINDGENV2_SOURCES_COMMA_SEPARATED}
    --codegen-path=${CODEGEN_PATH}
  RESULT_VARIABLE bindgen_result
  OUTPUT_VARIABLE bindgen_outputs
)
if(${bindgen_result})
  message(FATAL_ERROR "bindgenv2/script.ts exited with non-zero status")
endif()
foreach(output IN LISTS bindgen_outputs)
  if(output MATCHES "\.cpp$")
    list(APPEND BUN_BINDGENV2_CPP_OUTPUTS ${output})
  elseif(output MATCHES "\.zig$")
    list(APPEND BUN_BINDGENV2_ZIG_OUTPUTS ${output})
  else()
    message(FATAL_ERROR "unexpected bindgen output: [${output}]")
  endif()
endforeach()

register_command(
  TARGET
    bun-bindgen-v2
  COMMENT
    "Generating bindings (v2)"
  COMMAND
    ${BUN_EXECUTABLE} run ${BUN_BINDGENV2_SCRIPT}
      --command=generate
      --codegen-path=${CODEGEN_PATH}
      --sources=${BUN_BINDGENV2_SOURCES_COMMA_SEPARATED}
  SOURCES
    ${BUN_BINDGENV2_SOURCES}
    ${BUN_BINDGENV2_INTERNAL_SOURCES}
  OUTPUTS
    ${BUN_BINDGENV2_CPP_OUTPUTS}
    ${BUN_BINDGENV2_ZIG_OUTPUTS}
)

set(BUN_BINDGEN_SCRIPT ${CWD}/src/codegen/bindgen.ts)

absolute_sources(BUN_BINDGEN_SOURCES ${CWD}/cmake/sources/BindgenSources.txt)
@@ -565,6 +621,7 @@ set(BUN_ZIG_GENERATED_SOURCES
  ${BUN_ZIG_GENERATED_CLASSES_OUTPUTS}
  ${BUN_JAVASCRIPT_OUTPUTS}
  ${BUN_CPP_OUTPUTS}
  ${BUN_BINDGENV2_ZIG_OUTPUTS}
)

# In debug builds, these are not embedded, but rather referenced at runtime.
@@ -578,7 +635,13 @@
 if (TEST)
   set(BUN_ZIG_OUTPUT ${BUILD_PATH}/bun-test.o)
   set(ZIG_STEPS test)
 else()
-  set(BUN_ZIG_OUTPUT ${BUILD_PATH}/bun-zig.o)
+  if (LLVM_ZIG_CODEGEN_THREADS GREATER 1)
+    foreach(i RANGE ${LLVM_ZIG_CODEGEN_THREADS})
+      list(APPEND BUN_ZIG_OUTPUT ${BUILD_PATH}/bun-zig.${i}.o)
+    endforeach()
+  else()
+    set(BUN_ZIG_OUTPUT ${BUILD_PATH}/bun-zig.o)
+  endif()
   set(ZIG_STEPS obj)
 endif()
@@ -622,6 +685,8 @@ register_command(
    -Denable_logs=$<IF:$<BOOL:${ENABLE_LOGS}>,true,false>
    -Denable_asan=$<IF:$<BOOL:${ENABLE_ZIG_ASAN}>,true,false>
    -Denable_valgrind=$<IF:$<BOOL:${ENABLE_VALGRIND}>,true,false>
    -Duse_mimalloc=$<IF:$<BOOL:${USE_MIMALLOC_AS_DEFAULT_ALLOCATOR}>,true,false>
    -Dllvm_codegen_threads=${LLVM_ZIG_CODEGEN_THREADS}
    -Dversion=${VERSION}
    -Dreported_nodejs_version=${NODEJS_VERSION}
    -Dcanary=${CANARY_REVISION}
@@ -697,6 +762,7 @@ list(APPEND BUN_CPP_SOURCES
  ${BUN_JAVASCRIPT_OUTPUTS}
  ${BUN_OBJECT_LUT_OUTPUTS}
  ${BUN_BINDGEN_CPP_OUTPUTS}
  ${BUN_BINDGENV2_CPP_OUTPUTS}
)

if(WIN32)
@@ -834,6 +900,10 @@ if(WIN32)
  )
endif()

if(USE_MIMALLOC_AS_DEFAULT_ALLOCATOR)
  target_compile_definitions(${bun} PRIVATE USE_MIMALLOC=1)
endif()

target_compile_definitions(${bun} PRIVATE
  _HAS_EXCEPTIONS=0
  LIBUS_USE_OPENSSL=1
@@ -2,7 +2,7 @@ option(WEBKIT_VERSION "The version of WebKit to use")
 option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading")

 if(NOT WEBKIT_VERSION)
-  set(WEBKIT_VERSION 69fa2714ab5f917c2d15501ff8cfdccfaea78882)
+  set(WEBKIT_VERSION 6d0f3aac0b817cc01a846b3754b21271adedac12)
 endif()

 string(SUBSTRING ${WEBKIT_VERSION} 0 16 WEBKIT_VERSION_PREFIX)
@@ -20,7 +20,7 @@ else()
   unsupported(CMAKE_SYSTEM_NAME)
 endif()

-set(ZIG_COMMIT "e0b7c318f318196c5f81fdf3423816a7b5bb3112")
+set(ZIG_COMMIT "55fdbfa0c86be86b68d43a4ba761e6909eb0d7b2")
 optionx(ZIG_TARGET STRING "The zig target to use" DEFAULT ${DEFAULT_ZIG_TARGET})

 if(CMAKE_BUILD_TYPE STREQUAL "Release")
@@ -233,6 +233,7 @@ In addition to the standard fetch options, Bun provides several extensions:

```ts
const response = await fetch("http://example.com", {
  // Control automatic response decompression (default: true)
  // Supports gzip, deflate, brotli (br), and zstd
  decompress: true,

  // Disable connection reuse for this request
@@ -339,7 +340,7 @@ This will print the request and response headers to your terminal:

 [fetch] > User-Agent: Bun/$BUN_LATEST_VERSION
 [fetch] > Accept: */*
 [fetch] > Host: example.com
-[fetch] > Accept-Encoding: gzip, deflate, br
+[fetch] > Accept-Encoding: gzip, deflate, br, zstd

 [fetch] < 200 OK
 [fetch] < Content-Encoding: gzip
@@ -155,3 +155,24 @@ const glob = new Glob("\\!index.ts");
glob.match("!index.ts"); // => true
glob.match("index.ts"); // => false
```

## Node.js `fs.glob()` compatibility

Bun also implements Node.js's `fs.glob()` functions with additional features:

```ts
import { glob, globSync, promises } from "node:fs";

// Array of patterns
const files = await promises.glob(["**/*.ts", "**/*.js"]);

// Exclude patterns
const filtered = await promises.glob("**/*", {
  exclude: ["node_modules/**", "*.test.*"],
});
```

All three functions (`fs.glob()`, `fs.globSync()`, `fs.promises.glob()`) support:

- Array of patterns as the first argument
- `exclude` option to filter results
@@ -88,6 +88,9 @@ await redis.set("user:1:name", "Alice");

// Get a key
const name = await redis.get("user:1:name");

// Get a key as Uint8Array
const buffer = await redis.getBuffer("user:1:name");

// Delete a key
await redis.del("user:1:name");
@@ -132,6 +135,10 @@ await redis.hmset("user:123", [

const userFields = await redis.hmget("user:123", ["name", "email"]);
console.log(userFields); // ["Alice", "alice@example.com"]

// Get single field from hash (returns value directly, null if missing)
const userName = await redis.hget("user:123", "name");
console.log(userName); // "Alice"

// Increment a numeric field in a hash
await redis.hincrby("user:123", "visits", 1);
@@ -377,6 +377,22 @@ const users = [

await sql`SELECT * FROM users WHERE id IN ${sql(users, "id")}`;
```

### `sql.array` helper

The `sql.array` helper creates PostgreSQL array literals from JavaScript arrays:

```ts
// Create array literals for PostgreSQL
await sql`INSERT INTO tags (items) VALUES (${sql.array(["red", "blue", "green"])})`;
// Generates: INSERT INTO tags (items) VALUES (ARRAY['red', 'blue', 'green'])

// Works with numeric arrays too
await sql`SELECT * FROM products WHERE ids = ANY(${sql.array([1, 2, 3])})`;
// Generates: SELECT * FROM products WHERE ids = ANY(ARRAY[1, 2, 3])
```

**Note**: `sql.array` is PostgreSQL-only. Multi-dimensional arrays and NULL elements may not be supported yet.

## `sql``.simple()`

The PostgreSQL wire protocol supports two types of queries: "simple" and "extended". Simple queries can contain multiple statements but don't support parameters, while extended queries (the default) support parameters but only allow one statement.
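For example — a minimal sketch of a simple-protocol query using the `.simple()` method named in the heading:

```ts
// Two statements in one round trip; parameters are not allowed in simple mode
await sql`SELECT 1; SELECT 2;`.simple();
```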
@@ -663,6 +663,8 @@ class Statement<Params, ReturnType> {
  toString(): string; // serialize to SQL

  columnNames: string[]; // the column names of the result set
  columnTypes: string[]; // types based on actual values in first row (call .get()/.all() first)
  declaredTypes: (string | null)[]; // types from CREATE TABLE schema (call .get()/.all() first)
  paramsCount: number; // the number of parameters expected by the statement
  native: any; // the native object representing the statement
@@ -28,6 +28,20 @@ for await (const chunk of stream) {
}
```

`ReadableStream` also provides convenience methods for consuming the entire stream:

```ts
const stream = new ReadableStream({
  start(controller) {
    controller.enqueue("hello world");
    controller.close();
  },
});

const data = await stream.text(); // => "hello world"
// Also available: .json(), .bytes(), .blob()
```

## Direct `ReadableStream`

Bun implements an optimized version of `ReadableStream` that avoids unnecessary data copying & queue management logic. With a traditional `ReadableStream`, chunks of data are _enqueued_. Each chunk is copied into a queue, where it sits until the stream is ready to send more data.
@@ -602,6 +602,40 @@ dec.decode(decompressed);
// => "hellohellohello..."
```

## `Bun.zstdCompress()` / `Bun.zstdCompressSync()`

Compresses a `Uint8Array` using the Zstandard algorithm.

```ts
const buf = Buffer.from("hello".repeat(100));

// Synchronous
const compressedSync = Bun.zstdCompressSync(buf);
// Asynchronous
const compressedAsync = await Bun.zstdCompress(buf);

// With compression level (1-22, default: 3)
const compressedLevel = Bun.zstdCompressSync(buf, { level: 6 });
```

## `Bun.zstdDecompress()` / `Bun.zstdDecompressSync()`

Decompresses a `Uint8Array` using the Zstandard algorithm.

```ts
const buf = Buffer.from("hello".repeat(100));
const compressed = Bun.zstdCompressSync(buf);

// Synchronous
const decompressedSync = Bun.zstdDecompressSync(compressed);
// Asynchronous
const decompressedAsync = await Bun.zstdDecompress(compressed);

const dec = new TextDecoder();
dec.decode(decompressedSync);
// => "hellohellohello..."
```

## `Bun.inspect()`

Serializes an object to a `string` exactly as it would be printed by `console.log`.
@@ -114,8 +114,7 @@ type WebSocketData = {
   authToken: string;
 };

-// TypeScript: specify the type of `data`
-Bun.serve<WebSocketData>({
+Bun.serve({
   fetch(req, server) {
     const cookies = new Bun.CookieMap(req.headers.get("cookie")!);

@@ -131,8 +130,12 @@ Bun.serve<WebSocketData>({
     return undefined;
   },
   websocket: {
+    // TypeScript: specify the type of ws.data like this
+    data: {} as WebSocketData,
+
     // handler called when a message is received
     async message(ws, message) {
+      // ws.data is now properly typed as WebSocketData
       const user = getUserFromToken(ws.data.authToken);

       await saveMessageToDatabase({

@@ -164,7 +167,7 @@ socket.addEventListener("message", event => {
 Bun's `ServerWebSocket` implementation provides a native publish-subscribe API for topic-based broadcasting. Individual sockets can `.subscribe()` to a topic (specified with a string identifier) and `.publish()` messages to all other subscribers to that topic (excluding itself). This topic-based broadcast API is similar to [MQTT](https://en.wikipedia.org/wiki/MQTT) and [Redis Pub/Sub](https://redis.io/topics/pubsub).

 ```ts
-const server = Bun.serve<{ username: string }>({
+const server = Bun.serve({
   fetch(req, server) {
     const url = new URL(req.url);
     if (url.pathname === "/chat") {

@@ -179,6 +182,9 @@ const server = Bun.serve<{ username: string }>({
     return new Response("Hello world");
   },
   websocket: {
+    // TypeScript: specify the type of ws.data like this
+    data: {} as { username: string },
+
     open(ws) {
       const msg = `${ws.data.username} has entered the chat`;
       ws.subscribe("the-group-chat");

@@ -279,6 +285,9 @@ Bun implements the `WebSocket` class. To create a WebSocket client that connects

 ```ts
 const socket = new WebSocket("ws://localhost:3000");
+
+// With subprotocol negotiation
+const socket2 = new WebSocket("ws://localhost:3000", ["soap", "wamp"]);
 ```

 In browsers, the cookies that are currently set on the page will be sent with the WebSocket upgrade request. This is a standard feature of the `WebSocket` API.

@@ -293,6 +302,17 @@ const socket = new WebSocket("ws://localhost:3000", {
 });
 ```

+### Client compression
+
+WebSocket clients support permessage-deflate compression. The `extensions` property shows negotiated compression:
+
+```ts
+const socket = new WebSocket("wss://echo.websocket.org");
+socket.addEventListener("open", () => {
+  console.log(socket.extensions); // => "permessage-deflate"
+});
+```
+
 To add event listeners to the socket:

 ```ts
@@ -282,6 +282,31 @@ const worker = new Worker("./i-am-smol.ts", {

Setting `smol: true` sets `JSC::HeapSize` to be `Small` instead of the default `Large`.
{% /details %}

## Environment Data

Share data between the main thread and workers using `setEnvironmentData()` and `getEnvironmentData()`.

```js
import { setEnvironmentData, getEnvironmentData } from "worker_threads";

// In main thread
setEnvironmentData("config", { apiUrl: "https://api.example.com" });

// In worker
const config = getEnvironmentData("config");
console.log(config); // => { apiUrl: "https://api.example.com" }
```

## Worker Events

Listen for worker creation events with a `process.on("worker")` listener:

```js
process.on("worker", worker => {
  console.log("New worker created:", worker.threadId);
});
```

## `Bun.isMainThread`

You can check if you're in the main thread by checking `Bun.isMainThread`.
@@ -140,6 +140,19 @@ The `--sourcemap` argument embeds a sourcemap compressed with zstd, so that erro

The `--bytecode` argument enables bytecode compilation. Every time you run JavaScript code in Bun, JavaScriptCore (the engine) will compile your source code into bytecode. We can move this parsing work from runtime to bundle time, saving you startup time.
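As a concrete invocation of the flags described above (the `./app.ts` entrypoint path is hypothetical):

```bash
bun build --compile --bytecode --sourcemap ./app.ts --outfile myapp
```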
## Embedding runtime arguments

**`--compile-exec-argv="args"`** - Embed runtime arguments that are available via `process.execArgv`:

```bash
bun build --compile --compile-exec-argv="--smol --user-agent=MyBot" ./app.ts --outfile myapp
```

```js
// In the compiled app
console.log(process.execArgv); // ["--smol", "--user-agent=MyBot"]
```

## Act as the Bun CLI

{% note %}
@@ -313,6 +313,14 @@ $ bun build --entrypoints ./index.ts --outdir ./out --target browser

Depending on the target, Bun will apply different module resolution rules and optimizations.

### Module resolution

Bun supports the `NODE_PATH` environment variable for additional module resolution paths:

```bash
NODE_PATH=./src bun build ./entry.js --outdir ./dist
```

<!-- - Module resolution. For example, when bundling for the browser, Bun will prioritize the `"browser"` export condition when resolving imports. An error will be thrown if any Node.js or Bun built-ins are imported or used, e.g. `node:fs` or `Bun.serve`. -->

{% table %}
@@ -392,6 +400,55 @@ $ bun build ./index.tsx --outdir ./out --format cjs

TODO: document IIFE once we support globalNames.

### `jsx`

Configure JSX transform behavior. Allows fine-grained control over how JSX is compiled.

**Classic runtime example** (uses `factory` and `fragment`):

{% codetabs %}

```ts#JavaScript
await Bun.build({
  entrypoints: ['./app.tsx'],
  outdir: './out',
  jsx: {
    factory: 'h',
    fragment: 'Fragment',
    runtime: 'classic',
  },
})
```

```bash#CLI
# JSX configuration is handled via bunfig.toml or tsconfig.json
$ bun build ./app.tsx --outdir ./out
```

{% /codetabs %}

**Automatic runtime example** (uses `importSource`):

{% codetabs %}

```ts#JavaScript
await Bun.build({
  entrypoints: ['./app.tsx'],
  outdir: './out',
  jsx: {
    importSource: 'preact',
    runtime: 'automatic',
  },
})
```

```bash#CLI
# JSX configuration is handled via bunfig.toml or tsconfig.json
$ bun build ./app.tsx --outdir ./out
```

{% /codetabs %}

### `splitting`

Whether to enable code splitting.

@@ -1519,6 +1576,15 @@ interface BuildConfig {
   * @default "esm"
   */
  format?: "esm" | "cjs" | "iife";
  /**
   * JSX configuration object for controlling JSX transform behavior
   */
  jsx?: {
    factory?: string;
    fragment?: string;
    importSource?: string;
    runtime?: "automatic" | "classic";
  };
  naming?:
    | string
    | {
@@ -176,7 +176,21 @@ When a `bun.lock` exists and `package.json` hasn’t changed, Bun downloads miss

 ## Platform-specific dependencies?

-bun stores normalized `cpu` and `os` values from npm in the lockfile, along with the resolved packages. It skips downloading, extracting, and installing packages disabled for the current target at runtime. This means the lockfile won’t change between platforms/architectures even if the packages ultimately installed do change.
+bun stores normalized `cpu` and `os` values from npm in the lockfile, along with the resolved packages. It skips downloading, extracting, and installing packages disabled for the current target at runtime. This means the lockfile won't change between platforms/architectures even if the packages ultimately installed do change.

 ### `--cpu` and `--os` flags

 You can override the target platform for package selection:

 ```bash
 bun install --cpu=x64 --os=linux
 ```

 This installs packages for the specified platform instead of the current system. Useful for cross-platform builds or when preparing deployments for different environments.

 **Accepted values for `--cpu`**: `arm64`, `x64`, `ia32`, `ppc64`, `s390x`

 **Accepted values for `--os`**: `linux`, `darwin`, `win32`, `freebsd`, `openbsd`, `sunos`, `aix`

 ## Peer dependencies?
@@ -245,3 +259,91 @@ bun uses a binary format for caching NPM registry responses. This loads much fas
You will see these files in `~/.bun/install/cache/*.npm`. The filename pattern is `${hash(packageName)}.npm`. It’s a hash so that extra directories don’t need to be created for scoped packages.

Bun's usage of `Cache-Control` ignores `Age`. This improves performance, but means bun may be about 5 minutes out of date to receive the latest package version metadata from npm.

## pnpm migration

Bun automatically migrates projects from pnpm to bun. When a `pnpm-lock.yaml` file is detected and no `bun.lock` file exists, Bun will automatically migrate the lockfile to `bun.lock` during installation. The original `pnpm-lock.yaml` file remains unmodified.

```bash
bun install
```

**Note**: Migration only runs when `bun.lock` is absent. There is currently no opt-out flag for pnpm migration.

The migration process handles:

### Lockfile Migration

- Converts `pnpm-lock.yaml` to `bun.lock` format
- Preserves package versions and resolution information
- Maintains dependency relationships and peer dependencies
- Handles patched dependencies with integrity hashes

### Workspace Configuration

When a `pnpm-workspace.yaml` file exists, Bun migrates workspace settings to your root `package.json`:

```yaml
# pnpm-workspace.yaml
packages:
  - "apps/*"
  - "packages/*"

catalog:
  react: ^18.0.0
  typescript: ^5.0.0

catalogs:
  build:
    webpack: ^5.0.0
    babel: ^7.0.0
```

The workspace packages list and catalogs are moved to the `workspaces` field in `package.json`:

```json
{
  "workspaces": {
    "packages": ["apps/*", "packages/*"],
    "catalog": {
      "react": "^18.0.0",
      "typescript": "^5.0.0"
    },
    "catalogs": {
      "build": {
        "webpack": "^5.0.0",
        "babel": "^7.0.0"
      }
    }
  }
}
```

### Catalog Dependencies

Dependencies using pnpm's `catalog:` protocol are preserved:

```json
{
  "dependencies": {
    "react": "catalog:",
    "webpack": "catalog:build"
  }
}
```

### Configuration Migration

The following pnpm configuration is migrated from both `pnpm-lock.yaml` and `pnpm-workspace.yaml`:

- **Overrides**: Moved from `pnpm.overrides` to root-level `overrides` in `package.json`
- **Patched Dependencies**: Moved from `pnpm.patchedDependencies` to root-level `patchedDependencies` in `package.json`
- **Workspace Overrides**: Applied from `pnpm-workspace.yaml` to root `package.json`

### Requirements

- Requires pnpm lockfile version 7 or higher
- Workspace packages must have a `name` field in their `package.json`
- All catalog entries referenced by dependencies must exist in the catalogs definition

After migration, you can safely remove `pnpm-lock.yaml` and `pnpm-workspace.yaml` files.
@@ -63,6 +63,15 @@ $ bunx --bun my-cli # good
$ bunx my-cli --bun # bad
```

## Package flag

**`--package <pkg>` or `-p <pkg>`** - Run a binary from a specific package. Useful when the binary name differs from the package name:

```bash
bunx -p renovate renovate-config-validator
bunx --package @angular/cli ng
```

To force bun to always be used with a script, use a shebang.

```
@@ -33,6 +33,11 @@ It creates:
- an entry point which defaults to `index.ts` unless any of `index.{tsx, jsx, js, mts, mjs}` exist or the `package.json` specifies a `module` or `main` field
- a `README.md` file

AI Agent rules (disable with `BUN_AGENT_RULE_DISABLED=1`):

- a `CLAUDE.md` file when the Claude CLI is detected (disable with the `CLAUDE_CODE_AGENT_RULE_DISABLED` env var)
- a `.cursor/rules/*.mdc` file to guide [Cursor AI](https://cursor.sh) to use Bun instead of Node.js and npm, when Cursor is detected

If you pass `-y` or `--yes`, it will assume you want to continue without asking questions.

At the end, it runs `bun install` to install `@types/bun`.
@@ -221,6 +221,38 @@ Bun uses a global cache at `~/.bun/install/cache/` to minimize disk usage. Packa

For complete documentation refer to [Package manager > Global cache](https://bun.com/docs/install/cache).

## Minimum release age

To protect against supply chain attacks where malicious packages are quickly published, you can configure a minimum age requirement for npm packages. Package versions published more recently than the specified threshold (in seconds) will be filtered out during installation.

```bash
# Only install package versions published at least 3 days ago
$ bun add @types/bun --minimum-release-age 259200 # seconds
```

You can also configure this in `bunfig.toml`:

```toml
[install]
# Only install package versions published at least 3 days ago
minimumReleaseAge = 259200 # seconds

# Exclude trusted packages from the age gate
minimumReleaseAgeExcludes = ["@types/node", "typescript"]
```

When the minimum age filter is active:

- It only affects new package resolution - existing packages in `bun.lock` remain unchanged
- All dependencies (direct and transitive) are filtered to meet the age requirement when being resolved
- When versions are blocked by the age gate, a stability check detects rapid bugfix patterns
- If multiple versions were published close together just outside your age gate, it extends the filter to skip those potentially unstable versions and selects an older, more mature version
- The search extends up to 7 days past the age gate; if rapid releases are still being found at that point, the stability check is ignored
- Exact version requests (like `package@1.1.1`) still respect the age gate but bypass the stability check
- Versions without a `time` field are treated as passing the age check (the npm registry should always provide timestamps)

For more advanced security scanning, including integration with services & custom filtering, see [Package manager > Security Scanner API](https://bun.com/docs/install/security-scanner-api).

## Configuration

The default behavior of `bun install` can be configured in `bunfig.toml`. The default values are shown below.

@@ -255,6 +287,10 @@ concurrentScripts = 16 # (cpu count or GOMAXPROCS) x2

# installation strategy: "hoisted" or "isolated"
# default: "hoisted"
linker = "hoisted"

# minimum age config
minimumReleaseAge = 259200 # seconds
minimumReleaseAgeExcludes = ["@types/node", "typescript"]
```

## CI/CD
@@ -44,4 +44,47 @@ You can also pass glob patterns to filter by workspace names:

{% bunOutdatedTerminal glob="{e,t}*" displayGlob="--filter='@monorepo/{types,cli}'" /%}

### Catalog Dependencies

`bun outdated` supports checking catalog dependencies defined in `package.json`:

```sh
$ bun outdated -r
┌────────────────────┬─────────┬─────────┬─────────┬────────────────────────────────┐
│ Package            │ Current │ Update  │ Latest  │ Workspace                      │
├────────────────────┼─────────┼─────────┼─────────┼────────────────────────────────┤
│ body-parser        │ 1.19.0  │ 1.19.0  │ 2.2.0   │ @test/shared                   │
├────────────────────┼─────────┼─────────┼─────────┼────────────────────────────────┤
│ cors               │ 2.8.0   │ 2.8.0   │ 2.8.5   │ @test/shared                   │
├────────────────────┼─────────┼─────────┼─────────┼────────────────────────────────┤
│ chalk              │ 4.0.0   │ 4.0.0   │ 5.6.2   │ @test/utils                    │
├────────────────────┼─────────┼─────────┼─────────┼────────────────────────────────┤
│ uuid               │ 8.0.0   │ 8.0.0   │ 13.0.0  │ @test/utils                    │
├────────────────────┼─────────┼─────────┼─────────┼────────────────────────────────┤
│ axios              │ 0.21.0  │ 0.21.0  │ 1.12.2  │ catalog (@test/app)            │
├────────────────────┼─────────┼─────────┼─────────┼────────────────────────────────┤
│ lodash             │ 4.17.15 │ 4.17.15 │ 4.17.21 │ catalog (@test/app, @test/app) │
├────────────────────┼─────────┼─────────┼─────────┼────────────────────────────────┤
│ react              │ 17.0.0  │ 17.0.0  │ 19.1.1  │ catalog (@test/app)            │
├────────────────────┼─────────┼─────────┼─────────┼────────────────────────────────┤
│ react-dom          │ 17.0.0  │ 17.0.0  │ 19.1.1  │ catalog (@test/app)            │
├────────────────────┼─────────┼─────────┼─────────┼────────────────────────────────┤
│ express            │ 4.17.0  │ 4.17.0  │ 5.1.0   │ catalog (@test/shared)         │
├────────────────────┼─────────┼─────────┼─────────┼────────────────────────────────┤
│ moment             │ 2.24.0  │ 2.24.0  │ 2.30.1  │ catalog (@test/utils)          │
├────────────────────┼─────────┼─────────┼─────────┼────────────────────────────────┤
│ @types/node (dev)  │ 14.0.0  │ 14.0.0  │ 24.5.2  │ @test/shared                   │
├────────────────────┼─────────┼─────────┼─────────┼────────────────────────────────┤
│ @types/react (dev) │ 17.0.0  │ 17.0.0  │ 19.1.15 │ catalog:testing (@test/app)    │
├────────────────────┼─────────┼─────────┼─────────┼────────────────────────────────┤
│ eslint (dev)       │ 7.0.0   │ 7.0.0   │ 9.36.0  │ catalog:testing (@test/app)    │
├────────────────────┼─────────┼─────────┼─────────┼────────────────────────────────┤
│ typescript (dev)   │ 4.9.5   │ 4.9.5   │ 5.9.2   │ catalog:build (@test/app)      │
├────────────────────┼─────────┼─────────┼─────────┼────────────────────────────────┤
│ jest (dev)         │ 26.0.0  │ 26.0.0  │ 30.2.0  │ catalog:testing (@test/shared) │
├────────────────────┼─────────┼─────────┼─────────┼────────────────────────────────┤
│ prettier (dev)     │ 2.0.0   │ 2.0.0   │ 3.6.2   │ catalog:build (@test/utils)    │
└────────────────────┴─────────┴─────────┴─────────┴────────────────────────────────┘
```
{% bunCLIUsage command="outdated" /%}

@@ -82,6 +82,16 @@ The `--dry-run` flag can be used to simulate the publish process without actuall

$ bun publish --dry-run
```

### `--tolerate-republish`

The `--tolerate-republish` flag makes `bun publish` exit with code 0 instead of code 1 when attempting to republish over an existing version number. This is useful in automated workflows where republishing the same version might occur and should not be treated as an error.

```sh
$ bun publish --tolerate-republish
```

Without this flag, attempting to publish a version that already exists results in an error and exit code 1; with it, the command exits successfully even when republishing an existing version.
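As a sketch of how this plays out in an automated workflow, the following TypeScript wrapper (hypothetical, using `Bun.spawn`) only fails the job on genuine publish errors:

```ts
// Hypothetical CI helper: republishing an existing version is tolerated (exit 0),
// so only real publish failures propagate.
const proc = Bun.spawn(["bun", "publish", "--tolerate-republish"], {
  stdout: "inherit",
  stderr: "inherit",
});
const code = await proc.exited;
if (code !== 0) process.exit(code);
```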
### `--gzip-level`

Specify the level of gzip compression to use when packing the package. This only applies when `bun publish` is run without a tarball path argument. Values range from `0` to `9` (default `9`).
@@ -151,6 +151,14 @@ By default, Bun respects this shebang and executes the script with `node`. Howev

$ bun run --bun vite
```

### `--no-addons`

Disable native addons and use the `node-addons` export condition.

```bash
$ bun --no-addons run server.js
```

### Filtering

In monorepos containing multiple packages, you can use the `--filter` argument to execute scripts in many packages at once.

@@ -166,6 +174,14 @@ will execute `<script>` in both `bar` and `baz`, but not in `foo`.

Find more details in the docs page for [filter](https://bun.com/docs/cli/filter#running-scripts-with-filter).

### `--workspaces`

Run scripts across all workspaces in the monorepo:

```bash
bun run --workspaces test
```

## `bun run -` to pipe code from stdin

`bun run -` lets you read JavaScript, TypeScript, TSX, or JSX from stdin and execute it without writing to a temporary file first.
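For example, here is a small TypeScript sketch that pipes a snippet into `bun run -` programmatically via `Bun.spawn` (the snippet itself is hypothetical):

```ts
// Feed a TypeScript snippet to `bun run -` over stdin and read its output.
const proc = Bun.spawn(["bun", "run", "-"], {
  stdin: "pipe",
  stdout: "pipe",
});
proc.stdin.write('const x: number = 2 + 2; console.log("result:", x);\n');
proc.stdin.end();
console.log(await new Response(proc.stdout).text()); // prints "result: 4"
```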
@@ -212,6 +228,14 @@ $ bun --smol run index.tsx

This causes the garbage collector to run more frequently, which can slow down execution. However, it can be useful in environments with limited memory. Bun automatically adjusts the garbage collector's heap size based on the available memory (accounting for cgroups and other memory limits) with and without the `--smol` flag, so `--smol` is mostly useful when you want the heap to grow more slowly.
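A rough way to observe the effect is a probe like the one below; run it with and without `--smol` and compare the reported numbers (exact values depend on the machine, and the heap accounting details here are an assumption):

```ts
// Allocate ~100 MB, force a collection, then report memory usage.
const chunks: Uint8Array[] = [];
for (let i = 0; i < 100; i++) chunks.push(new Uint8Array(1024 * 1024));
Bun.gc(true); // force a synchronous garbage collection
const { heapUsed, rss } = process.memoryUsage();
console.log(chunks.length, "chunks; heapUsed:", heapUsed, "rss:", rss);
```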
## `--user-agent`

**`--user-agent <string>`** - Set the User-Agent header for all `fetch()` requests:

```bash
bun --user-agent "MyBot/1.0" run index.tsx
```
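To check that the header is applied, a quick sketch: start a throwaway server that echoes back the request's User-Agent, then fetch from it (run the file with `--user-agent "MyBot/1.0"`):

```ts
// Echo the incoming User-Agent header back to the caller.
const server = Bun.serve({
  port: 0, // pick any free port
  fetch(req) {
    return new Response(req.headers.get("user-agent") ?? "none");
  },
});
console.log(await (await fetch(server.url)).text()); // "MyBot/1.0" with the flag set
server.stop();
```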
## Resolution order

Absolute paths and paths starting with `./` or `.\\` are always executed as source files. Unless you are using `bun run`, running a file with an allowed extension will prefer the file over a `package.json` script.

@@ -223,4 +247,15 @@ When there is a package.json script and a file with the same name, `bun run` pri

3. Binaries from project packages, eg `bun add eslint && bun run eslint`
4. (`bun run` only) System commands, eg `bun run ls`

### `--unhandled-rejections`

Configure how unhandled promise rejections are handled:

```bash
$ bun --unhandled-rejections=throw script.js   # Throw exception (terminate immediately)
$ bun --unhandled-rejections=strict script.js  # Throw exception (emit rejectionHandled if handled later)
$ bun --unhandled-rejections=warn script.js    # Print warning to stderr (default in Node.js)
$ bun --unhandled-rejections=none script.js    # Silently ignore
```
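A minimal script to observe the modes (save as `script.js` and run with each flag value):

```ts
// Intentionally leave a rejection unhandled to see each mode's behavior.
Promise.reject(new Error("boom"));
// throw/strict: the process terminates before the timer fires
// warn: a warning is printed to stderr and "still running" is logged
// none: only "still running" is logged
setTimeout(() => console.log("still running"), 10);
```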
{% bunCLIUsage command="run" /%}

@@ -47,6 +47,8 @@ To filter by _test name_, use the `-t`/`--test-name-pattern` flag.

$ bun test --test-name-pattern addition
```

When no tests match the filter, `bun test` exits with code 1.

To run a specific file in the test runner, make sure the path starts with `./` or `/` to distinguish it from a filter name.

```bash
@@ -186,6 +188,11 @@ test.serial("second serial test", () => {

test("independent test", () => {
  expect(true).toBe(true);
});

// Chaining test qualifiers
test.failing.each([1, 2, 3])("chained qualifiers %d", input => {
  expect(input).toBe(0); // This test is expected to fail for each input
});
```

## Rerun tests
@@ -90,6 +90,17 @@ Packages are organized in sections by dependency type:

Within each section, individual packages may have additional suffixes (` dev`, ` peer`, ` optional`) for extra clarity.

## `--recursive`

Use the `--recursive` flag with `--interactive` to update dependencies across all workspaces in a monorepo:

```sh
$ bun update --interactive --recursive
$ bun update -i -r
```

This displays an additional "Workspace" column showing which workspace each dependency belongs to.

## `--latest`

By default, `bun update` will update to the latest version of a dependency that satisfies the version range specified in your `package.json`.
@@ -7,7 +7,7 @@ When building a WebSocket server, it's typically necessary to store some identif

With [Bun.serve()](https://bun.com/docs/api/websockets#contextual-data), this "contextual data" is set when the connection is initially upgraded by passing a `data` parameter in the `server.upgrade()` call.

```ts
-Bun.serve<{ socketId: number }>({
+Bun.serve({
  fetch(req, server) {
    const success = server.upgrade(req, {
      data: {
@@ -20,6 +20,9 @@ Bun.serve<{ socketId: number }>({
    // ...
  },
  websocket: {
+    // TypeScript: specify the type of ws.data like this
+    data: {} as { socketId: number },

    // define websocket handlers
    async message(ws, message) {
      // the contextual data is available as the `data` property
@@ -41,8 +44,7 @@ type WebSocketData = {
  userId: string;
};

-// TypeScript: specify the type of `data`
-Bun.serve<WebSocketData>({
+Bun.serve({
  async fetch(req, server) {
    // use a library to parse cookies
    const cookies = parseCookies(req.headers.get("Cookie"));
@@ -60,6 +62,9 @@ Bun.serve<WebSocketData>({
    if (upgraded) return undefined;
  },
  websocket: {
+    // TypeScript: specify the type of ws.data like this
+    data: {} as WebSocketData,

    async message(ws, message) {
      // save the message to a database
      await saveMessageToDatabase({
@@ -7,7 +7,7 @@ Bun's server-side `WebSocket` API provides a native pub-sub API. Sockets can be
|
||||
This code snippet implements a simple single-channel chat server.
|
||||
|
||||
```ts
|
||||
const server = Bun.serve<{ username: string }>({
|
||||
const server = Bun.serve({
|
||||
fetch(req, server) {
|
||||
const cookies = req.headers.get("cookie");
|
||||
const username = getUsernameFromCookies(cookies);
|
||||
@@ -17,6 +17,9 @@ const server = Bun.serve<{ username: string }>({
|
||||
return new Response("Hello world");
|
||||
},
|
||||
websocket: {
|
||||
// TypeScript: specify the type of ws.data like this
|
||||
data: {} as { username: string },
|
||||
|
||||
open(ws) {
|
||||
const msg = `${ws.data.username} has entered the chat`;
|
||||
ws.subscribe("the-group-chat");
|
||||
|
||||
@@ -7,7 +7,7 @@ Start a simple WebSocket server using [`Bun.serve`](https://bun.com/docs/api/htt

Inside `fetch`, we attempt to upgrade incoming `ws:` or `wss:` requests to WebSocket connections.

```ts
-const server = Bun.serve<{ authToken: string }>({
+const server = Bun.serve({
  fetch(req, server) {
    const success = server.upgrade(req);
    if (success) {
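Pulling the diffs above together, here is a minimal self-contained TypeScript sketch using the `data: {} as T` typing they introduce; the token extraction and echo behavior are illustrative assumptions:

```ts
// Minimal typed WebSocket echo server using the `data: {} as T` pattern.
const server = Bun.serve({
  port: 0,
  fetch(req, server) {
    // Hypothetical: derive per-connection context from a header.
    const authToken = req.headers.get("authorization") ?? "anonymous";
    if (server.upgrade(req, { data: { authToken } })) return;
    return new Response("Upgrade failed", { status: 500 });
  },
  websocket: {
    // TypeScript: the type of ws.data comes from this phantom value.
    data: {} as { authToken: string },
    message(ws, message) {
      ws.send(`${ws.data.authToken}: ${message}`); // echo with context attached
    },
  },
});
console.log(`listening on ${server.url}`);
```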
@@ -24,6 +24,26 @@ To update all dependencies to the latest versions (including breaking changes):

bun update --latest
```

### Filtering options

**`--audit-level=<low|moderate|high|critical>`** - Only show vulnerabilities at this severity level or higher:

```bash
bun audit --audit-level=high
```

**`--prod`** - Audit only production dependencies (excludes devDependencies):

```bash
bun audit --prod
```

**`--ignore <CVE>`** - Ignore specific CVEs (can be used multiple times):

```bash
bun audit --ignore CVE-2022-25883 --ignore CVE-2023-26136
```

### `--json`

Use the `--json` flag to print the raw JSON response from the registry instead of the formatted report:

@@ -46,3 +46,13 @@ print = "yarn"

Bun v1.2 changed the default lockfile format to the text-based `bun.lock`. Existing binary `bun.lockb` lockfiles can be migrated to the new format by running `bun install --save-text-lockfile --frozen-lockfile --lockfile-only` and deleting `bun.lockb`.

More information about the new lockfile format can be found in [our blog post](https://bun.com/blog/bun-lock-text-lockfile).

#### Automatic lockfile migration

When running `bun install` in a project without a `bun.lock`, Bun automatically migrates existing lockfiles:

- `yarn.lock` (v1)
- `package-lock.json` (npm)
- `pnpm-lock.yaml` (pnpm)

The original lockfile is preserved and can be removed manually after verification.
@@ -73,3 +73,33 @@ The equivalent `bunfig.toml` option is to add a key in [`install.scopes`](https:

[install.scopes]
myorg = { url = "http://localhost:4873/", username = "myusername", password = "$NPM_PASSWORD" }
```

### `link-workspace-packages`: Control workspace package installation

Controls how workspace packages are installed when available locally:

```ini
link-workspace-packages=true
```

The equivalent `bunfig.toml` option is [`install.linkWorkspacePackages`](https://bun.com/docs/runtime/bunfig#install-linkworkspacepackages):

```toml
[install]
linkWorkspacePackages = true
```

### `save-exact`: Save exact versions

Always saves exact versions without the `^` prefix:

```ini
save-exact=true
```

The equivalent `bunfig.toml` option is [`install.exact`](https://bun.com/docs/runtime/bunfig#install-exact):

```toml
[install]
exact = true
```
@@ -81,7 +81,7 @@ Workspaces have a couple major benefits.

- **Code can be split into logical parts.** If one package relies on another, you can simply add it as a dependency in `package.json`. If package `b` depends on `a`, `bun install` will install your local `packages/a` directory into `node_modules` instead of downloading it from the npm registry.
- **Dependencies can be de-duplicated.** If `a` and `b` share a common dependency, it will be _hoisted_ to the root `node_modules` directory. This reduces redundant disk usage and minimizes "dependency hell" issues associated with having multiple versions of a package installed simultaneously.
-- **Run scripts in multiple packages.** You can use the [`--filter` flag](https://bun.com/docs/cli/filter) to easily run `package.json` scripts in multiple packages in your workspace.
+- **Run scripts in multiple packages.** You can use the [`--filter` flag](https://bun.com/docs/cli/filter) to easily run `package.json` scripts in multiple packages in your workspace, or `--workspaces` to run scripts across all workspaces.

## Share versions with Catalogs

@@ -249,6 +249,46 @@ This is useful for:

The `--concurrent` CLI flag will override this setting when specified.

### `test.randomize`

Run tests in random order. Default `false`.

```toml
[test]
randomize = true
```

This helps catch bugs related to test interdependencies by running tests in a different order each time. When combined with `seed`, the random order becomes reproducible.

The `--randomize` CLI flag will override this setting when specified.
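To see what randomization catches, consider this hypothetical order-dependent pair; with `randomize = true` the hidden coupling eventually surfaces as a failure:

```ts
// Two tests that silently depend on execution order.
import { expect, test } from "bun:test";

let counter = 0;
test("first increments", () => {
  counter++;
  expect(counter).toBe(1); // fails if "second reads" ran first
});
test("second reads", () => {
  expect(counter).toBe(1); // fails whenever the order is shuffled
});
```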
### `test.seed`

Set the random seed for test randomization. This option requires `randomize` to be `true`.

```toml
[test]
randomize = true
seed = 2444615283
```

Using a seed makes the randomized test order reproducible across runs, which is useful for debugging flaky tests. When you encounter a test failure with randomization enabled, you can use the same seed to reproduce the exact test order.

The `--seed` CLI flag will override this setting when specified.

### `test.rerunEach`

Re-run each test file a specified number of times. Default `0` (run once).

```toml
[test]
rerunEach = 3
```

This is useful for catching flaky tests or non-deterministic behavior. Each test file will be executed the specified number of times.

The `--rerun-each` CLI flag will override this setting when specified.
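For instance, a hypothetical non-deterministic test like the one below passes most single runs but is far more likely to be caught when each file is executed several times:

```ts
// A test that fails roughly 10% of the time; rerunEach raises the odds of seeing it.
import { expect, test } from "bun:test";

test("flaky without stubbing randomness", () => {
  expect(Math.random()).toBeLessThan(0.9);
});
```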
## Package manager

Package management is a complex issue; to support a range of use cases, the behavior of `bun install` can be configured under the `[install]` section.

@@ -570,6 +610,20 @@ Valid values are:

{% /table %}

### `install.minimumReleaseAge`

Configure a minimum age (in seconds) for npm package versions. Package versions published more recently than this threshold will be filtered out during installation. Default is `null` (disabled).

```toml
[install]
# Only install package versions published at least 3 days ago
minimumReleaseAge = 259200
# These packages will bypass the 3-day minimum age requirement
minimumReleaseAgeExcludes = ["@types/bun", "typescript"]
```

For more details see [Minimum release age](https://bun.com/docs/cli/install#minimum-release-age) in the install documentation.

<!-- ## Debugging -->

<!--
@@ -220,6 +220,11 @@ These environment variables are read by Bun and configure aspects of its behavio

- `DO_NOT_TRACK`
- Disable uploading crash reports to `bun.report` on crash. On macOS & Windows, crash report uploads are enabled by default. Otherwise, telemetry is not sent yet as of May 21st, 2024, but we are planning to add telemetry in the coming weeks. If `DO_NOT_TRACK=1`, then auto-uploading crash reports and telemetry are both [disabled](https://do-not-track.dev/).

---

- `BUN_OPTIONS`
- Prepends command-line arguments to any Bun execution. For example, `BUN_OPTIONS="--hot"` makes `bun run dev` behave like `bun --hot run dev`.

{% /table %}

## Runtime transpiler caching
@@ -124,7 +124,7 @@ This page is updated regularly to reflect compatibility status of the latest ver

### [`node:perf_hooks`](https://nodejs.org/api/perf_hooks.html)

-🟡 Missing `createHistogram` `monitorEventLoopDelay`. It's recommended to use `performance` global instead of `perf_hooks.performance`.
+🟡 APIs are implemented, but Node.js test suite does not pass yet for this module.

### [`node:process`](https://nodejs.org/api/process.html)

@@ -156,7 +156,7 @@ This page is updated regularly to reflect compatibility status of the latest ver

### [`node:worker_threads`](https://nodejs.org/api/worker_threads.html)

-🟡 `Worker` doesn't support the following options: `stdin` `stdout` `stderr` `trackedUnmanagedFds` `resourceLimits`. Missing `markAsUntransferable` `moveMessagePortToContext` `getHeapSnapshot`.
+🟡 `Worker` doesn't support the following options: `stdin` `stdout` `stderr` `trackedUnmanagedFds` `resourceLimits`. Missing `markAsUntransferable` `moveMessagePortToContext`.

### [`node:inspector`](https://nodejs.org/api/inspector.html)

@@ -8,6 +8,8 @@
# Thread::initializePlatformThreading() in ThreadingPOSIX.cpp) to the JS thread to suspend or resume
# it. So stopping the process would just create noise when debugging any long-running script.
process handle -p true -s false -n false SIGPWR
+process handle -p true -s false -n false SIGUSR1
+process handle -p true -s false -n false SIGUSR2

command script import -c lldb_pretty_printers.py
type category enable zig.lang
@@ -78,6 +78,12 @@
      "no-empty-file": "off",
      "no-unnecessary-await": "off"
    }
  },
+  {
+    "files": ["src/js/builtins/**"],
+    "rules": {
+      "no-unused-expressions": "off"
+    }
+  }
]
}
package.json | 10
@@ -11,14 +11,14 @@
    "@lezer/cpp": "^1.1.3",
    "@types/bun": "workspace:*",
    "bun-tracestrings": "github:oven-sh/bun.report#912ca63e26c51429d3e6799aa2a6ab079b188fd8",
-    "esbuild": "^0.21.4",
-    "mitata": "^0.1.11",
+    "esbuild": "^0.21.5",
+    "mitata": "^0.1.14",
    "peechy": "0.4.34",
-    "prettier": "^3.5.3",
-    "prettier-plugin-organize-imports": "^4.0.0",
+    "prettier": "^3.6.2",
+    "prettier-plugin-organize-imports": "^4.3.0",
    "react": "^18.3.1",
    "react-dom": "^18.3.1",
-    "source-map-js": "^1.2.0",
+    "source-map-js": "^1.2.1",
    "typescript": "5.9.2"
  },
  "resolutions": {
packages/bun-types/bun.d.ts (vendored) | 1471 (diff suppressed because it is too large)
packages/bun-types/bun.ns.d.ts (vendored) | 2
@@ -3,5 +3,3 @@ import * as BunModule from "bun";
declare global {
  export import Bun = BunModule;
}
-
-export {};
packages/bun-types/deprecated.d.ts (vendored) | 5
@@ -98,6 +98,11 @@ declare module "bun" {
  ): void;
}

+/**
+ * @deprecated Use {@link Serve.Options Bun.Serve.Options<T, R>} instead
+ */
+type ServeOptions<T = undefined, R extends string = never> = Serve.Options<T, R>;
+
/** @deprecated Use {@link SQL.Query Bun.SQL.Query} */
type SQLQuery<T = any> = SQL.Query<T>;

packages/bun-types/globals.d.ts (vendored) | 76
@@ -7,6 +7,13 @@ declare module "bun" {
  type LibWorkerOrBunWorker = LibDomIsLoaded extends true ? {} : Bun.Worker;
  type LibEmptyOrBunWebSocket = LibDomIsLoaded extends true ? {} : Bun.WebSocket;

+  type LibEmptyOrNodeStreamWebCompressionStream = LibDomIsLoaded extends true
+    ? {}
+    : import("node:stream/web").CompressionStream;
+  type LibEmptyOrNodeStreamWebDecompressionStream = LibDomIsLoaded extends true
+    ? {}
+    : import("node:stream/web").DecompressionStream;
+
  type LibPerformanceOrNodePerfHooksPerformance = LibDomIsLoaded extends true ? {} : import("perf_hooks").Performance;
  type LibEmptyOrPerformanceEntry = LibDomIsLoaded extends true ? {} : import("node:perf_hooks").PerformanceEntry;
  type LibEmptyOrPerformanceMark = LibDomIsLoaded extends true ? {} : import("node:perf_hooks").PerformanceMark;
@@ -271,6 +278,30 @@ declare var Event: {
  new (type: string, eventInitDict?: Bun.EventInit): Event;
};

+/**
+ * Unimplemented in Bun
+ */
+interface CompressionStream extends Bun.__internal.LibEmptyOrNodeStreamWebCompressionStream {}
+/**
+ * Unimplemented in Bun
+ */
+declare var CompressionStream: Bun.__internal.UseLibDomIfAvailable<
+  "CompressionStream",
+  typeof import("node:stream/web").CompressionStream
+>;
+
+/**
+ * Unimplemented in Bun
+ */
+interface DecompressionStream extends Bun.__internal.LibEmptyOrNodeStreamWebCompressionStream {}
+/**
+ * Unimplemented in Bun
+ */
+declare var DecompressionStream: Bun.__internal.UseLibDomIfAvailable<
+  "DecompressionStream",
+  typeof import("node:stream/web").DecompressionStream
+>;
+
interface EventTarget {
  /**
   * Adds a new handler for the `type` event. Any given `listener` is added only once per `type` and per `capture` option value.
@@ -860,7 +891,10 @@ interface ErrnoException extends Error {
  syscall?: string | undefined;
}

-/** An abnormal event (called an exception) which occurs as a result of calling a method or accessing a property of a web API. */
+/**
+ * An abnormal event (called an exception) which occurs as a result of calling a
+ * method or accessing a property of a web API
+ */
interface DOMException extends Error {
  readonly message: string;
  readonly name: string;
@@ -890,11 +924,35 @@ interface DOMException extends Error {
  readonly INVALID_NODE_TYPE_ERR: 24;
  readonly DATA_CLONE_ERR: 25;
}

-// declare var DOMException: {
-//   prototype: DOMException;
-//   new (message?: string, name?: string): DOMException;
-// };
+declare var DOMException: {
+  prototype: DOMException;
+  new (message?: string, name?: string): DOMException;
+  readonly INDEX_SIZE_ERR: 1;
+  readonly DOMSTRING_SIZE_ERR: 2;
+  readonly HIERARCHY_REQUEST_ERR: 3;
+  readonly WRONG_DOCUMENT_ERR: 4;
+  readonly INVALID_CHARACTER_ERR: 5;
+  readonly NO_DATA_ALLOWED_ERR: 6;
+  readonly NO_MODIFICATION_ALLOWED_ERR: 7;
+  readonly NOT_FOUND_ERR: 8;
+  readonly NOT_SUPPORTED_ERR: 9;
+  readonly INUSE_ATTRIBUTE_ERR: 10;
+  readonly INVALID_STATE_ERR: 11;
+  readonly SYNTAX_ERR: 12;
+  readonly INVALID_MODIFICATION_ERR: 13;
+  readonly NAMESPACE_ERR: 14;
+  readonly INVALID_ACCESS_ERR: 15;
+  readonly VALIDATION_ERR: 16;
+  readonly TYPE_MISMATCH_ERR: 17;
+  readonly SECURITY_ERR: 18;
+  readonly NETWORK_ERR: 19;
+  readonly ABORT_ERR: 20;
+  readonly URL_MISMATCH_ERR: 21;
+  readonly QUOTA_EXCEEDED_ERR: 22;
+  readonly TIMEOUT_ERR: 23;
+  readonly INVALID_NODE_TYPE_ERR: 24;
+  readonly DATA_CLONE_ERR: 25;
+};

declare function alert(message?: string): void;
declare function confirm(message?: string): boolean;
@@ -1605,12 +1663,6 @@ declare var AbortSignal: Bun.__internal.UseLibDomIfAvailable<
  }
>;

-interface DOMException {}
-declare var DOMException: Bun.__internal.UseLibDomIfAvailable<
-  "DOMException",
-  { prototype: DOMException; new (): DOMException }
->;

interface FormData {
  /** [MDN Reference](https://developer.mozilla.org/docs/Web/API/FormData/append) */
  append(name: string, value: string | Blob): void;
packages/bun-types/index.d.ts (vendored) | 1
@@ -21,6 +21,7 @@
/// <reference path="./redis.d.ts" />
/// <reference path="./shell.d.ts" />
/// <reference path="./experimental.d.ts" />
+/// <reference path="./serve.d.ts" />
/// <reference path="./sql.d.ts" />
/// <reference path="./security.d.ts" />

packages/bun-types/redis.d.ts (vendored) | 2593 (diff suppressed because it is too large)
packages/bun-types/serve.d.ts (vendored, new file) | 1272 (diff suppressed because it is too large)
packages/bun-types/test.d.ts (vendored) | 33
@@ -390,11 +390,20 @@ declare module "bun:test" {
     */
    repeats?: number;
  }
-  type IsTuple<T> = T extends readonly unknown[]
-    ? number extends T["length"]
-      ? false // It's an array with unknown length, not a tuple
-      : true // It's an array with a fixed length (a tuple)
-    : false; // Not an array at all
+
+  namespace __internal {
+    type IsTuple<T> = T extends readonly unknown[]
+      ? number extends T["length"]
+        ? false // It's an array with unknown length, not a tuple
+        : true // It's an array with a fixed length (a tuple)
+      : false; // Not an array at all
+
+    /**
+     * Accepts `[1, 2, 3] | ["a", "b", "c"]` and returns `[1 | "a", 2 | "b", 3 | "c"]`
+     */
+    type Flatten<T, Copy extends T = T> = { [Key in keyof T]: Copy[Key] };
+  }

  /**
   * Runs a test.
   *
@@ -418,10 +427,16 @@ declare module "bun:test" {
   *
   * @category Testing
   */
-  export interface Test<T extends Readonly<any[]>> {
+  export interface Test<T extends ReadonlyArray<unknown>> {
    (
      label: string,
-      fn: (...args: IsTuple<T> extends true ? [...T, (err?: unknown) => void] : T) => void | Promise<unknown>,
+      fn: (
+        ...args: __internal.IsTuple<T> extends true
+          ? [...table: __internal.Flatten<T>, done: (err?: unknown) => void]
+          : T
+      ) => void | Promise<unknown>,

      /**
       * - If a `number`, sets the timeout for the test in milliseconds.
       * - If an `object`, sets the options for the test.
@@ -513,8 +528,8 @@ declare module "bun:test" {
   *
   * @param table Array of Arrays with the arguments that are passed into the test fn for each row.
   */
-  each<T extends Readonly<[any, ...any[]]>>(table: readonly T[]): Test<[...T]>;
-  each<T extends any[]>(table: readonly T[]): Test<[...T]>;
+  each<T extends Readonly<[unknown, ...unknown[]]>>(table: readonly T[]): Test<T>;
+  each<T extends unknown[]>(table: readonly T[]): Test<T>;
  each<T>(table: T[]): Test<[T]>;
}
/**

@@ -717,6 +717,25 @@ LIBUS_SOCKET_DESCRIPTOR bsd_accept_socket(LIBUS_SOCKET_DESCRIPTOR fd, struct bsd
      return LIBUS_SOCKET_ERROR;
    }

+#ifdef __APPLE__
+    /* A bug in XNU (the macOS kernel) can cause accept() to return a socket but addrlen=0.
+     * This happens when an IPv4 connection is made to an IPv6 dual-stack listener
+     * and the connection is immediately aborted (sends RST packet).
+     * However, there might be buffered data from connectx() before the abort. */
+    if (addr->len == 0) {
+      /* Check if there's any pending data before discarding the socket */
+      char peek_buf[1];
+      ssize_t has_data = recv(accepted_fd, peek_buf, 1, MSG_PEEK | MSG_DONTWAIT);
+
+      if (has_data <= 0) {
+        /* No data available, socket is truly dead - discard it */
+        bsd_close_socket(accepted_fd);
+        continue; /* Try to accept the next connection */
+      }
+      /* If has_data > 0, let the socket through - there's buffered data to read */
+    }
+#endif
+
    break;
  }

@@ -226,11 +226,11 @@ struct us_bun_socket_context_options_t {
  const char *ca_file_name;
  const char *ssl_ciphers;
  int ssl_prefer_low_memory_usage; /* Todo: rename to prefer_low_memory_usage and apply for TCP as well */
-  const char **key;
+  const char * const *key;
  unsigned int key_count;
-  const char **cert;
+  const char * const *cert;
  unsigned int cert_count;
-  const char **ca;
+  const char * const *ca;
  unsigned int ca_count;
  unsigned int secure_options;
  int reject_unauthorized;

@@ -303,10 +303,10 @@ public:
  auto context = (struct us_socket_context_t *)this->httpContext;
  struct us_socket_t *s = context->head_sockets;
  while (s) {
-    HttpResponseData<SSL> *httpResponseData = HttpResponse<SSL>::getHttpResponseDataS(s);
-    httpResponseData->shouldCloseOnceIdle = true;
+    // no matter the type of socket will always contain the AsyncSocketData
+    auto *data = ((AsyncSocket<SSL> *) s)->getAsyncSocketData();
    struct us_socket_t *next = s->next;
-    if (httpResponseData->isIdle) {
+    if (data->isIdle) {
      us_socket_close(SSL, s, LIBUS_SOCKET_CLOSE_CODE_CLEAN_SHUTDOWN, 0);
    }
    s = next;
@@ -641,6 +641,10 @@ public:
  httpContext->getSocketContextData()->onClientError = std::move(onClientError);
}

+void setOnSocketUpgraded(HttpContextData<SSL>::OnSocketUpgradedCallback onUpgraded) {
+  httpContext->getSocketContextData()->onSocketUpgraded = onUpgraded;
+}
+
TemplatedApp &&run() {
  uWS::run();
  return std::move(*this);
@@ -83,6 +83,7 @@ struct AsyncSocketData {

  /* Or empty */
  AsyncSocketData() = default;
+  bool isIdle = false;
};

}

@@ -253,6 +253,7 @@ private:
  /* Mark that we are inside the parser now */
  httpContextData->flags.isParsingHttp = true;
+  httpResponseData->isIdle = false;

  // clients need to know the cursor after http parse, not servers!
  // how far did we read then? we need to know to continue with websocket parsing data? or?

@@ -43,11 +43,11 @@ struct alignas(16) HttpContextData {
  template <bool> friend struct TemplatedApp;
private:
  std::vector<MoveOnlyFunction<void(HttpResponse<SSL> *, int)>> filterHandlers;
  using OnSocketClosedCallback = void (*)(void* userData, int is_ssl, struct us_socket_t *rawSocket);
  using OnSocketDataCallback = void (*)(void* userData, int is_ssl, struct us_socket_t *rawSocket, const char *data, int length, bool last);
  using OnSocketDrainCallback = void (*)(void* userData, int is_ssl, struct us_socket_t *rawSocket);
+  using OnSocketUpgradedCallback = void (*)(void* userData, int is_ssl, struct us_socket_t *rawSocket);
  using OnClientErrorCallback = MoveOnlyFunction<void(int is_ssl, struct us_socket_t *rawSocket, uWS::HttpParserError errorCode, char *rawPacket, int rawPacketLength)>;

  using OnSocketClosedCallback = void (*)(void* userData, int is_ssl, struct us_socket_t *rawSocket);

  MoveOnlyFunction<void(const char *hostname)> missingServerNameHandler;

@@ -66,6 +66,7 @@ private:
  OnSocketClosedCallback onSocketClosed = nullptr;
  OnSocketDrainCallback onSocketDrain = nullptr;
  OnSocketDataCallback onSocketData = nullptr;
+  OnSocketUpgradedCallback onSocketUpgraded = nullptr;
  OnClientErrorCallback onClientError = nullptr;

  uint64_t maxHeaderSize = 0; // 0 means no limit
@@ -78,6 +79,7 @@ private:
  }

public:

  HttpFlags flags;
};

@@ -316,14 +316,20 @@ public:
  HttpContext<SSL> *httpContext = (HttpContext<SSL> *) us_socket_context(SSL, (struct us_socket_t *) this);

  /* Move any backpressure out of HttpResponse */
-  BackPressure backpressure(std::move(((AsyncSocketData<SSL> *) getHttpResponseData())->buffer));
+  auto* responseData = getHttpResponseData();
+  BackPressure backpressure(std::move(((AsyncSocketData<SSL> *) responseData)->buffer));
+
+  auto* socketData = responseData->socketData;
+  HttpContextData<SSL> *httpContextData = httpContext->getSocketContextData();

  /* Destroy HttpResponseData */
-  getHttpResponseData()->~HttpResponseData();
+  responseData->~HttpResponseData();

  /* Before we adopt and potentially change socket, check if we are corked */
  bool wasCorked = Super::isCorked();

  /* Adopting a socket invalidates it, do not rely on it directly to carry any data */
  us_socket_t *usSocket = us_socket_context_adopt_socket(SSL, (us_socket_context_t *) webSocketContext, (us_socket_t *) this, sizeof(WebSocketData) + sizeof(UserData));
  WebSocket<SSL, true, UserData> *webSocket = (WebSocket<SSL, true, UserData> *) usSocket;
@@ -334,10 +340,12 @@ public:
  }

  /* Initialize websocket with any moved backpressure intact */
-  webSocket->init(perMessageDeflate, compressOptions, std::move(backpressure));
+  webSocket->init(perMessageDeflate, compressOptions, std::move(backpressure), socketData, httpContextData->onSocketClosed);
+  if (httpContextData->onSocketUpgraded) {
+    httpContextData->onSocketUpgraded(socketData, SSL, usSocket);
+  }

  /* We should only mark this if inside the parser; if upgrading "async" we cannot set this */
-  HttpContextData<SSL> *httpContextData = httpContext->getSocketContextData();
  if (httpContextData->flags.isParsingHttp) {
    /* We need to tell the Http parser that we changed socket */
    httpContextData->upgradedWebSocket = webSocket;
@@ -351,7 +359,6 @@ public:

  /* Move construct the UserData right before calling open handler */
  new (webSocket->getUserData()) UserData(std::forward<UserData>(userData));

  /* Emit open event and start the timeout */
  if (webSocketContextData->openHandler) {

@@ -109,9 +109,6 @@ struct HttpResponseData : AsyncSocketData<SSL>, HttpParser {
  uint8_t idleTimeout = 10; // default HTTP_TIMEOUT 10 seconds
  bool fromAncientRequest = false;
  bool isConnectRequest = false;
-  bool isIdle = true;
-  bool shouldCloseOnceIdle = false;

#ifdef UWS_WITH_PROXY
  ProxyParser proxyParser;

@@ -34,8 +34,8 @@ struct WebSocket : AsyncSocket<SSL> {
private:
  typedef AsyncSocket<SSL> Super;

-  void *init(bool perMessageDeflate, CompressOptions compressOptions, BackPressure &&backpressure) {
-    new (us_socket_ext(SSL, (us_socket_t *) this)) WebSocketData(perMessageDeflate, compressOptions, std::move(backpressure));
+  void *init(bool perMessageDeflate, CompressOptions compressOptions, BackPressure &&backpressure, void *socketData, WebSocketData::OnSocketClosedCallback onSocketClosed) {
+    new (us_socket_ext(SSL, (us_socket_t *) this)) WebSocketData(perMessageDeflate, compressOptions, std::move(backpressure), socketData, onSocketClosed);
    return this;
  }
public:

@@ -256,6 +256,9 @@ private:

  /* For whatever reason, if we already have emitted close event, do not emit it again */
  WebSocketData *webSocketData = (WebSocketData *) (us_socket_ext(SSL, s));
+  if (webSocketData->socketData && webSocketData->onSocketClosed) {
+    webSocketData->onSocketClosed(webSocketData->socketData, SSL, (us_socket_t *) s);
+  }
  if (!webSocketData->isShuttingDown) {
    /* Emit close event */
    auto *webSocketContextData = (WebSocketContextData<SSL, USERDATA> *) us_socket_context_ext(SSL, us_socket_context(SSL, (us_socket_t *) s));

@@ -52,7 +52,6 @@ struct WebSocketContextData {
private:

public:

  /* This one points to the App's shared topicTree */
  TopicTree<TopicTreeMessage, TopicTreeBigMessage> *topicTree;

@@ -38,6 +38,7 @@ private:
  unsigned int controlTipLength = 0;
  bool isShuttingDown = 0;
+  bool hasTimedOut = false;

  enum CompressionStatus : char {
    DISABLED,
    ENABLED,
@@ -52,7 +53,12 @@ private:
  /* We could be a subscriber */
  Subscriber *subscriber = nullptr;
public:
-  WebSocketData(bool perMessageDeflate, CompressOptions compressOptions, BackPressure &&backpressure) : AsyncSocketData<false>(std::move(backpressure)), WebSocketState<true>() {
+  using OnSocketClosedCallback = void (*)(void* userData, int is_ssl, struct us_socket_t *rawSocket);
+  void *socketData = nullptr;
+  /* node http compatibility callbacks */
+  OnSocketClosedCallback onSocketClosed = nullptr;
+
+  WebSocketData(bool perMessageDeflate, CompressOptions compressOptions, BackPressure &&backpressure, void *socketData, OnSocketClosedCallback onSocketClosed) : AsyncSocketData<false>(std::move(backpressure)), WebSocketState<true>() {
    compressionStatus = perMessageDeflate ? ENABLED : DISABLED;

    /* Initialize the dedicated sliding window(s) */
@@ -64,6 +70,10 @@ public:
      inflationStream = new InflationStream(compressOptions);
    }
  }
+  // never close websocket sockets when closing idle connections
+  this->isIdle = false;
+  this->socketData = socketData;
+  this->onSocketClosed = onSocketClosed;
}

~WebSocketData() {

@@ -80,6 +80,7 @@ function getNodeParallelTestTimeout(testPath) {
  if (testPath.includes("test-dns")) {
    return 90_000;
  }
+  if (!isCI) return 60_000; // everything slower in debug mode
  return 20_000;
}

@@ -449,7 +450,7 @@ async function runTests() {

  if (parallelism > 1) {
    console.log(grouptitle);
-    result = await fn();
+    result = await fn(index);
  } else {
    result = await startGroup(grouptitle, fn);
  }
@@ -469,6 +470,7 @@ async function runTests() {
  const label = `${getAnsi(color)}[${index}/${total}] ${title} - ${error}${getAnsi("reset")}`;
  startGroup(label, () => {
+    if (parallelism > 1) return;
    if (!isCI) return;
    process.stderr.write(stdoutPreview);
  });

@@ -579,8 +581,11 @@ async function runTests() {
  const title = relative(cwd, absoluteTestPath).replaceAll(sep, "/");
  if (isNodeTest(testPath)) {
    const testContent = readFileSync(absoluteTestPath, "utf-8");
-    const runWithBunTest =
-      title.includes("needs-test") || testContent.includes("bun:test") || testContent.includes("node:test");
+    let runWithBunTest = title.includes("needs-test") || testContent.includes("node:test");
+    // don't wanna have a filter for includes("bun:test") but these need our mocks
+    runWithBunTest ||= title === "test/js/node/test/parallel/test-fs-append-file-flush.js";
+    runWithBunTest ||= title === "test/js/node/test/parallel/test-fs-write-file-flush.js";
+    runWithBunTest ||= title === "test/js/node/test/parallel/test-fs-write-stream-flush.js";
    const subcommand = runWithBunTest ? "test" : "run";
    const env = {
      FORCE_COLOR: "0",
@@ -668,7 +673,9 @@ async function runTests() {
  const title = join(relative(cwd, vendorPath), testPath).replace(/\\/g, "/");

  if (testRunner === "bun") {
-    await runTest(title, () => spawnBunTest(execPath, testPath, { cwd: vendorPath }));
+    await runTest(title, index =>
+      spawnBunTest(execPath, testPath, { cwd: vendorPath, env: { TEST_SERIAL_ID: index } }),
+    );
  } else {
    const testRunnerPath = join(cwd, "test", "runners", `${testRunner}.ts`);
    if (!existsSync(testRunnerPath)) {
@@ -1295,6 +1302,7 @@ async function spawnBun(execPath, { args, cwd, timeout, env, stdout, stderr }) {
 * @param {object} [opts]
 * @param {string} [opts.cwd]
 * @param {string[]} [opts.args]
+ * @param {object} [opts.env]
 * @returns {Promise<TestResult>}
 */
async function spawnBunTest(execPath, testPath, opts = { cwd }) {
@@ -1328,6 +1336,7 @@ async function spawnBunTest(execPath, testPath, opts = { cwd }) {

  const env = {
    GITHUB_ACTIONS: "true", // always true so annotations are parsed
+    ...opts["env"],
  };
  if ((basename(execPath).includes("asan") || !isCI) && shouldValidateExceptions(relative(cwd, absPath))) {
    env.BUN_JSC_validateExceptionChecks = "1";

scripts/update-sqlite-amalgamation.sh (new executable file) | 60
@@ -0,0 +1,60 @@
#!/usr/bin/env bash
set -euo pipefail

# This script updates SQLite amalgamation files with the required compiler flags.
# It downloads the SQLite source, configures it with necessary flags, builds the
# amalgamation, and copies the generated files to the Bun source tree.
#
# Usage:
#   ./scripts/update-sqlite-amalgamation.sh <version_number> <year>
#
# Example:
#   ./scripts/update-sqlite-amalgamation.sh 3500400 2025
#
# The version number is a 7-digit SQLite version (e.g., 3500400 for 3.50.4)
# The year is the release year found in the download URL

if [ $# -ne 2 ]; then
  echo "Usage: $0 <version_number> <year>"
  echo "Example: $0 3500400 2025"
  exit 1
fi

VERSION_NUM="$1"
YEAR="$2"

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"

# Create temporary directory
TEMP_DIR=$(mktemp -d)
trap 'rm -rf "$TEMP_DIR"' EXIT

cd "$TEMP_DIR"

echo "Downloading SQLite source version $VERSION_NUM from year $YEAR..."
DOWNLOAD_URL="https://sqlite.org/$YEAR/sqlite-src-$VERSION_NUM.zip"
echo "URL: $DOWNLOAD_URL"

wget -q "$DOWNLOAD_URL"
unzip -q "sqlite-src-$VERSION_NUM.zip"
cd "sqlite-src-$VERSION_NUM"

echo "Configuring SQLite with required flags..."
# These flags must be set during amalgamation generation for them to take effect
# in the parser and other compile-time generated code
CFLAGS="-DSQLITE_ENABLE_UPDATE_DELETE_LIMIT=1 -DSQLITE_ENABLE_COLUMN_METADATA=1"
./configure CFLAGS="$CFLAGS" > /dev/null 2>&1

echo "Building amalgamation..."
make sqlite3.c > /dev/null 2>&1

echo "Copying files to Bun source tree..."
# Add clang-format off directive and copy the amalgamation
echo "// clang-format off" > "$REPO_ROOT/src/bun.js/bindings/sqlite/sqlite3.c"
cat sqlite3.c >> "$REPO_ROOT/src/bun.js/bindings/sqlite/sqlite3.c"

echo "// clang-format off" > "$REPO_ROOT/src/bun.js/bindings/sqlite/sqlite3_local.h"
cat sqlite3.h >> "$REPO_ROOT/src/bun.js/bindings/sqlite/sqlite3_local.h"

echo "✓ Successfully updated SQLite amalgamation files"
src/AGENTS.md (new symbolic link) | 1

@@ -0,0 +1 @@
+CLAUDE.md
src/CLAUDE.md (new file) | 12

@@ -0,0 +1,12 @@
## Zig

Syntax reminders:

- Private fields are fully supported in Zig with the `#` prefix. `struct { #foo: u32 };` makes a struct with a private field named `#foo`.
- Decl literals in Zig are recommended. `const decl: Decl = .{ .binding = 0, .value = 0 };`

Conventions:

- Prefer `@import` at the **bottom** of the file.
- It's `@import("bun")` not `@import("root").bun`
- You must be patient with the build.
@@ -431,6 +431,27 @@ pub const StandaloneModuleGraph = struct {
  }
};

+if (comptime bun.Environment.is_canary or bun.Environment.isDebug) {
+  if (bun.getenvZ("BUN_FEATURE_FLAG_DUMP_CODE")) |dump_code_dir| {
+    const buf = bun.path_buffer_pool.get();
+    defer bun.path_buffer_pool.put(buf);
+    const dest_z = bun.path.joinAbsStringBufZ(dump_code_dir, buf, &.{dest_path}, .auto);
+
+    // Scoped block to handle dump failures without skipping module emission
+    dump: {
+      const file = bun.sys.File.makeOpen(dest_z, bun.O.WRONLY | bun.O.CREAT | bun.O.TRUNC, 0o664).unwrap() catch |err| {
+        Output.prettyErrorln("<r><red>error<r><d>:<r> failed to open {s}: {s}", .{ dest_path, @errorName(err) });
+        break :dump;
+      };
+      defer file.close();
+      file.writeAll(output_file.value.buffer.bytes).unwrap() catch |err| {
+        Output.prettyErrorln("<r><red>error<r><d>:<r> failed to write {s}: {s}", .{ dest_path, @errorName(err) });
+        break :dump;
+      };
+    }
+  }
+}

var module = CompiledModuleGraphFile{
  .name = string_builder.fmtAppendCountZ("{s}{s}", .{
    prefix,

@@ -506,6 +506,12 @@ pub fn AllocationScopeIn(comptime Allocator: type) type {
  pub fn setPointerExtra(self: Self, ptr: *anyopaque, extra: Extra) void {
    return self.borrow().setPointerExtra(ptr, extra);
  }
+
+  pub fn leakSlice(self: Self, memory: anytype) void {
+    if (comptime !Self.enabled) return;
+    _ = @typeInfo(@TypeOf(memory)).pointer;
+    self.trackExternalFree(memory, null) catch @panic("tried to free memory that was not allocated by the allocation scope");
+  }
};
}

@@ -216,8 +216,7 @@ pub extern fn mi_new_reallocn(p: ?*anyopaque, newcount: usize, size: usize) ?*an
pub const MI_SMALL_WSIZE_MAX = @as(c_int, 128);
pub const MI_SMALL_SIZE_MAX = MI_SMALL_WSIZE_MAX * @import("std").zig.c_translation.sizeof(?*anyopaque);
pub const MI_ALIGNMENT_MAX = (@as(c_int, 16) * @as(c_int, 1024)) * @as(c_ulong, 1024);

-const MI_MAX_ALIGN_SIZE = 16;
+pub const MI_MAX_ALIGN_SIZE = 16;

pub fn mustUseAlignedAlloc(alignment: std.mem.Alignment) bool {
  return alignment.toByteUnits() > MI_MAX_ALIGN_SIZE;

@@ -321,8 +321,9 @@ pub const ByteWriter = Writer(*std.io.FixedBufferStream([]u8));
pub const FileWriter = Writer(std.fs.File);

pub const api = struct {
  // these are in sync with BunLoaderType in headers-handwritten.h
  pub const Loader = enum(u8) {
-    _none = 255,
+    _none = 254,
    jsx = 1,
    js = 2,
    ts = 3,
@@ -3052,173 +3053,8 @@ pub const api = struct {

  security_scanner: ?[]const u8 = null,

-  pub fn decode(reader: anytype) anyerror!BunInstall {
-    var this = std.mem.zeroes(BunInstall);
-
-    while (true) {
-      switch (try reader.readByte()) {
-        0 => {
-          return this;
-        },
-
-        1 => {
-          this.default_registry = try reader.readValue(NpmRegistry);
-        },
-        2 => {
-          this.scoped = try reader.readValue(NpmRegistryMap);
-        },
-        3 => {
-          this.lockfile_path = try reader.readValue([]const u8);
-        },
-        4 => {
-          this.save_lockfile_path = try reader.readValue([]const u8);
-        },
-        5 => {
-          this.cache_directory = try reader.readValue([]const u8);
-        },
-        6 => {
-          this.dry_run = try reader.readValue(bool);
-        },
-        7 => {
-          this.force = try reader.readValue(bool);
-        },
-        8 => {
-          this.save_dev = try reader.readValue(bool);
-        },
-        9 => {
-          this.save_optional = try reader.readValue(bool);
-        },
-        10 => {
-          this.save_peer = try reader.readValue(bool);
-        },
-        11 => {
-          this.save_lockfile = try reader.readValue(bool);
-        },
-        12 => {
-          this.production = try reader.readValue(bool);
-        },
-        13 => {
-          this.save_yarn_lockfile = try reader.readValue(bool);
-        },
-        14 => {
-          this.native_bin_links = try reader.readArray([]const u8);
-        },
-        15 => {
-          this.disable_cache = try reader.readValue(bool);
-        },
-        16 => {
-          this.disable_manifest_cache = try reader.readValue(bool);
-        },
-        17 => {
-          this.global_dir = try reader.readValue([]const u8);
-        },
-        18 => {
-          this.global_bin_dir = try reader.readValue([]const u8);
-        },
-        19 => {
-          this.frozen_lockfile = try reader.readValue(bool);
-        },
-        20 => {
-          this.exact = try reader.readValue(bool);
-        },
-        21 => {
-          this.concurrent_scripts = try reader.readValue(u32);
-        },
-        else => {
-          return error.InvalidMessage;
-        },
-      }
-    }
-    unreachable;
-  }
-
-  pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
-    if (this.default_registry) |default_registry| {
-      try writer.writeFieldID(1);
-      try writer.writeValue(@TypeOf(default_registry), default_registry);
-    }
-    if (this.scoped) |scoped| {
-      try writer.writeFieldID(2);
-      try writer.writeValue(@TypeOf(scoped), scoped);
-    }
-    if (this.lockfile_path) |lockfile_path| {
-      try writer.writeFieldID(3);
-      try writer.writeValue(@TypeOf(lockfile_path), lockfile_path);
-    }
-    if (this.save_lockfile_path) |save_lockfile_path| {
-      try writer.writeFieldID(4);
-      try writer.writeValue(@TypeOf(save_lockfile_path), save_lockfile_path);
-    }
-    if (this.cache_directory) |cache_directory| {
-      try writer.writeFieldID(5);
-      try writer.writeValue(@TypeOf(cache_directory), cache_directory);
-    }
-    if (this.dry_run) |dry_run| {
-      try writer.writeFieldID(6);
-      try writer.writeInt(@as(u8, @intFromBool(dry_run)));
-    }
-    if (this.force) |force| {
-      try writer.writeFieldID(7);
-      try writer.writeInt(@as(u8, @intFromBool(force)));
-    }
-    if (this.save_dev) |save_dev| {
-      try writer.writeFieldID(8);
-      try writer.writeInt(@as(u8, @intFromBool(save_dev)));
-    }
-    if (this.save_optional) |save_optional| {
-      try writer.writeFieldID(9);
-      try writer.writeInt(@as(u8, @intFromBool(save_optional)));
-    }
-    if (this.save_peer) |save_peer| {
-      try writer.writeFieldID(10);
-      try writer.writeInt(@as(u8, @intFromBool(save_peer)));
-    }
-    if (this.save_lockfile) |save_lockfile| {
-      try writer.writeFieldID(11);
-      try writer.writeInt(@as(u8, @intFromBool(save_lockfile)));
-    }
-    if (this.production) |production| {
-      try writer.writeFieldID(12);
-      try writer.writeInt(@as(u8, @intFromBool(production)));
-    }
-    if (this.save_yarn_lockfile) |save_yarn_lockfile| {
-      try writer.writeFieldID(13);
-      try writer.writeInt(@as(u8, @intFromBool(save_yarn_lockfile)));
-    }
-    if (this.native_bin_links) |native_bin_links| {
-      try writer.writeFieldID(14);
-      try writer.writeArray([]const u8, native_bin_links);
-    }
-    if (this.disable_cache) |disable_cache| {
-      try writer.writeFieldID(15);
-      try writer.writeInt(@as(u8, @intFromBool(disable_cache)));
-    }
-    if (this.disable_manifest_cache) |disable_manifest_cache| {
-      try writer.writeFieldID(16);
-      try writer.writeInt(@as(u8, @intFromBool(disable_manifest_cache)));
-    }
-    if (this.global_dir) |global_dir| {
-      try writer.writeFieldID(17);
-      try writer.writeValue(@TypeOf(global_dir), global_dir);
-    }
-    if (this.global_bin_dir) |global_bin_dir| {
-      try writer.writeFieldID(18);
-      try writer.writeValue(@TypeOf(global_bin_dir), global_bin_dir);
-    }
-    if (this.frozen_lockfile) |frozen_lockfile| {
-      try writer.writeFieldID(19);
-      try writer.writeInt(@as(u8, @intFromBool(frozen_lockfile)));
-    }
-    if (this.exact) |exact| {
-      try writer.writeFieldID(20);
-      try writer.writeInt(@as(u8, @intFromBool(exact)));
-    }
-    if (this.concurrent_scripts) |concurrent_scripts| {
-      try writer.writeFieldID(21);
-      try writer.writeInt(concurrent_scripts);
-    }
-    try writer.endMessage();
-  }
+  minimum_release_age_ms: ?f64 = null,
+  minimum_release_age_excludes: ?[]const []const u8 = null,
};

pub const ClientServerModule = struct {

src/ast/P.zig | 119
@@ -170,6 +170,21 @@ pub fn NewParser_(
|
||||
dirname_ref: Ref = Ref.None,
|
||||
import_meta_ref: Ref = Ref.None,
|
||||
hmr_api_ref: Ref = Ref.None,
|
||||
|
||||
/// If bake is enabled and this is a server-side file, we want to use
|
||||
/// special `Response` class inside the `bun:app` built-in module to
|
||||
/// support syntax like `return Response(<jsx />, {...})` or `return Response.render("/my-page")`
|
||||
/// or `return Response.redirect("/other")`.
|
||||
///
|
||||
/// So we'll need to add a `import { Response } from 'bun:app'` to the
|
||||
/// top of the file
|
||||
///
|
||||
/// We need to declare this `response_ref` upfront
|
||||
response_ref: Ref = Ref.None,
|
||||
/// We also need to declare the namespace ref for `bun:app` and attach
|
||||
/// it to the symbol so the code generated `e_import_identifier`'s
|
||||
bun_app_namespace_ref: Ref = Ref.None,
|
||||
|
||||
scopes_in_order_visitor_index: usize = 0,
|
||||
has_classic_runtime_warned: bool = false,
|
||||
macro_call_count: MacroCallCountType = 0,
|
||||
@@ -1220,6 +1235,81 @@ pub fn NewParser_(
        };
    }

+   pub fn generateImportStmtForBakeResponse(
+       noalias p: *P,
+       parts: *ListManaged(js_ast.Part),
+   ) !void {
+       bun.assert(!p.response_ref.isNull());
+       bun.assert(!p.bun_app_namespace_ref.isNull());
+       const allocator = p.allocator;
+
+       const import_path = "bun:app";
+
+       const import_record_i = p.addImportRecordByRange(.stmt, logger.Range.None, import_path);
+
+       var declared_symbols = DeclaredSymbol.List{};
+       try declared_symbols.ensureTotalCapacity(allocator, 2);
+
+       var stmts = try allocator.alloc(Stmt, 1);
+
+       declared_symbols.appendAssumeCapacity(
+           DeclaredSymbol{ .ref = p.bun_app_namespace_ref, .is_top_level = true },
+       );
+       try p.module_scope.generated.append(allocator, p.bun_app_namespace_ref);
+
+       const clause_items = try allocator.dupe(js_ast.ClauseItem, &.{
+           js_ast.ClauseItem{
+               .alias = "Response",
+               .original_name = "Response",
+               .alias_loc = logger.Loc{},
+               .name = LocRef{ .ref = p.response_ref, .loc = logger.Loc{} },
+           },
+       });
+
+       declared_symbols.appendAssumeCapacity(DeclaredSymbol{
+           .ref = p.response_ref,
+           .is_top_level = true,
+       });
+
+       // Ensure every e_import_identifier holds the namespace.
+       if (p.options.features.hot_module_reloading) {
+           const symbol = &p.symbols.items[p.response_ref.inner_index];
+           bun.assert(symbol.namespace_alias != null);
+           symbol.namespace_alias.?.import_record_index = import_record_i;
+       }
+
+       try p.is_import_item.put(allocator, p.response_ref, {});
+       try p.named_imports.put(allocator, p.response_ref, js_ast.NamedImport{
+           .alias = "Response",
+           .alias_loc = logger.Loc{},
+           .namespace_ref = p.bun_app_namespace_ref,
+           .import_record_index = import_record_i,
+       });
+
+       stmts[0] = p.s(
+           S.Import{
+               .namespace_ref = p.bun_app_namespace_ref,
+               .items = clause_items,
+               .import_record_index = import_record_i,
+               .is_single_line = true,
+           },
+           logger.Loc{},
+       );
+
+       var import_records = try allocator.alloc(u32, 1);
+       import_records[0] = import_record_i;
+
+       // This import is placed in a part before the main code; however, the
+       // bundler ends up re-ordering it to be after. The order does not
+       // matter, as ESM imports are always hoisted.
+       parts.append(js_ast.Part{
+           .stmts = stmts,
+           .declared_symbols = declared_symbols,
+           .import_record_indices = bun.BabyList(u32).fromOwnedSlice(import_records),
+           .tag = .runtime,
+       }) catch unreachable;
+   }
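
To make the intended source-level effect concrete, here is a minimal TypeScript sketch, assuming a string-level rewrite (the real transform works on the AST and symbol table; the regex and function name here are ours, not Bun's):

// Toy model: if a server-side module uses the global `Response` and has not
// already imported it from "bun:app", inject the named import at the top.
function injectBakeResponseImport(source: string): string {
  const usesResponse = /\bResponse\b/.test(source);
  const alreadyImported = source.includes(`from "bun:app"`);
  if (!usesResponse || alreadyImported) return source;
  return `import { Response } from "bun:app";\n` + source;
}

const before = `export default function handler() {
  return Response.render("/my-page");
}`;
console.log(injectBakeResponseImport(before));
// import { Response } from "bun:app";
// export default function handler() { ... }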

    pub fn generateImportStmt(
        noalias p: *P,
        import_path: string,
@@ -1227,7 +1317,7 @@ pub fn NewParser_(
        parts: *ListManaged(js_ast.Part),
        symbols: anytype,
        additional_stmt: ?Stmt,
-       comptime suffix: string,
+       comptime prefix: string,
        comptime is_internal: bool,
    ) anyerror!void {
        const allocator = p.allocator;
@@ -1237,13 +1327,13 @@ pub fn NewParser_(
        import_record.path.namespace = "runtime";
        import_record.is_internal = is_internal;
        const import_path_identifier = try import_record.path.name.nonUniqueNameString(allocator);
-       var namespace_identifier = try allocator.alloc(u8, import_path_identifier.len + suffix.len);
+       var namespace_identifier = try allocator.alloc(u8, import_path_identifier.len + prefix.len);
        const clause_items = try allocator.alloc(js_ast.ClauseItem, imports.len);
        var stmts = try allocator.alloc(Stmt, 1 + if (additional_stmt != null) @as(usize, 1) else @as(usize, 0));
        var declared_symbols = DeclaredSymbol.List{};
        try declared_symbols.ensureTotalCapacity(allocator, imports.len + 1);
-       bun.copy(u8, namespace_identifier, suffix);
-       bun.copy(u8, namespace_identifier[suffix.len..], import_path_identifier);
+       bun.copy(u8, namespace_identifier, prefix);
+       bun.copy(u8, namespace_identifier[prefix.len..], import_path_identifier);

        const namespace_ref = try p.newSymbol(.other, namespace_identifier);
        declared_symbols.appendAssumeCapacity(.{
@@ -2014,6 +2104,25 @@ pub fn NewParser_(
            .wrap_exports_for_server_reference => {},
        }

+       // Server-side components:
+       // Declare upfront the symbols for "Response" and "bun:app"
+       switch (p.options.features.server_components) {
+           .none, .client_side => {},
+           else => {
+               p.response_ref = try p.declareGeneratedSymbol(.import, "Response");
+               p.bun_app_namespace_ref = try p.newSymbol(
+                   .other,
+                   "import_bun_app",
+               );
+               const symbol = &p.symbols.items[p.response_ref.inner_index];
+               symbol.namespace_alias = .{
+                   .namespace_ref = p.bun_app_namespace_ref,
+                   .alias = "Response",
+                   .import_record_index = std.math.maxInt(u32),
+               };
+           },
+       }
+
        if (p.options.features.hot_module_reloading) {
            p.hmr_api_ref = try p.declareCommonJSSymbol(.unbound, "hmr");
        }
@@ -3071,7 +3180,7 @@ pub fn NewParser_(
        return ref;
    }

-   fn declareGeneratedSymbol(p: *P, kind: Symbol.Kind, comptime name: string) !Ref {
+   pub fn declareGeneratedSymbol(p: *P, kind: Symbol.Kind, comptime name: string) !Ref {
        // The bundler runs the renamer, so it is ok to not append a hash
        if (p.options.bundle) {
            return try declareSymbolMaybeGenerated(p, kind, logger.Loc.Empty, name, true);
@@ -1357,6 +1357,16 @@ pub const Parser = struct {
            );
        }

+       // Bake: transform global `Response` to use `import { Response } from 'bun:app'`
+       if (!p.response_ref.isNull() and is_used_and_has_no_links: {
+           // We only want to do this if the symbol is used and didn't get
+           // bound to some other value.
+           const symbol: *const Symbol = &p.symbols.items[p.response_ref.innerIndex()];
+           break :is_used_and_has_no_links !symbol.hasLink() and symbol.use_count_estimate > 0;
+       }) {
+           try p.generateImportStmtForBakeResponse(&before);
+       }
+
        if (before.items.len > 0 or after.items.len > 0) {
            try parts.ensureUnusedCapacity(before.items.len + after.items.len);
            const parts_len = parts.items.len;
@@ -205,9 +205,18 @@ pub const SideEffects = enum(u1) {
            .bin_ge,
            => {
                if (isPrimitiveWithSideEffects(bin.left.data) and isPrimitiveWithSideEffects(bin.right.data)) {
+                   const left_simplified = simplifyUnusedExpr(p, bin.left);
+                   const right_simplified = simplifyUnusedExpr(p, bin.right);
+
+                   // If both sides would be removed entirely, we can return null to remove the whole expression
+                   if (left_simplified == null and right_simplified == null) {
+                       return null;
+                   }
+
+                   // Otherwise, preserve at least the structure
                    return Expr.joinWithComma(
-                       simplifyUnusedExpr(p, bin.left) orelse bin.left.toEmpty(),
-                       simplifyUnusedExpr(p, bin.right) orelse bin.right.toEmpty(),
+                       left_simplified orelse bin.left.toEmpty(),
+                       right_simplified orelse bin.right.toEmpty(),
                        p.allocator,
                    );
                }
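
A hedged TypeScript toy of the rule above: a comparison whose result is unused keeps only its side-effectful operands, joined by a comma; if both operands are pure, the whole expression is dropped. The types and names are ours, not Bun's.

type Operand = { pure: boolean; text: string };

function simplifyUnused(e: Operand): string | null {
  return e.pure ? null : e.text;
}

function simplifyUnusedComparison(left: Operand, right: Operand): string | null {
  const l = simplifyUnused(left);
  const r = simplifyUnused(right);
  if (l === null && r === null) return null; // both pure: remove entirely
  if (l !== null && r !== null) return `${l}, ${r}`; // keep both effects
  return l ?? r; // keep whichever side has effects
}

console.log(simplifyUnusedComparison({ pure: true, text: "1" }, { pure: true, text: "2" })); // null
console.log(simplifyUnusedComparison({ pure: false, text: "a()" }, { pure: true, text: "2" })); // "a()"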

@@ -194,11 +194,11 @@ pub fn isTSArrowFnJSX(p: anytype) !bool {
    }
    if (p.lexer.token == .t_identifier) {
        try p.lexer.next();
-       if (p.lexer.token == .t_comma) {
+       if (p.lexer.token == .t_comma or p.lexer.token == .t_equals) {
            is_ts_arrow_fn = true;
        } else if (p.lexer.token == .t_extends) {
            try p.lexer.next();
-           is_ts_arrow_fn = p.lexer.token != .t_equals and p.lexer.token != .t_greater_than;
+           is_ts_arrow_fn = p.lexer.token != .t_equals and p.lexer.token != .t_greater_than and p.lexer.token != .t_slash;
        }
    }
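
These .tsx forms illustrate the lookahead cases involved (a comma or `=` after the type parameter means a generic arrow function; `extends` followed by `>` or `/` means JSX). Whether upstream tsc accepts every form identically is our assumption, not stated by the diff.

const a = <T,>(x: T) => x;               // comma after T: arrow fn
const b = <T = string>(x: T) => x;       // `=` (default) after T: arrow fn (new case)
const c = <T extends object>(x: T) => x; // extends + type: arrow fn
// By contrast, `<T extends>` followed by `>` or `/` begins a JSX element.
console.log(a(1), b("s"), c({}));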

@@ -1223,8 +1223,9 @@ pub fn ParseStmt(
    // "module Foo {}"
    // "declare module 'fs' {}"
    // "declare module 'fs';"
-   if (((opts.is_module_scope or opts.is_namespace_scope) and (p.lexer.token == .t_identifier or
-       (p.lexer.token == .t_string_literal and opts.is_typescript_declare))))
+   if (!p.lexer.has_newline_before and
+       (opts.is_module_scope or opts.is_namespace_scope) and
+       (p.lexer.token == .t_identifier or (p.lexer.token == .t_string_literal and opts.is_typescript_declare)))
    {
        return p.parseTypeScriptNamespaceStmt(loc, opts);
    }

@@ -827,24 +827,25 @@ pub fn ParseSuffix(
const optional_chain = &optional_chain_;
while (true) {
if (p.lexer.loc().start == p.after_arrow_body_loc.start) {
while (true) {
switch (p.lexer.token) {
.t_comma => {
if (level.gte(.comma)) {
break;
}
defer left_and_out.* = left_value;
next_token: switch (p.lexer.token) {
.t_comma => {
if (level.gte(.comma)) {
return;
}

try p.lexer.next();
left.* = p.newExpr(E.Binary{
.op = .bin_comma,
.left = left.*,
.right = try p.parseExpr(.comma),
}, left.loc);
},
else => {
break;
},
}
try p.lexer.next();
left.* = p.newExpr(E.Binary{
.op = .bin_comma,
.left = left.*,
.right = try p.parseExpr(.comma),
}, left.loc);

continue :next_token p.lexer.token;
},
else => {
return;
},
}
}
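
The hunk above migrates a `while (true) { switch ... break }` loop to Zig's labeled-switch `continue :next_token` form; either way it is a left fold over the comma operator. A loose TypeScript analogue of that fold (our names, right operand stubbed):

type Tok = "," | "eof";

function foldCommaSuffix(tokens: Tok[], left: string): string {
  let pos = 0;
  while (tokens[pos] === ",") {
    pos++; // consume ','
    left = `(${left}, expr${pos})`; // parse of the right operand is stubbed
  }
  return left; // any other token ends the suffix loop
}

console.log(foldCommaSuffix([",", ",", "eof"], "a")); // ((a, expr1), expr2)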

@@ -210,7 +210,7 @@ pub fn ParseTypescript(
    p.popScope();

    if (!opts.is_typescript_declare) {
-       name.ref = bun.handleOom(p.declareSymbol(.ts_namespace, name_loc, name_text));
+       name.ref = try p.declareSymbol(.ts_namespace, name_loc, name_text);
        try p.ref_to_ts_namespace_member.put(p.allocator, name.ref.?, ns_member_data);
    }

@@ -136,6 +136,9 @@ pub fn CreateBinaryExpressionVisitor(
    // "(0, this.fn)()" => "(0, this.fn)()"
    if (p.options.features.minify_syntax) {
        if (SideEffects.simplifyUnusedExpr(p, e_.left)) |simplified_left| {
+           if (simplified_left.isEmpty()) {
+               return e_.right;
+           }
            e_.left = simplified_left;
        } else {
            // The left operand has no side effects, but we need to preserve

@@ -609,7 +609,8 @@ pub fn VisitExpr(
        p.delete_target = dot.data;
    }

-   return p.visitExprInOut(dot, in);
+   // don't call visitExprInOut on `dot` because we've already visited `target` above!
+   return dot;
}

// Handle property rewrites to ensure things
@@ -1444,9 +1445,20 @@ pub fn VisitExpr(
    // Why? Because we *don't* want to check for uses of
    // `useState` _inside_ React, and we know React uses
    // commonjs so it will never be `.e_import_identifier`.
-   e_.target.data == .e_import_identifier) {
+   check_for_usestate: {
+       if (e_.target.data == .e_import_identifier) break :check_for_usestate true;
+       // Also check for `React.useState(...)`
+       if (e_.target.data == .e_dot and e_.target.data.e_dot.target.data == .e_import_identifier) {
+           const id = e_.target.data.e_dot.target.data.e_import_identifier;
+           const name = p.symbols.items[id.ref.innerIndex()].original_name;
+           break :check_for_usestate bun.strings.eqlComptime(name, "React");
+       }
+       break :check_for_usestate false;
+   }) {
    bun.assert(p.options.features.server_components.isServerSide());
-   if (bun.strings.eqlComptime(original_name, "useState")) {
+   if (!bun.strings.startsWith(p.source.path.pretty, "node_modules") and
+       bun.strings.eqlComptime(original_name, "useState"))
+   {
        p.log.addError(
            p.source,
            expr.loc,
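
Toy shape of the widened check, in TypeScript: flag `useState(...)` when the callee is an imported identifier, and now also `React.useState(...)` when `React` is an imported identifier, skipping files under node_modules. The AST types here are ours, not Bun's.

type CallTarget =
  | { kind: "importIdent"; name: string }
  | { kind: "member"; object: CallTarget; property: string };

function isUseStateCall(target: CallTarget, filePath: string): boolean {
  if (filePath.startsWith("node_modules")) return false;
  if (target.kind === "importIdent") return target.name === "useState";
  return (
    target.property === "useState" &&
    target.object.kind === "importIdent" &&
    target.object.name === "React"
  );
}

console.log(isUseStateCall({ kind: "importIdent", name: "useState" }, "src/page.tsx")); // true
console.log(
  isUseStateCall(
    { kind: "member", object: { kind: "importIdent", name: "React" }, property: "useState" },
    "node_modules/react/index.js",
  ),
); // false (inside node_modules)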

@@ -1315,10 +1315,10 @@ pub fn VisitStmt(
        try p.top_level_enums.append(p.allocator, data.name.ref.?);
    }

-   bun.handleOom(p.recordDeclaredSymbol(data.name.ref.?));
-   bun.handleOom(p.pushScopeForVisitPass(.entry, stmt.loc));
+   try p.recordDeclaredSymbol(data.name.ref.?);
+   try p.pushScopeForVisitPass(.entry, stmt.loc);
    defer p.popScope();
-   bun.handleOom(p.recordDeclaredSymbol(data.arg));
+   try p.recordDeclaredSymbol(data.arg);

    const allocator = p.allocator;
    // Scan ahead for any variables inside this namespace. This must be done
@@ -1327,7 +1327,7 @@ pub fn VisitStmt(
    // We need to convert the uses into property accesses on the namespace.
    for (data.values) |value| {
        if (value.ref.isValid()) {
-           bun.handleOom(p.is_exported_inside_namespace.put(allocator, value.ref, data.arg));
+           try p.is_exported_inside_namespace.put(allocator, value.ref, data.arg);
        }
    }

@@ -1336,7 +1336,7 @@ pub fn VisitStmt(
    // without initializers are initialized to undefined.
    var next_numeric_value: ?f64 = 0.0;

-   var value_exprs = bun.handleOom(ListManaged(Expr).initCapacity(allocator, data.values.len));
+   var value_exprs = try ListManaged(Expr).initCapacity(allocator, data.values.len);

    var all_values_are_pure = true;

src/bake.zig
@@ -27,9 +27,6 @@ pub const UserOptions = struct {

    /// Currently, this function must run at the top of the event loop.
    pub fn fromJS(config: JSValue, global: *jsc.JSGlobalObject) !UserOptions {
-       if (!config.isObject()) {
-           return global.throwInvalidArguments("'" ++ api_name ++ "' is not an object", .{});
-       }
        var arena = std.heap.ArenaAllocator.init(bun.default_allocator);
        errdefer arena.deinit();
        const alloc = arena.allocator();
@@ -38,6 +35,38 @@ pub const UserOptions = struct {
        errdefer allocations.free();
        var bundler_options = SplitBundlerOptions.empty;

+       if (!config.isObject()) {
+           // Allow users to do `export default { app: 'react' }` for convenience
+           if (config.isString()) {
+               const bunstr = try config.toBunString(global);
+               defer bunstr.deref();
+               const utf8_string = bunstr.toUTF8(bun.default_allocator);
+               defer utf8_string.deinit();
+
+               if (bun.strings.eql(utf8_string.byteSlice(), "react")) {
+                   const root = bun.getcwdAlloc(alloc) catch |err| switch (err) {
+                       error.OutOfMemory => {
+                           return global.throwOutOfMemory();
+                       },
+                       else => {
+                           return global.throwError(err, "while querying current working directory");
+                       },
+                   };
+
+                   const framework = try Framework.react(alloc);
+
+                   return UserOptions{
+                       .arena = arena,
+                       .allocations = allocations,
+                       .root = root,
+                       .framework = framework,
+                       .bundler_options = bundler_options,
+                   };
+               }
+           }
+           return global.throwInvalidArguments("'" ++ api_name ++ "' is not an object", .{});
+       }

        if (try config.getOptional(global, "bundlerOptions", JSValue)) |js_options| {
            if (try js_options.getOptional(global, "server", JSValue)) |server_options| {
                bundler_options.server = try BuildConfigSubset.fromJS(global, server_options);
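
Hypothetical usage of the shorthand accepted above. That `app` takes a bare "react" string at the serve() level is our reading of fromJS plus the bake.d.ts hunks further down; treat this as a sketch, not documented API.

import { serve } from "bun";

const server = serve({
  port: 3000,
  app: "react" as any, // string shorthand for the built-in React framework preset (assumption)
  fetch() {
    return new Response("fallback handler");
  },
});
console.log(`listening on ${server.port}`);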

@@ -1,5 +1,6 @@
// clang-format off
#include "BakeSourceProvider.h"
+#include "DevServerSourceProvider.h"
#include "BakeGlobalObject.h"
#include "JavaScriptCore/CallData.h"
#include "JavaScriptCore/Completion.h"
@@ -78,6 +79,34 @@ extern "C" JSC::EncodedJSValue BakeLoadServerHmrPatch(GlobalObject* global, BunS
    return JSC::JSValue::encode(result);
}

+extern "C" JSC::EncodedJSValue BakeLoadServerHmrPatchWithSourceMap(GlobalObject* global, BunString source, const char* sourceMapJSONPtr, size_t sourceMapJSONLength) {
+    JSC::VM& vm = global->vm();
+    auto scope = DECLARE_THROW_SCOPE(vm);
+
+    String string = "bake://server.patch.js"_s;
+    JSC::SourceOrigin origin = JSC::SourceOrigin(WTF::URL(string));
+
+    // Use DevServerSourceProvider with the source map JSON
+    auto provider = DevServerSourceProvider::create(
+        global,
+        source.toWTFString(),
+        sourceMapJSONPtr,
+        sourceMapJSONLength,
+        origin,
+        WTFMove(string),
+        WTF::TextPosition(),
+        JSC::SourceProviderSourceType::Program
+    );
+
+    JSC::SourceCode sourceCode = JSC::SourceCode(provider);
+
+    JSC::JSValue result = vm.interpreter.executeProgram(sourceCode, global, global);
+    RETURN_IF_EXCEPTION(scope, {});
+
+    RELEASE_ASSERT(result);
+    return JSC::JSValue::encode(result);
+}
+
extern "C" JSC::EncodedJSValue BakeGetModuleNamespace(
    JSC::JSGlobalObject* global,
    JSC::JSValue keyValue
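
Not Bun's implementation, but a TypeScript sketch of the idea: evaluate the server HMR patch as a program whose name and source map travel with it, so thrown stacks can point back at original files. Plain JS runtimes approximate this with magic comments:

const patchSource = `throw new Error("boom")`;
const sourceMapJSON = { version: 3, sources: ["src/app.ts"], names: [], mappings: "AAAA" };
const dataUrl =
  "data:application/json;base64," +
  Buffer.from(JSON.stringify(sourceMapJSON)).toString("base64");
const code =
  `${patchSource}\n//# sourceURL=bake://server.patch.js\n//# sourceMappingURL=${dataUrl}`;
try {
  new Function(code)(); // evaluate the patch program
} catch (err) {
  // Frames attribute to bake://server.patch.js; remapping to src/app.ts
  // requires the runtime to honor the inline map (e.g. node --enable-source-maps).
  console.log((err as Error).stack);
}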

@@ -1,5 +1,5 @@
// @ts-ignore
-import { fn, t } from "../codegen/bindgen-lib";
+import { fn, t } from "bindgen";
export const getDeinitCountForTesting = fn({
  args: {},
  ret: t.usize,
File diff suppressed because it is too large
@@ -269,9 +269,11 @@ pub fn IncrementalGraph(comptime side: bake.Side) type {
    /// All part contents
    current_chunk_parts: ArrayListUnmanaged(switch (side) {
        .client => FileIndex,
-       // These slices do not outlive the bundler, and must
-       // be joined before its arena is deinitialized.
-       .server => []const u8,
+       // This memory is allocated by the dev server allocator
+       .server => bun.ptr.OwnedIn(
+           []const u8,
+           bun.bake.DevServer.DevAllocator,
+       ),
    }),

    /// Asset IDs, which can be printed as hex in '/_bun/asset/{hash}.css'
@@ -280,6 +282,10 @@ pub fn IncrementalGraph(comptime side: bake.Side) type {
        .server => void,
    },

+   /// Source maps for server chunks and the file indices to track which
+   /// file each chunk comes from
+   current_chunk_source_maps: if (side == .server) ArrayListUnmanaged(CurrentChunkSourceMapData) else void = if (side == .server) .empty,
+
    pub const empty: Self = .{
        .bundled_files = .empty,
        .stale_files = .empty,
@@ -293,6 +299,16 @@ pub fn IncrementalGraph(comptime side: bake.Side) type {
        .current_chunk_parts = .empty,

        .current_css_files = if (side == .client) .empty,
+       .current_chunk_source_maps = if (side == .server) .empty else {},
    };

+   const CurrentChunkSourceMapData = struct {
+       file_index: FileIndex,
+       source_map: PackedMap.Shared,
+
+       pub fn deinit(self: *CurrentChunkSourceMapData) void {
+           self.source_map.deinit();
+       }
+   };
+
    pub const File = switch (side) {
@@ -378,9 +394,19 @@ pub fn IncrementalGraph(comptime side: bake.Side) type {
        .edges = g.edges.deinit(alloc),
        .edges_free_list = g.edges_free_list.deinit(alloc),
        .current_chunk_len = {},
-       .current_chunk_parts = g.current_chunk_parts.deinit(alloc),
-       .current_css_files = if (comptime side == .client)
-           g.current_css_files.deinit(alloc),
+       .current_chunk_parts = {
+           if (comptime side == .server) {
+               for (g.current_chunk_parts.items) |*part| part.deinit();
+           }
+           g.current_chunk_parts.deinit(alloc);
+       },
+       .current_css_files = if (comptime side == .client) g.current_css_files.deinit(alloc),
+       .current_chunk_source_maps = if (side == .server) {
+           for (g.current_chunk_source_maps.items) |*source_map| {
+               source_map.deinit();
+           }
+           g.current_chunk_source_maps.deinit(alloc);
+       },
    });
}

@@ -412,6 +438,11 @@ pub fn IncrementalGraph(comptime side: bake.Side) type {
            }
            source_maps += file.source_map.memoryCost();
        }
+   } else if (side == .server) {
+       graph += DevServer.memoryCostArrayList(g.current_chunk_source_maps);
+       for (g.current_chunk_source_maps.items) |item| {
+           source_maps += item.source_map.memoryCost();
+       }
    }
    return .{
        .graph = graph,
@@ -445,7 +476,7 @@ pub fn IncrementalGraph(comptime side: bake.Side) type {
    g: *Self,
    ctx: *HotUpdateContext,
    index: bun.ast.Index,
-   content_: union(enum) {
+   _content: union(enum) {
        js: struct {
            code: JsCode,
            source_map: ?struct {
@@ -457,13 +488,16 @@ pub fn IncrementalGraph(comptime side: bake.Side) type {
        },
        is_ssr_graph: bool,
    ) !void {
-       var content = content_;
+       var content = _content;
        const dev = g.owner();
        dev.graph_safety_lock.assertLocked();

        const path = ctx.sources[index.get()].path;
        const key = path.keyForIncrementalGraph();

+       const log = bun.Output.scoped(.IncrementalGraphReceiveChunk, .visible);
+       log("receiveChunk({s}, {s})", .{ @tagName(side), key });
+
        if (Environment.allow_assert) {
            switch (content) {
                .css => {},
@@ -546,7 +580,7 @@ pub fn IncrementalGraph(comptime side: bake.Side) type {
    bun.assert(html_route_bundle_index == null); // suspect behind #17956
    if (source_map.chunk.buffer.len() > 0) {
        break :blk .{ .some = PackedMap.newNonEmpty(
-           source_map.chunk,
+           &source_map.chunk,
            source_map.escaped_source.take().?,
        ) };
    }
@@ -632,11 +666,47 @@ pub fn IncrementalGraph(comptime side: bake.Side) type {
        }
    }
    if (content == .js) {
-       try g.current_chunk_parts.append(dev.allocator(), content.js.code);
+       try g.current_chunk_parts.append(
+           dev.allocator(),
+           bun.ptr.OwnedIn([]const u8, bun.bake.DevServer.DevAllocator).fromRawIn(
+               content.js.code,
+               dev.dev_allocator(),
+           ),
+       );
        g.current_chunk_len += content.js.code.len;
-       if (content.js.source_map) |*source_map| {
-           source_map.chunk.buffer.deinit();
-           source_map.escaped_source.deinit();
+
+       // TODO: we probably want to store SSR chunks but not server
+       // chunks; not 100% sure.
+       const should_immediately_free_sourcemap = false;
+       if (should_immediately_free_sourcemap) {
+           @compileError("Not implemented the codepath to free the sourcemap");
+       } else {
+           if (content.js.source_map) |*source_map| append_empty: {
+               defer source_map.chunk.deinit();
+               defer source_map.escaped_source.deinit();
+               if (source_map.chunk.buffer.len() > 0) {
+                   const escaped_source = source_map.escaped_source.take() orelse break :append_empty;
+                   const packed_map: PackedMap.Shared = .{ .some = PackedMap.newNonEmpty(
+                       &source_map.chunk,
+                       escaped_source,
+                   ) };
+                   try g.current_chunk_source_maps.append(dev.allocator(), CurrentChunkSourceMapData{
+                       .source_map = packed_map,
+                       .file_index = file_index,
+                   });
+                   return;
+               }
+           }
+
+           // Must precompute this. Otherwise, source maps won't have
+           // the info needed to concatenate VLQ mappings.
+           const count: u32 = @intCast(bun.strings.countChar(content.js.code, '\n'));
+           try g.current_chunk_source_maps.append(dev.allocator(), .{
+               .file_index = file_index,
+               .source_map = PackedMap.Shared{
+                   .line_count = .init(count),
+               },
+           });
        }
    }
},
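
Why the newline count is precomputed above: concatenated chunks share one source map, and each chunk's VLQ mappings are relative to where the chunk starts in the joined output, so a chunk with no map of its own must still advance the generated-line counter. A hedged TypeScript toy:

function countNewlines(code: string): number {
  let n = 0;
  for (let i = 0; i < code.length; i++) if (code[i] === "\n") n++;
  return n;
}

const chunks = ["a();\nb();\n", "c();\n"];
let generatedLine = 0;
for (const chunk of chunks) {
  console.log(`chunk mappings start at generated line ${generatedLine}`);
  generatedLine += countNewlines(chunk);
}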

@@ -1598,10 +1668,17 @@ pub fn IncrementalGraph(comptime side: bake.Side) type {
    pub fn reset(g: *Self) void {
        g.owner().graph_safety_lock.assertLocked();
        g.current_chunk_len = 0;
-       g.current_chunk_parts.clearRetainingCapacity();
-
        if (comptime side == .client) {
            g.current_css_files.clearRetainingCapacity();
+       } else if (comptime side == .server) {
+           for (g.current_chunk_parts.items) |*part| part.deinit();
+
+           for (g.current_chunk_source_maps.items) |*sourcemap| sourcemap.deinit();
+           g.current_chunk_source_maps.clearRetainingCapacity();
        }
+
+       g.current_chunk_parts.clearRetainingCapacity();
    }

    const TakeJSBundleOptions = switch (side) {
@@ -1614,6 +1691,7 @@ pub fn IncrementalGraph(comptime side: bake.Side) type {
        },
        .server => struct {
            kind: ChunkKind,
+           script_id: SourceMapStore.Key,
        },
    };

@@ -1729,7 +1807,7 @@ pub fn IncrementalGraph(comptime side: bake.Side) type {
        // entry is an index into files
        .client => files[entry.get()].unpack().jsCode().?,
        // entry is the '[]const u8' itself
-       .server => entry,
+       .server => entry.get(),
    });
}
list.appendSliceAssumeCapacity(end);

@@ -1756,46 +1834,71 @@ pub fn IncrementalGraph(comptime side: bake.Side) type {
};

/// Uses `arena` as a temporary allocator, fills in all fields of `out` except ref_count
-pub fn takeSourceMap(g: *Self, arena: std.mem.Allocator, gpa: Allocator, out: *SourceMapStore.Entry) bun.OOM!void {
-   if (comptime side == .server) @compileError("not implemented");
-
+pub fn takeSourceMap(g: *@This(), _: std.mem.Allocator, gpa: Allocator, out: *SourceMapStore.Entry) bun.OOM!void {
    const paths = g.bundled_files.keys();
-   const files = g.bundled_files.values();
-
-   // This buffer is temporary, holding the quoted source paths, joined with commas.
-   var source_map_strings = std.ArrayList(u8).init(arena);
-   defer source_map_strings.deinit();
+   switch (side) {
+       .client => {
+           const files = g.bundled_files.values();

-   const buf = bun.path_buffer_pool.get();
-   defer bun.path_buffer_pool.put(buf);
+           const buf = bun.path_buffer_pool.get();
+           defer bun.path_buffer_pool.put(buf);

-   var file_paths = try ArrayListUnmanaged([]const u8).initCapacity(gpa, g.current_chunk_parts.items.len);
-   errdefer file_paths.deinit(gpa);
-   var contained_maps: bun.MultiArrayList(PackedMap.Shared) = .empty;
-   try contained_maps.ensureTotalCapacity(gpa, g.current_chunk_parts.items.len);
-   errdefer contained_maps.deinit(gpa);
+           var file_paths = try ArrayListUnmanaged([]const u8).initCapacity(gpa, g.current_chunk_parts.items.len);
+           errdefer file_paths.deinit(gpa);
+           var contained_maps: bun.MultiArrayList(PackedMap.Shared) = .empty;
+           try contained_maps.ensureTotalCapacity(gpa, g.current_chunk_parts.items.len);
+           errdefer contained_maps.deinit(gpa);

-   var overlapping_memory_cost: usize = 0;
+           var overlapping_memory_cost: usize = 0;

-   for (g.current_chunk_parts.items) |file_index| {
-       file_paths.appendAssumeCapacity(paths[file_index.get()]);
-       const source_map = files[file_index.get()].unpack().source_map.clone();
-       if (source_map.get()) |map| {
-           overlapping_memory_cost += map.memoryCost();
-       }
-       contained_maps.appendAssumeCapacity(source_map);
+           for (g.current_chunk_parts.items) |file_index| {
+               file_paths.appendAssumeCapacity(paths[file_index.get()]);
+               const source_map = files[file_index.get()].unpack().source_map.clone();
+               if (source_map.get()) |map| {
+                   overlapping_memory_cost += map.memoryCost();
+               }
+               contained_maps.appendAssumeCapacity(source_map);
+           }
+
+           overlapping_memory_cost += contained_maps.memoryCost() + DevServer.memoryCostSlice(file_paths.items);
+
+           const ref_count = out.ref_count;
+           out.* = .{
+               .dev_allocator = g.dev_allocator(),
+               .ref_count = ref_count,
+               .paths = file_paths.items,
+               .files = contained_maps,
+               .overlapping_memory_cost = @intCast(overlapping_memory_cost),
+           };
+       },
+       .server => {
+           var file_paths = try ArrayListUnmanaged([]const u8).initCapacity(gpa, g.current_chunk_parts.items.len);
+           errdefer file_paths.deinit(gpa);
+           var contained_maps: bun.MultiArrayList(PackedMap.Shared) = .empty;
+           try contained_maps.ensureTotalCapacity(gpa, g.current_chunk_parts.items.len);
+           errdefer contained_maps.deinit(gpa);
+
+           var overlapping_memory_cost: u32 = 0;
+
+           // For server, we use the tracked file indices to get the correct paths
+           for (g.current_chunk_source_maps.items) |item| {
+               file_paths.appendAssumeCapacity(paths[item.file_index.get()]);
+               contained_maps.appendAssumeCapacity(item.source_map.clone());
+               overlapping_memory_cost += @intCast(item.source_map.memoryCost());
+           }
+
+           overlapping_memory_cost += @intCast(contained_maps.memoryCost() + DevServer.memoryCostSlice(file_paths.items));
+
+           out.* = .{
+               .dev_allocator = g.dev_allocator(),
+               .ref_count = out.ref_count,
+               .paths = file_paths.items,
+               .files = contained_maps,
+               .overlapping_memory_cost = overlapping_memory_cost,
+           };
+       },
    }
-
-   overlapping_memory_cost += contained_maps.memoryCost() + DevServer.memoryCostSlice(file_paths.items);
-
-   const ref_count = out.ref_count;
-   out.* = .{
-       .dev_allocator = g.dev_allocator(),
-       .ref_count = ref_count,
-       .paths = file_paths.items,
-       .files = contained_maps,
-       .overlapping_memory_cost = @intCast(overlapping_memory_cost),
-   };
}

fn disconnectAndDeleteFile(g: *Self, file_index: FileIndex) void {

@@ -19,8 +19,8 @@ end_state: struct {
    original_column: i32,
},

-pub fn newNonEmpty(chunk: SourceMap.Chunk, escaped_source: Owned([]u8)) bun.ptr.Shared(*Self) {
-   var buffer = chunk.buffer;
+pub fn newNonEmpty(chunk: *SourceMap.Chunk, escaped_source: Owned([]u8)) bun.ptr.Shared(*Self) {
+   var buffer = &chunk.buffer;
    assert(!buffer.isEmpty());
    const dev_allocator = DevAllocator.downcast(buffer.allocator);
    return .new(.{

@@ -72,6 +72,8 @@ pub const State = enum {
    unqueued,
    /// A bundle associated with this route is happening
    bundling,
+   /// A bundle associated with this route *will happen in the next bundle*
+   deferred_to_next_bundle,
    /// This route was flagged for bundling failures. There are edge cases
    /// where a route can be disconnected from its failures, so the route
    /// imports have to be traced to discover if possible failures still

@@ -76,11 +76,11 @@ pub const Entry = struct {
pub fn renderMappings(map: Entry, kind: ChunkKind, arena: Allocator, gpa: Allocator) ![]u8 {
    var j: StringJoiner = .{ .allocator = arena };
    j.pushStatic("AAAA");
-   try joinVLQ(&map, kind, &j, arena);
+   try joinVLQ(&map, kind, &j, arena, .client);
    return j.done(gpa);
}

-pub fn renderJSON(map: *const Entry, dev: *DevServer, arena: Allocator, kind: ChunkKind, gpa: Allocator) ![]u8 {
+pub fn renderJSON(map: *const Entry, dev: *DevServer, arena: Allocator, kind: ChunkKind, gpa: Allocator, side: bake.Side) ![]u8 {
    const map_files = map.files.slice();
    const paths = map.paths;

@@ -106,13 +106,22 @@

    if (std.fs.path.isAbsolute(path)) {
        const is_windows_drive_path = Environment.isWindows and path[0] != '/';
-       try source_map_strings.appendSlice(if (is_windows_drive_path)
-           "\"file:///"
-       else
-           "\"file://");
+
+       // On the client we prefix the sourcemap path with "file://" and
+       // percent encode it
+       if (side == .client) {
+           try source_map_strings.appendSlice(if (is_windows_drive_path)
+               "\"file:///"
+           else
+               "\"file://");
+       } else {
+           try source_map_strings.append('"');
+       }

        if (Environment.isWindows and !is_windows_drive_path) {
            // UNC namespace -> file://server/share/path.ext
-           bun.strings.percentEncodeWrite(
+           encodeSourceMapPath(
+               side,
                if (path.len > 2 and path[0] == '/' and path[1] == '/')
                    path[2..]
                else

@@ -127,7 +136,7 @@
    // -> file:///path/to/file.js
    // windows drive letter paths have the extra slash added
    // -> file:///C:/path/to/file.js
-   bun.strings.percentEncodeWrite(path, &source_map_strings) catch |err| switch (err) {
+   encodeSourceMapPath(side, path, &source_map_strings) catch |err| switch (err) {
        error.IncompleteUTF8 => @panic("Unexpected: asset with incomplete UTF-8 as file path"),
        error.OutOfMemory => |e| return e,
    };

@@ -175,14 +184,14 @@
    j.pushStatic(
        \\],"names":[],"mappings":"AAAA
    );
-   try joinVLQ(map, kind, &j, arena);
+   try joinVLQ(map, kind, &j, arena, side);

    const json_bytes = try j.doneWithEnd(gpa, "\"}");
    errdefer @compileError("last try should be the final alloc");

    if (bun.FeatureFlags.bake_debugging_features) if (dev.dump_dir) |dump_dir| {
-       const rel_path_escaped = "latest_chunk.js.map";
-       dumpBundle(dump_dir, .client, rel_path_escaped, json_bytes, false) catch |err| {
+       const rel_path_escaped = if (side == .client) "latest_chunk.js.map" else "latest_hmr.js.map";
+       dumpBundle(dump_dir, if (side == .client) .client else .server, rel_path_escaped, json_bytes, false) catch |err| {
            bun.handleErrorReturnTrace(err, @errorReturnTrace());
            Output.warn("Could not dump bundle: {}", .{err});
        };

@@ -191,7 +200,22 @@
    return json_bytes;
}

-fn joinVLQ(map: *const Entry, kind: ChunkKind, j: *StringJoiner, arena: Allocator) !void {
+fn encodeSourceMapPath(
+    side: bake.Side,
+    utf8_input: []const u8,
+    array_list: *std.ArrayList(u8),
+) error{ OutOfMemory, IncompleteUTF8 }!void {
+    // On the client, percent encode everything so it works in the browser
+    if (side == .client) {
+        return bun.strings.percentEncodeWrite(utf8_input, array_list);
+    }
+
+    const writer = array_list.writer();
+    try bun.js_printer.writePreQuotedString(utf8_input, @TypeOf(writer), writer, '"', false, true, .utf8);
+}
+
+fn joinVLQ(map: *const Entry, kind: ChunkKind, j: *StringJoiner, arena: Allocator, side: bake.Side) !void {
+    _ = side;
    const map_files = map.files.slice();

    const runtime: bake.HmrRuntime = switch (kind) {
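
A hedged TypeScript model of encodeSourceMapPath's branch: clients get a percent-encoded file:// URL (so browsers can resolve it), while the server path is emitted as a plain JSON string. encodeURI only approximates percentEncodeWrite; the names here are ours.

function encodeSourceMapPath(side: "client" | "server", path: string): string {
  if (side === "client") {
    const isWindowsDrivePath = /^[A-Za-z]:/.test(path);
    const prefix = isWindowsDrivePath ? "file:///" : "file://";
    return JSON.stringify(prefix + encodeURI(path.replaceAll("\\", "/")));
  }
  return JSON.stringify(path); // server: just a quoted path
}

console.log(encodeSourceMapPath("client", "C:\\proj\\src\\app.ts")); // "file:///C:/proj/src/app.ts"
console.log(encodeSourceMapPath("server", "/proj/src/app.ts"));      // "/proj/src/app.ts"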

src/bake/DevServerSourceProvider.cpp (new file)
@@ -0,0 +1,17 @@
#include "DevServerSourceProvider.h"
#include "BunBuiltinNames.h"
#include "BunString.h"

// The Zig implementation will be provided to handle registration
extern "C" void Bun__addDevServerSourceProvider(void* bun_vm, Bake::DevServerSourceProvider* opaque_source_provider, BunString* specifier);

// Export functions for Zig to access DevServerSourceProvider
extern "C" BunString DevServerSourceProvider__getSourceSlice(Bake::DevServerSourceProvider* provider)
{
    return Bun::toStringView(provider->source());
}

extern "C" MiCString DevServerSourceProvider__getSourceMapJSON(Bake::DevServerSourceProvider* provider)
{
    return provider->sourceMapJSON();
}

src/bake/DevServerSourceProvider.h (new file)
@@ -0,0 +1,74 @@
#pragma once
#include "root.h"
#include "headers-handwritten.h"
#include "JavaScriptCore/SourceOrigin.h"
#include "ZigGlobalObject.h"
#include "MiString.h"

namespace Bake {

class DevServerSourceProvider;

// Functions to be implemented in Zig to register/unregister the source provider
extern "C" void Bun__addDevServerSourceProvider(void* bun_vm, DevServerSourceProvider* opaque_source_provider, BunString* specifier);
extern "C" void Bun__removeDevServerSourceProvider(void* bun_vm, DevServerSourceProvider* opaque_source_provider, BunString* specifier);

class DevServerSourceProvider final : public JSC::StringSourceProvider {
public:
    static Ref<DevServerSourceProvider> create(
        JSC::JSGlobalObject* globalObject,
        const String& source,
        const char* sourceMapJSONPtr,
        size_t sourceMapJSONLength,
        const JSC::SourceOrigin& sourceOrigin,
        String&& sourceURL,
        const TextPosition& startPosition,
        JSC::SourceProviderSourceType sourceType)
    {
        auto provider = adoptRef(*new DevServerSourceProvider(source, sourceMapJSONPtr, sourceMapJSONLength, sourceOrigin, WTFMove(sourceURL), startPosition, sourceType));
        auto* zigGlobalObject = jsCast<::Zig::GlobalObject*>(globalObject);
        auto specifier = Bun::toString(provider->sourceURL());
        provider->m_globalObject = zigGlobalObject;
        provider->m_specifier = specifier;
        Bun__addDevServerSourceProvider(zigGlobalObject->bunVM(), provider.ptr(), &specifier);
        return provider;
    }

    MiCString sourceMapJSON() const
    {
        return m_sourceMapJSON.asCString();
    }

private:
    DevServerSourceProvider(
        const String& source,
        const char* sourceMapJSONPtr,
        size_t sourceMapJSONLength,
        const JSC::SourceOrigin& sourceOrigin,
        String&& sourceURL,
        const TextPosition& startPosition,
        JSC::SourceProviderSourceType sourceType)
        : StringSourceProvider(
              source,
              sourceOrigin,
              JSC::SourceTaintedOrigin::Untainted,
              WTFMove(sourceURL),
              startPosition,
              sourceType)
        , m_sourceMapJSON(sourceMapJSONPtr, sourceMapJSONLength)
    {
    }

    ~DevServerSourceProvider()
    {
        if (m_globalObject) {
            Bun__removeDevServerSourceProvider(m_globalObject->bunVM(), this, &m_specifier);
        }
    }

    MiString m_sourceMapJSON;
    Zig::GlobalObject* m_globalObject;
    BunString m_specifier;
};

} // namespace Bake
@@ -822,6 +822,28 @@ pub const MatchedParams = struct {
    key: []const u8,
    value: []const u8,
};

+/// Convert the matched params to a JavaScript object
+/// Returns null if there are no params
+pub fn toJS(self: *const MatchedParams, global: *jsc.JSGlobalObject) JSValue {
+    const params_array = self.params.slice();
+
+    if (params_array.len == 0) {
+        return JSValue.null;
+    }
+
+    // Create a JavaScript object with params
+    const obj = JSValue.createEmptyObject(global, params_array.len);
+    for (params_array) |param| {
+        const key_str = bun.String.cloneUTF8(param.key);
+        defer key_str.deref();
+        const value_str = bun.String.cloneUTF8(param.value);
+        defer value_str.deref();
+
+        _ = obj.putBunStringOneOrArray(global, &key_str, value_str.toJS(global)) catch unreachable;
+    }
+    return obj;
+}
};

/// Fast enough for development to be seamless, but avoids building a
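
Toy TypeScript model of MatchedParams.toJS: matched route params become a plain object, or null when nothing dynamic matched. Collapsing duplicate keys into arrays mirrors our reading of putBunStringOneOrArray.

type Param = { key: string; value: string };

function paramsToJS(params: Param[]): Record<string, string | string[]> | null {
  if (params.length === 0) return null;
  const obj: Record<string, string | string[]> = {};
  for (const { key, value } of params) {
    const existing = obj[key];
    if (existing === undefined) obj[key] = value;
    else if (Array.isArray(existing)) existing.push(value);
    else obj[key] = [existing, value];
  }
  return obj;
}

console.log(paramsToJS([{ key: "slug", value: "hello-world" }])); // { slug: "hello-world" }
console.log(paramsToJS([])); // null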

src/bake/bake.d.ts (vendored)
@@ -5,8 +5,6 @@
// /// <reference path="/path/to/bun/src/bake/bake.d.ts" />

declare module "bun" {
- type Awaitable<T> = T | Promise<T>;
-
  declare namespace Bake {
    interface Options {
      /**
@@ -369,7 +367,7 @@ declare module "bun" {
       * A common pattern would be to enforce the object is
       * `{ default: ReactComponent }`
       */
-     render: (request: Request, routeMetadata: RouteMetadata) => Awaitable<Response>;
+     render: (request: Request, routeMetadata: RouteMetadata) => MaybePromise<Response>;
      /**
       * Prerendering does not use a request, and is allowed to generate
       * multiple responses. This is used for static site generation, but not
@@ -379,7 +377,7 @@ declare module "bun" {
       * Note that `import.meta.env.STATIC` will be inlined to true during
       * a static build.
       */
-     prerender?: (routeMetadata: RouteMetadata) => Awaitable<PrerenderResult | null>;
+     prerender?: (routeMetadata: RouteMetadata) => MaybePromise<PrerenderResult | null>;
      // TODO: prerenderWithoutProps (for partial prerendering)
      /**
       * For prerendering routes with dynamic parameters, such as `/blog/:slug`,
@@ -409,7 +407,7 @@ declare module "bun" {
       * return { exhaustive: false };
       * }
       */
-     getParams?: (paramsMetadata: ParamsMetadata) => Awaitable<GetParamIterator>;
+     getParams?: (paramsMetadata: ParamsMetadata) => MaybePromise<GetParamIterator>;
      /**
       * When a dynamic build uses static assets, Bun can map content types in the
       * user's `Accept` header to the different static files.
@@ -448,7 +446,7 @@ declare module "bun" {
    }

    interface DevServerHookEntryPoint {
-     default: (dev: DevServerHookAPI) => Awaitable<void>;
+     default: (dev: DevServerHookAPI) => MaybePromise<void>;
    }

    interface DevServerHookAPI {
@@ -505,7 +503,7 @@ declare module "bun" {
    }
}

-declare interface GenericServeOptions {
+declare interface BaseServeOptions {
  /** Add a fullstack web app to this server using Bun Bake */
  app?: Bake.Options | undefined;
}
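
The hunks above swap the module-local Awaitable<T> alias for the shared MaybePromise<T>; the shape is identical. Defined inline here so the example stands alone:

type MaybePromise<T> = T | Promise<T>;

// A render hook typed this way may return synchronously or asynchronously:
const render = (request: Request): MaybePromise<Response> =>
  request.method === "GET"
    ? new Response("ok")                   // sync path
    : Promise.resolve(new Response("ok")); // async path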

Some files were not shown because too many files have changed in this diff