mirror of
https://github.com/oven-sh/bun
synced 2026-02-05 08:28:55 +00:00
Compare commits: claude/imp...claude/sql — 17 commits

| SHA1 |
| --- |
| 8c74055352 |
| 1acaa618c4 |
| 8eea8c6d7c |
| a90632f658 |
| f9ff380a8d |
| f7d7217cc9 |
| 56d4480e5f |
| e1febfad2d |
| 1977b029d8 |
| 0f05bf49cc |
| b6a4ad805c |
| d2df086596 |
| 7f8ddacfac |
| 2d5379c63c |
| eb851aead3 |
| 7f670cd741 |
| 74695aaf78 |
````diff
@@ -108,9 +108,9 @@ const buildPlatforms = [
   { os: "linux", arch: "x64", distro: "amazonlinux", release: "2023", features: ["docker"] },
   { os: "linux", arch: "x64", baseline: true, distro: "amazonlinux", release: "2023", features: ["docker"] },
   { os: "linux", arch: "x64", profile: "asan", distro: "amazonlinux", release: "2023", features: ["docker"] },
-  { os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.22" },
-  { os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.22" },
-  { os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.22" },
+  { os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.21" },
+  { os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.21" },
+  { os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.21" },
   { os: "windows", arch: "x64", release: "2019" },
   { os: "windows", arch: "x64", baseline: true, release: "2019" },
 ];
@@ -133,9 +133,9 @@ const testPlatforms = [
   { os: "linux", arch: "x64", distro: "ubuntu", release: "24.04", tier: "latest" },
-  { os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "25.04", tier: "latest" },
+  { os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "24.04", tier: "latest" },
-  { os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.22", tier: "latest" },
-  { os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.22", tier: "latest" },
-  { os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.22", tier: "latest" },
+  { os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.21", tier: "latest" },
+  { os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.21", tier: "latest" },
+  { os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.21", tier: "latest" },
   { os: "windows", arch: "x64", release: "2019", tier: "oldest" },
   { os: "windows", arch: "x64", release: "2019", baseline: true, tier: "oldest" },
 ];
@@ -343,7 +343,7 @@ function getZigPlatform() {
     arch: "aarch64",
     abi: "musl",
     distro: "alpine",
-    release: "3.22",
+    release: "3.21",
   };
 }
````

````diff
@@ -1,43 +0,0 @@
----
-allowed-tools: Bash(gh issue view:*), Bash(gh search:*), Bash(gh issue list:*), Bash(gh api:*), Bash(gh issue comment:*)
-description: Find duplicate GitHub issues
----
-
-# Issue deduplication command
-
-Find up to 3 likely duplicate issues for a given GitHub issue.
-
-To do this, follow these steps precisely:
-
-1. Use an agent to check if the GitHub issue (a) is closed, (b) does not need to be deduped (eg. because it is broad product feedback without a specific solution, or positive feedback), or (c) already has a duplicate detection comment (check for the exact HTML marker `<!-- dedupe-bot:marker -->` in the issue comments - ignore other bot comments). If so, do not proceed.
-2. Use an agent to view a GitHub issue, and ask the agent to return a summary of the issue
-3. Then, launch 5 parallel agents to search GitHub for duplicates of this issue, using diverse keywords and search approaches, using the summary from Step 2. **IMPORTANT**: Always scope searches with `repo:owner/repo` to constrain results to the current repository only.
-4. Next, feed the results from Steps 2 and 3 into another agent, so that it can filter out false positives, that are likely not actually duplicates of the original issue. If there are no duplicates remaining, do not proceed.
-5. Finally, comment back on the issue with a list of up to three duplicate issues (or zero, if there are no likely duplicates)
-
-Notes (be sure to tell this to your agents, too):
-
-- Use `gh` to interact with GitHub, rather than web fetch
-- Do not use other tools, beyond `gh` (eg. don't use other MCP servers, file edit, etc.)
-- Make a todo list first
-- Always scope searches with `repo:owner/repo` to prevent cross-repo false positives
-- For your comment, follow the following format precisely (assuming for this example that you found 3 suspected duplicates):
-
----
-
-Found 3 possible duplicate issues:
-
-1. <link to issue>
-2. <link to issue>
-3. <link to issue>
-
-This issue will be automatically closed as a duplicate in 3 days.
-
-- If your issue is a duplicate, please close it and 👍 the existing issue instead
-- To prevent auto-closure, add a comment or 👎 this comment
-
-🤖 Generated with [Claude Code](https://claude.ai/code)
-
-<!-- dedupe-bot:marker -->
-
----
````
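As a sketch of the scoped search Step 3 calls for (the query text is hypothetical; the `repo:` qualifier is what constrains results to a single repository):

```bash
# Hypothetical duplicate search, scoped to one repository
gh search issues "WebSocket crash on close repo:oven-sh/bun" --limit 10
```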
````diff
@@ -1,88 +0,0 @@
-#!/usr/bin/env bun
-import { extname } from "path";
-import { spawnSync } from "child_process";
-
-const input = await Bun.stdin.json();
-
-const toolName = input.tool_name;
-const toolInput = input.tool_input || {};
-const filePath = toolInput.file_path;
-
-// Only process Write, Edit, and MultiEdit tools
-if (!["Write", "Edit", "MultiEdit"].includes(toolName)) {
-  process.exit(0);
-}
-
-const ext = extname(filePath);
-
-// Only format known files
-if (!filePath) {
-  process.exit(0);
-}
-
-function formatZigFile() {
-  try {
-    // Format the Zig file
-    const result = spawnSync("vendor/zig/zig.exe", ["fmt", filePath], {
-      cwd: process.env.CLAUDE_PROJECT_DIR || process.cwd(),
-      encoding: "utf-8",
-    });
-
-    if (result.error) {
-      console.error(`Failed to format ${filePath}: ${result.error.message}`);
-      process.exit(0);
-    }
-
-    if (result.status !== 0) {
-      console.error(`zig fmt failed for ${filePath}:`);
-      if (result.stderr) {
-        console.error(result.stderr);
-      }
-      process.exit(0);
-    }
-  } catch (error) {}
-}
-
-function formatTypeScriptFile() {
-  try {
-    // Format the TypeScript file
-    const result = spawnSync(
-      "./node_modules/.bin/prettier",
-      ["--plugin=prettier-plugin-organize-imports", "--config", ".prettierrc", "--write", filePath],
-      {
-        cwd: process.env.CLAUDE_PROJECT_DIR || process.cwd(),
-        encoding: "utf-8",
-      },
-    );
-  } catch (error) {}
-}
-
-if (ext === ".zig") {
-  formatZigFile();
-} else if (
-  [
-    ".cjs",
-    ".css",
-    ".html",
-    ".js",
-    ".json",
-    ".jsonc",
-    ".jsx",
-    ".less",
-    ".mjs",
-    ".pcss",
-    ".postcss",
-    ".sass",
-    ".scss",
-    ".styl",
-    ".stylus",
-    ".toml",
-    ".ts",
-    ".tsx",
-    ".yaml",
-  ].includes(ext)
-) {
-  formatTypeScriptFile();
-}
-
-process.exit(0);
````
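As a sketch of how this PostToolUse hook can be exercised — the payload shape is inferred from the fields the script reads (`tool_name`, `tool_input.file_path`), and the file path is hypothetical:

```bash
# Feed the hook a sample PostToolUse payload on stdin
echo '{"tool_name":"Edit","tool_input":{"file_path":"src/example.zig"}}' \
  | bun .claude/hooks/post-edit-zig-format.js
```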
````diff
@@ -1,207 +0,0 @@
-#!/usr/bin/env bun
-import { basename, extname } from "path";
-
-const input = await Bun.stdin.json();
-
-const toolName = input.tool_name;
-const toolInput = input.tool_input || {};
-const command = toolInput.command || "";
-const timeout = toolInput.timeout;
-const cwd = input.cwd || "";
-
-// Get environment variables from the hook context
-// Note: We check process.env directly as env vars are inherited
-let useSystemBun = process.env.USE_SYSTEM_BUN;
-
-if (toolName !== "Bash" || !command) {
-  process.exit(0);
-}
-
-function denyWithReason(reason) {
-  const output = {
-    hookSpecificOutput: {
-      hookEventName: "PreToolUse",
-      permissionDecision: "deny",
-      permissionDecisionReason: reason,
-    },
-  };
-  console.log(JSON.stringify(output));
-  process.exit(0);
-}
-
-// Parse the command to extract argv0 and positional args
-let tokens;
-try {
-  // Simple shell parsing - split on spaces but respect quotes (both single and double)
-  tokens = command.match(/(?:[^\s"']+|"[^"]*"|'[^']*')+/g)?.map(t => t.replace(/^['"]|['"]$/g, "")) || [];
-} catch {
-  process.exit(0);
-}
-
-if (tokens.length === 0) {
-  process.exit(0);
-}
-
-// Strip inline environment variable assignments (e.g., FOO=1 bun test)
-const inlineEnv = new Map();
-let commandStart = 0;
-while (
-  commandStart < tokens.length &&
-  /^[A-Za-z_][A-Za-z0-9_]*=/.test(tokens[commandStart]) &&
-  !tokens[commandStart].includes("/")
-) {
-  const [name, value = ""] = tokens[commandStart].split("=", 2);
-  inlineEnv.set(name, value);
-  commandStart++;
-}
-if (commandStart >= tokens.length) {
-  process.exit(0);
-}
-tokens = tokens.slice(commandStart);
-useSystemBun = inlineEnv.get("USE_SYSTEM_BUN") ?? useSystemBun;
-
-// Get the executable name (argv0)
-const argv0 = basename(tokens[0], extname(tokens[0]));
-
-// Check if it's zig or zig.exe
-if (argv0 === "zig") {
-  // Filter out flags (starting with -) to get positional arguments
-  const positionalArgs = tokens.slice(1).filter(arg => !arg.startsWith("-"));
-
-  // Check if the positional args contain "build" followed by "obj"
-  if (positionalArgs.length >= 2 && positionalArgs[0] === "build" && positionalArgs[1] === "obj") {
-    denyWithReason("error: Use `bun bd` to build Bun and wait patiently");
-  }
-}
-
-// Check if argv0 is timeout and the command is "bun bd"
-if (argv0 === "timeout") {
-  // Find the actual command after timeout and its arguments
-  const timeoutArgEndIndex = tokens.slice(1).findIndex(t => !t.startsWith("-") && !/^\d/.test(t));
-  if (timeoutArgEndIndex === -1) {
-    process.exit(0);
-  }
-
-  const actualCommandIndex = timeoutArgEndIndex + 1;
-  if (actualCommandIndex >= tokens.length) {
-    process.exit(0);
-  }
-
-  const actualCommand = basename(tokens[actualCommandIndex]);
-  const restArgs = tokens.slice(actualCommandIndex + 1);
-
-  // Check if it's "bun bd" or "bun-debug bd" without other positional args
-  if (actualCommand === "bun" || actualCommand.includes("bun-debug")) {
-    // Claude is a sneaky fucker
-    let positionalArgs = restArgs.filter(arg => !arg.startsWith("-"));
-    const redirectStderrToStdoutIndex = positionalArgs.findIndex(arg => arg === "2>&1");
-    if (redirectStderrToStdoutIndex !== -1) {
-      positionalArgs.splice(redirectStderrToStdoutIndex, 1);
-    }
-    const redirectStdoutToStderrIndex = positionalArgs.findIndex(arg => arg === "1>&2");
-    if (redirectStdoutToStderrIndex !== -1) {
-      positionalArgs.splice(redirectStdoutToStderrIndex, 1);
-    }
-
-    const redirectToFileIndex = positionalArgs.findIndex(arg => arg === ">");
-    if (redirectToFileIndex !== -1) {
-      positionalArgs.splice(redirectToFileIndex, 2);
-    }
-
-    const redirectToFileAppendIndex = positionalArgs.findIndex(arg => arg === ">>");
-    if (redirectToFileAppendIndex !== -1) {
-      positionalArgs.splice(redirectToFileAppendIndex, 2);
-    }
-
-    const redirectTOFileInlineIndex = positionalArgs.findIndex(arg => arg.startsWith(">"));
-    if (redirectTOFileInlineIndex !== -1) {
-      positionalArgs.splice(redirectTOFileInlineIndex, 1);
-    }
-
-    const pipeIndex = positionalArgs.findIndex(arg => arg === "|");
-    if (pipeIndex !== -1) {
-      positionalArgs = positionalArgs.slice(0, pipeIndex);
-    }
-
-    positionalArgs = positionalArgs.map(arg => arg.trim()).filter(Boolean);
-
-    if (positionalArgs.length === 1 && positionalArgs[0] === "bd") {
-      denyWithReason("error: Run `bun bd` without a timeout");
-    }
-  }
-}
-
-// Check if command is "bun .* test" or "bun-debug test" with -u/--update-snapshots AND -t/--test-name-pattern
-if (argv0 === "bun" || argv0.includes("bun-debug")) {
-  const allArgs = tokens.slice(1);
-
-  // Check if "test" is in positional args or "bd" followed by "test"
-  const positionalArgs = allArgs.filter(arg => !arg.startsWith("-"));
-  const hasTest = positionalArgs.includes("test") || (positionalArgs[0] === "bd" && positionalArgs[1] === "test");
-
-  if (hasTest) {
-    const hasUpdateSnapshots = allArgs.some(arg => arg === "-u" || arg === "--update-snapshots");
-    const hasTestNamePattern = allArgs.some(arg => arg === "-t" || arg === "--test-name-pattern");
-
-    if (hasUpdateSnapshots && hasTestNamePattern) {
-      denyWithReason("error: Cannot use -u/--update-snapshots with -t/--test-name-pattern");
-    }
-  }
-}
-
-// Check if timeout option is set for "bun bd" command
-if (timeout !== undefined && (argv0 === "bun" || argv0.includes("bun-debug"))) {
-  const positionalArgs = tokens.slice(1).filter(arg => !arg.startsWith("-"));
-  if (positionalArgs.length === 1 && positionalArgs[0] === "bd") {
-    denyWithReason("error: Run `bun bd` without a timeout");
-  }
-}
-
-// Check if running "bun test <file>" without USE_SYSTEM_BUN=1
-if ((argv0 === "bun" || argv0.includes("bun-debug")) && useSystemBun !== "1") {
-  const allArgs = tokens.slice(1);
-  const positionalArgs = allArgs.filter(arg => !arg.startsWith("-"));
-
-  // Check if it's "test" (not "bd test")
-  if (positionalArgs.length >= 1 && positionalArgs[0] === "test" && positionalArgs[0] !== "bd") {
-    denyWithReason(
-      "error: In development, use `bun bd test <file>` to test your changes. If you meant to use a release version, set USE_SYSTEM_BUN=1",
-    );
-  }
-}
-
-// Check if running "bun bd test" from bun repo root or test folder without a file path
-if (argv0 === "bun" || argv0.includes("bun-debug")) {
-  const allArgs = tokens.slice(1);
-  const positionalArgs = allArgs.filter(arg => !arg.startsWith("-"));
-
-  // Check if it's "bd test"
-  if (positionalArgs.length >= 2 && positionalArgs[0] === "bd" && positionalArgs[1] === "test") {
-    // Check if cwd is the bun repo root or test folder
-    const isBunRepoRoot = cwd === "/workspace/bun" || cwd.endsWith("/bun");
-    const isTestFolder = cwd.endsWith("/bun/test");
-
-    if (isBunRepoRoot || isTestFolder) {
-      // Check if there's a file path argument (looks like a path: contains / or has test extension)
-      const hasFilePath = positionalArgs
-        .slice(2)
-        .some(
-          arg =>
-            arg.includes("/") ||
-            arg.endsWith(".test.ts") ||
-            arg.endsWith(".test.js") ||
-            arg.endsWith(".test.tsx") ||
-            arg.endsWith(".test.jsx"),
-        );
-
-      if (!hasFilePath) {
-        denyWithReason(
-          "error: `bun bd test` from repo root or test folder will run all tests. Use `bun bd test <path>` with a specific test file.",
-        );
-      }
-    }
-  }
-}
-
-// Allow the command to proceed
-process.exit(0);
````
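The deny path of this PreToolUse hook can be sketched the same way (payload hypothetical; the expected output is the `denyWithReason` JSON built in the script):

```bash
# The guarded `zig build obj` invocation should yield a JSON "deny" decision
echo '{"tool_name":"Bash","tool_input":{"command":"zig build obj"}}' \
  | bun .claude/hooks/pre-bash-zig-build.js
# => {"hookSpecificOutput":{"hookEventName":"PreToolUse","permissionDecision":"deny",...}}
```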
````diff
@@ -1,26 +0,0 @@
-{
-  "hooks": {
-    "PreToolUse": [
-      {
-        "matcher": "Bash",
-        "hooks": [
-          {
-            "type": "command",
-            "command": "\"$CLAUDE_PROJECT_DIR\"/.claude/hooks/pre-bash-zig-build.js"
-          }
-        ]
-      }
-    ],
-    "PostToolUse": [
-      {
-        "matcher": "Write|Edit|MultiEdit",
-        "hooks": [
-          {
-            "type": "command",
-            "command": "\"$CLAUDE_PROJECT_DIR\"/.claude/hooks/post-edit-zig-format.js"
-          }
-        ]
-      }
-    ]
-  }
-}
````

````diff
@@ -30,7 +30,7 @@ bun bd <file> <...args>
 Debug logs look like this:

 ```zig
-const log = bun.Output.scoped(.${SCOPE}, .hidden);
+const log = bun.Output.scoped(.${SCOPE}, false);

 // ...later
 log("MY DEBUG LOG", .{})
````

**.github/workflows/auto-close-duplicates.yml** (vendored) — 29 lines changed

````diff
@@ -1,29 +0,0 @@
-name: Auto-close duplicate issues
-on:
-  schedule:
-    - cron: "0 9 * * *"
-  workflow_dispatch:
-
-jobs:
-  auto-close-duplicates:
-    runs-on: ubuntu-latest
-    timeout-minutes: 10
-    concurrency:
-      group: auto-close-duplicates-${{ github.repository }}
-      cancel-in-progress: true
-    permissions:
-      contents: read
-      issues: write
-
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-
-      - name: Setup Bun
-        uses: ./.github/actions/setup-bun
-
-      - name: Auto-close duplicate issues
-        run: bun run scripts/auto-close-duplicates.ts
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-          GITHUB_REPOSITORY: ${{ github.repository }}
````

**.github/workflows/claude-dedupe-issues.yml** (vendored) — 34 lines changed

````diff
@@ -1,34 +0,0 @@
-name: Claude Issue Dedupe
-on:
-  issues:
-    types: [opened]
-  workflow_dispatch:
-    inputs:
-      issue_number:
-        description: 'Issue number to process for duplicate detection'
-        required: true
-        type: string
-
-jobs:
-  claude-dedupe-issues:
-    runs-on: ubuntu-latest
-    timeout-minutes: 10
-    concurrency:
-      group: claude-dedupe-issues-${{ github.event.issue.number || inputs.issue_number }}
-      cancel-in-progress: true
-    permissions:
-      contents: read
-      issues: write
-
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-
-      - name: Run Claude Code slash command
-        uses: anthropics/claude-code-base-action@beta
-        with:
-          prompt: "/dedupe ${{ github.repository }}/issues/${{ github.event.issue.number || inputs.issue_number }}"
-          anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}
-          claude_args: "--model claude-sonnet-4-5-20250929"
-          claude_env: |
-            GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
````

**.github/workflows/claude.yml** (vendored) — 3 lines changed

````diff
@@ -57,7 +57,8 @@ jobs:
           git reset --hard origin/${{ github.event.pull_request.head.ref }}
       - name: Run Claude Code
         id: claude
-        uses: anthropics/claude-code-action@v1
+        # TODO: switch this out once they merge their v1
+        uses: km-anthropic/claude-code-action@v1-dev
        with:
          timeout_minutes: "180"
          claude_args: |
````

**.github/workflows/labeled.yml** (vendored) — 6 lines changed

````diff
@@ -142,8 +142,8 @@ jobs:
         uses: actions/github-script@v7
         with:
           script: |
-            const closeAction = ${{ fromJson(steps.add-labels.outputs.close-action) }};
+            const closeAction = JSON.parse('${{ steps.add-labels.outputs.close-action }}');

             // Comment with the reason
             await github.rest.issues.createComment({
               owner: context.repo.owner,
@@ -151,7 +151,7 @@
               issue_number: context.issue.number,
               body: closeAction.comment
             });

             // Close the issue
             await github.rest.issues.update({
               owner: context.repo.owner,
````

**.github/workflows/update-sqlite3.yml** (vendored) — 19 lines changed

````diff
@@ -70,7 +70,24 @@ jobs:
       - name: Update SQLite if needed
         if: success() && steps.check-version.outputs.current_num < steps.check-version.outputs.latest_num
         run: |
-          ./scripts/update-sqlite-amalgamation.sh ${{ steps.check-version.outputs.latest_num }} ${{ steps.check-version.outputs.latest_year }}
+          set -euo pipefail
+
+          TEMP_DIR=$(mktemp -d)
+          cd $TEMP_DIR
+
+          echo "Downloading from: https://sqlite.org/${{ steps.check-version.outputs.latest_year }}/sqlite-amalgamation-${{ steps.check-version.outputs.latest_num }}.zip"
+
+          # Download and extract latest version
+          wget "https://sqlite.org/${{ steps.check-version.outputs.latest_year }}/sqlite-amalgamation-${{ steps.check-version.outputs.latest_num }}.zip"
+          unzip "sqlite-amalgamation-${{ steps.check-version.outputs.latest_num }}.zip"
+          cd "sqlite-amalgamation-${{ steps.check-version.outputs.latest_num }}"
+
+          # Add header comment and copy files
+          echo "// clang-format off" > $GITHUB_WORKSPACE/src/bun.js/bindings/sqlite/sqlite3.c
+          cat sqlite3.c >> $GITHUB_WORKSPACE/src/bun.js/bindings/sqlite/sqlite3.c
+
+          echo "// clang-format off" > $GITHUB_WORKSPACE/src/bun.js/bindings/sqlite/sqlite3_local.h
+          cat sqlite3.h >> $GITHUB_WORKSPACE/src/bun.js/bindings/sqlite/sqlite3_local.h
+
       - name: Create Pull Request
         if: success() && steps.check-version.outputs.current_num < steps.check-version.outputs.latest_num
````

**.gitignore** (vendored) — 4 lines changed

````diff
@@ -1,9 +1,7 @@
-.claude/settings.local.json
 .DS_Store
 .env
-.envrc
 .eslintcache
 .gdb_history
 .idea
 .next
 .ninja_deps
@@ -191,4 +189,4 @@ scratch*.{js,ts,tsx,cjs,mjs}
 scripts/lldb-inline

 # We regenerate these in all the build scripts
-cmake/sources/*.txt
+cmake/sources/*.txt
````

````diff
@@ -19,12 +19,6 @@
     "options": {
       "printWidth": 80
     }
   },
-  {
-    "files": ["src/codegen/bindgenv2/**/*.ts", "*.bindv2.ts"],
-    "options": {
-      "printWidth": 100
-    }
-  }
 ]
}
````

**CLAUDE.md** — 86 lines changed

````diff
@@ -23,15 +23,12 @@ Tip: Bun is already installed and in $PATH. The `bd` subcommand is a package.jso
 ### Test Organization

 If a test is for a specific numbered GitHub Issue, it should be placed in `test/regression/issue/${issueNumber}.test.ts`. Ensure the issue number is **REAL** and not a placeholder!

 If no valid issue number is provided, find the best existing file to modify instead, such as:

-- `test/js/bun/` - Bun-specific API tests (http, crypto, ffi, shell, etc.)
 - `test/js/node/` - Node.js compatibility tests
 - `test/js/web/` - Web API tests (fetch, WebSocket, streams, etc.)
 - `test/cli/` - CLI command tests (install, run, test, etc.)
-- `test/bundler/` - Bundler and transpiler tests. Use `itBundled` helper.
 - `test/regression/issue/` - Regression tests (create one per bug fix)
+- `test/bundler/` - Bundler and transpiler tests
 - `test/integration/` - End-to-end integration tests
 - `test/napi/` - N-API compatibility tests
 - `test/v8/` - V8 C++ API compatibility tests
@@ -64,21 +61,15 @@ test("my feature", async () => {
     proc.exited,
   ]);

-  expect(exitCode).toBe(0);
   // Prefer snapshot tests over expect(stdout).toBe("hello\n");
   expect(normalizeBunSnapshot(stdout, dir)).toMatchInlineSnapshot(`"hello"`);

+  // Assert the exit code last. This gives you a more useful error message on test failure.
+  expect(exitCode).toBe(0);
 });
 ```

 - Always use `port: 0`. Do not hardcode ports. Do not use your own random port number function.
 - Use `normalizeBunSnapshot` to normalize snapshot output of the test.
 - NEVER write tests that check for no "panic" or "uncaught exception" or similar in the test output. That is NOT a valid test.
 - Use `tempDir` from `"harness"` to create a temporary directory. **Do not** use `tmpdirSync` or `fs.mkdtempSync` to create temporary directories.
 - When spawning processes, tests should expect(stdout).toBe(...) BEFORE expect(exitCode).toBe(0). This gives you a more useful error message on test failure.
 - **CRITICAL**: Do not write flaky tests. Do not use `setTimeout` in tests. Instead, `await` the condition to be met. You are not testing the TIME PASSING, you are testing the CONDITION.
 - **CRITICAL**: Verify your test fails with `USE_SYSTEM_BUN=1 bun test <file>` and passes with `bun bd test <file>`. Your test is NOT VALID if it passes with `USE_SYSTEM_BUN=1`.
@@ -87,7 +78,7 @@ test("my feature", async () => {
 - **Zig code** (`src/*.zig`): Core runtime, JavaScript bindings, package manager
 - **C++ code** (`src/bun.js/bindings/*.cpp`): JavaScriptCore bindings, Web APIs
 - **TypeScript** (`src/js/`): Built-in JavaScript modules with special syntax (see JavaScript Modules section)
-- **Generated code**: Many files are auto-generated from `.classes.ts` and other sources. Bun will automatically rebuild these files when you make changes to them.
+- **Generated code**: Many files are auto-generated from `.classes.ts` and other sources

 ### Core Source Organization
@@ -152,6 +143,19 @@ When implementing JavaScript classes in C++:
 3. Add iso subspaces for classes with C++ fields
 4. Cache structures in ZigGlobalObject

+## Development Workflow
+
+### Code Formatting
+
+- `bun run prettier` - Format JS/TS files
+- `bun run zig-format` - Format Zig files
+- `bun run clang-format` - Format C++ files
+
+### Watching for Changes
+
+- `bun run watch` - Incremental Zig compilation with error checking
+- `bun run watch-windows` - Windows-specific watch mode
+
 ### Code Generation

 Code generation happens automatically as part of the build process. The main scripts are:
@@ -173,6 +177,47 @@ Built-in JavaScript modules use special syntax and are organized as:
 - `internal/` - Internal modules not exposed to users
 - `builtins/` - Core JavaScript builtins (streams, console, etc.)

+### Special Syntax in Built-in Modules
+
+1. **`$` prefix** - Access to private properties and JSC intrinsics:
+
+   ```js
+   const arr = $Array.from(...); // Private global
+   map.$set(...); // Private method
+   const arr2 = $newArrayWithSize(5); // JSC intrinsic
+   ```
+
+2. **`require()`** - Must use string literals, resolved at compile time:
+
+   ```js
+   const fs = require("fs"); // Directly loads by numeric ID
+   ```
+
+3. **Debug helpers**:
+   - `$debug()` - Like console.log but stripped in release builds
+   - `$assert()` - Assertions stripped in release builds
+   - `if($debug) {}` - Check if debug env var is set
+
+4. **Platform detection**: `process.platform` and `process.arch` are inlined and dead-code eliminated
+
+5. **Export syntax**: Use `export default` which gets converted to a return statement:
+
+   ```js
+   export default {
+     readFile,
+     writeFile,
+   };
+   ```
+
+Note: These are NOT ES modules. The preprocessor converts `$` to `@` (JSC's actual syntax) and handles the special functions.
+
+## CI
+
+Bun uses BuildKite for CI. To get the status of a PR, you can use the following command:
+
+```bash
+bun ci
+```
+
 ## Important Development Notes

 1. **Never use `bun test` or `bun <file>` directly** - always use `bun bd test` or `bun bd <command>`. `bun bd` compiles & runs the debug build.
@@ -184,8 +229,19 @@ Built-in JavaScript modules use special syntax and are organized as:
 7. **Avoid shell commands** - Don't use `find` or `grep` in tests; use Bun's Glob and built-in tools
 8. **Memory management** - In Zig code, be careful with allocators and use defer for cleanup
 9. **Cross-platform** - Run `bun run zig:check-all` to compile the Zig code on all platforms when making platform-specific changes
-10. **Debug builds** - Use `BUN_DEBUG_QUIET_LOGS=1` to disable debug logging, or `BUN_DEBUG_<scopeName>=1` to enable specific `Output.scoped(.${scopeName}, .visible)`s
+10. **Debug builds** - Use `BUN_DEBUG_QUIET_LOGS=1` to disable debug logging, or `BUN_DEBUG_<scope>=1` to enable specific scopes
+11. **Be humble & honest** - NEVER overstate what you got done or what actually works in commits, PRs or in messages to the user.
+12. **Branch names must start with `claude/`** - This is a requirement for the CI to work.
+
+**ONLY** push up changes after running `bun bd test <file>` and ensuring your tests pass.
+
 ## Key APIs and Features

 ### Bun-Specific APIs

 - **Bun.serve()** - High-performance HTTP server
 - **Bun.spawn()** - Process spawning with better performance than Node.js
 - **Bun.file()** - Fast file I/O operations
 - **Bun.write()** - Unified API for writing to files, stdout, etc.
 - **Bun.$ (Shell)** - Cross-platform shell scripting
 - **Bun.SQLite** - Native SQLite integration
 - **Bun.FFI** - Call native libraries from JavaScript
 - **Bun.Glob** - Fast file pattern matching
````
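To make the `bun bd test` rule from the development notes above concrete, a typical check looks like this (the issue number is a placeholder):

```bash
# Build the debug binary and run one test file against it
bun bd test test/regression/issue/12345.test.ts

# The same test should fail on a released Bun, proving it exercises the fix
USE_SYSTEM_BUN=1 bun test test/regression/issue/12345.test.ts
```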
````diff
@@ -2,21 +2,7 @@ Configuring a development environment for Bun can take 10-30 minutes depending o
 If you are using Windows, please refer to [this guide](https://bun.com/docs/project/building-windows)

-## Using Nix (Alternative)
-
-A Nix flake is provided as an alternative to manual dependency installation:
-
-```bash
-nix develop
-# or explicitly use the pure shell
-# nix develop .#pure
-export CMAKE_SYSTEM_PROCESSOR=$(uname -m)
-bun bd
-```
-
-This provides all dependencies in an isolated, reproducible environment without requiring sudo.
-
-## Install Dependencies (Manual)
+## Install Dependencies

 Using your system's package manager, install Bun's dependencies:
@@ -163,7 +149,7 @@ Bun generally takes about 2.5 minutes to compile a debug build when there are Zi
 - Batch up your changes
 - Ensure zls is running with incremental watching for LSP errors (if you use VSCode and install Zig and run `bun run build` once to download Zig, this should just work)
 - Prefer using the debugger ("CodeLLDB" in VSCode) to step through the code.
-- Use debug logs. `BUN_DEBUG_<scope>=1` will enable debug logging for the corresponding `Output.scoped(.<scope>, .hidden)` logs. You can also set `BUN_DEBUG_QUIET_LOGS=1` to disable all debug logging that isn't explicitly enabled. To dump debug logs into a file, `BUN_DEBUG=<path-to-file>.log`. Debug logs are aggressively removed in release builds.
+- Use debug logs. `BUN_DEBUG_<scope>=1` will enable debug logging for the corresponding `Output.scoped(.<scope>, false)` logs. You can also set `BUN_DEBUG_QUIET_LOGS=1` to disable all debug logging that isn't explicitly enabled. To dump debug logs into a file, `BUN_DEBUG=<path-to-file>.log`. Debug logs are aggressively removed in release builds.
 - src/js/\*\*.ts changes are pretty much instant to rebuild. C++ changes are a bit slower, but still much faster than the Zig code (Zig is one compilation unit, C++ is many).

 ## Code generation scripts
````
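The debug-log bullet above translates into invocations like these (the scope name and paths are hypothetical):

```bash
# Enable one debug scope in the debug build
BUN_DEBUG_fetch=1 bun bd ./script.ts

# Silence everything except explicitly enabled scopes, and dump logs to a file
BUN_DEBUG_QUIET_LOGS=1 BUN_DEBUG=/tmp/bun-debug.log bun bd ./script.ts
```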
````diff
@@ -4,16 +4,20 @@
     "": {
       "name": "react-hello-world",
       "dependencies": {
-        "react": "^19.2.0",
-        "react-dom": "^19.2.0",
+        "react": "next",
+        "react-dom": "next",
       },
     },
   },
   "packages": {
-    "react": ["react@19.2.0", "", {}, "sha512-tmbWg6W31tQLeB5cdIBOicJDJRR2KzXsV7uSK9iNfLWQ5bIZfxuPEHp7M8wiHyHnn0DD1i7w3Zmin0FtkrwoCQ=="],
+    "js-tokens": ["js-tokens@4.0.0", "", {}, "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ=="],

-    "react-dom": ["react-dom@19.2.0", "", { "dependencies": { "scheduler": "^0.27.0" }, "peerDependencies": { "react": "^19.2.0" } }, "sha512-UlbRu4cAiGaIewkPyiRGJk0imDN2T3JjieT6spoL2UeSf5od4n5LB/mQ4ejmxhCFT1tYe8IvaFulzynWovsEFQ=="],
+    "loose-envify": ["loose-envify@1.4.0", "", { "dependencies": { "js-tokens": "^3.0.0 || ^4.0.0" }, "bin": { "loose-envify": "cli.js" } }, "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q=="],

-    "scheduler": ["scheduler@0.27.0", "", {}, "sha512-eNv+WrVbKu1f3vbYJT/xtiF5syA5HPIMtf9IgY/nKg0sWqzAUEvqY/xm7OcZc/qafLx/iO9FgOmeSAp4v5ti/Q=="],
+    "react": ["react@18.3.0-next-b72ed698f-20230303", "", { "dependencies": { "loose-envify": "^1.1.0" } }, "sha512-l6RbwXa9Peerh9pQEq62DDypxSQfavbybY0wV1vwZ63X0P5VaaEesZAz1KPpnVvXjTtQaOMQsIPvnQwmaVqzTQ=="],
+
+    "react-dom": ["react-dom@18.3.0-next-b72ed698f-20230303", "", { "dependencies": { "loose-envify": "^1.1.0", "scheduler": "0.24.0-next-b72ed698f-20230303" }, "peerDependencies": { "react": "18.3.0-next-b72ed698f-20230303" } }, "sha512-0Gh/gmTT6H8KxswIQB/8shdTTfs6QIu86nNqZf3Y0RBqIwgTVxRaQVz14/Fw4/Nt81nK/Jt6KT4bx3yvOxZDGQ=="],
+
+    "scheduler": ["scheduler@0.24.0-next-b72ed698f-20230303", "", { "dependencies": { "loose-envify": "^1.1.0" } }, "sha512-ct4DMMFbc2kFxCdvbG+i/Jn1S1oqrIFSn2VX/mam+Ya0iuNy+lb8rgT7A+YBUqrQNDaNEqABYI2sOQgqoRxp7w=="],
   }
 }
````

````diff
@@ -4,14 +4,13 @@
   "description": "",
   "main": "react-hello-world.node.js",
   "scripts": {
-    "test": "echo \"Error: no test specified\" && exit 1",
-    "build:workerd": "bun build react-hello-world.workerd.jsx --outfile=react-hello-world.workerd.js --format=esm --production && (echo '// MessageChannel polyfill for workerd'; echo 'if (typeof MessageChannel === \"undefined\") {'; echo '  globalThis.MessageChannel = class MessageChannel {'; echo '    constructor() {'; echo '      this.port1 = { onmessage: null, postMessage: () => {} };'; echo '      this.port2 = {'; echo '        postMessage: (msg) => {'; echo '          if (this.port1.onmessage) {'; echo '            queueMicrotask(() => this.port1.onmessage({ data: msg }));'; echo '          }'; echo '        }'; echo '      };'; echo '    }'; echo '  };'; echo '}'; cat react-hello-world.workerd.js) > temp.js && mv temp.js react-hello-world.workerd.js"
+    "test": "echo \"Error: no test specified\" && exit 1"
   },
   "keywords": [],
   "author": "Colin McDonnell",
   "license": "ISC",
   "dependencies": {
-    "react": "^19.2.0",
-    "react-dom": "^19.2.0"
+    "react": "next",
+    "react-dom": "next"
   }
 }
````

````diff
@@ -1,23 +0,0 @@
-using Workerd = import "/workerd/workerd.capnp";
-
-const config :Workerd.Config = (
-  services = [
-    (name = "main", worker = .mainWorker),
-  ],
-
-  sockets = [
-    ( name = "http",
-      address = "*:3001",
-      http = (),
-      service = "main"
-    ),
-  ]
-);
-
-const mainWorker :Workerd.Worker = (
-  modules = [
-    (name = "worker", esModule = embed "react-hello-world.workerd.js"),
-  ],
-  compatibilityDate = "2025-01-01",
-  compatibilityFlags = ["nodejs_compat_v2"],
-);
````

File diff suppressed because one or more lines are too long

````diff
@@ -1,40 +0,0 @@
-// Cloudflare Workers version with export default fetch
-// Run with: workerd serve react-hello-world.workerd.config.capnp
-
-// Polyfill MessageChannel for workerd
-if (typeof MessageChannel === 'undefined') {
-  globalThis.MessageChannel = class MessageChannel {
-    constructor() {
-      this.port1 = { onmessage: null, postMessage: () => {} };
-      this.port2 = {
-        postMessage: (msg) => {
-          if (this.port1.onmessage) {
-            queueMicrotask(() => this.port1.onmessage({ data: msg }));
-          }
-        }
-      };
-    }
-  };
-}
-
-import React from "react";
-import { renderToReadableStream } from "react-dom/server";
-
-const headers = {
-  "Content-Type": "text/html",
-};
-
-const App = () => (
-  <html>
-    <body>
-      <h1>Hello World</h1>
-      <p>This is an example.</p>
-    </body>
-  </html>
-);
-
-export default {
-  async fetch(request) {
-    return new Response(await renderToReadableStream(<App />), { headers });
-  },
-};
````
````diff
@@ -49,7 +49,6 @@ const BunBuildOptions = struct {
     enable_logs: bool = false,
     enable_asan: bool,
     enable_valgrind: bool,
-    use_mimalloc: bool,
     tracy_callstack_depth: u16,
     reported_nodejs_version: Version,
     /// To make iterating on some '@embedFile's faster, we load them at runtime
@@ -98,7 +97,6 @@ const BunBuildOptions = struct {
     opts.addOption(bool, "enable_logs", this.enable_logs);
     opts.addOption(bool, "enable_asan", this.enable_asan);
     opts.addOption(bool, "enable_valgrind", this.enable_valgrind);
-    opts.addOption(bool, "use_mimalloc", this.use_mimalloc);
     opts.addOption([]const u8, "reported_nodejs_version", b.fmt("{}", .{this.reported_nodejs_version}));
     opts.addOption(bool, "zig_self_hosted_backend", this.no_llvm);
     opts.addOption(bool, "override_no_export_cpp_apis", this.override_no_export_cpp_apis);
@@ -272,7 +270,6 @@ pub fn build(b: *Build) !void {
     .enable_logs = b.option(bool, "enable_logs", "Enable logs in release") orelse false,
     .enable_asan = b.option(bool, "enable_asan", "Enable asan") orelse false,
     .enable_valgrind = b.option(bool, "enable_valgrind", "Enable valgrind") orelse false,
-    .use_mimalloc = b.option(bool, "use_mimalloc", "Use mimalloc as default allocator") orelse false,
     .llvm_codegen_threads = b.option(u32, "llvm_codegen_threads", "Number of threads to use for LLVM codegen") orelse 1,
 };
@@ -503,7 +500,6 @@ fn addMultiCheck(
     .no_llvm = root_build_options.no_llvm,
     .enable_asan = root_build_options.enable_asan,
     .enable_valgrind = root_build_options.enable_valgrind,
-    .use_mimalloc = root_build_options.use_mimalloc,
     .override_no_export_cpp_apis = root_build_options.override_no_export_cpp_apis,
 };
@@ -724,7 +720,6 @@ fn addInternalImports(b: *Build, mod: *Module, opts: *BunBuildOptions) void {
     // Generated code exposed as individual modules.
     inline for (.{
         .{ .file = "ZigGeneratedClasses.zig", .import = "ZigGeneratedClasses" },
         .{ .file = "bindgen_generated.zig", .import = "bindgen_generated" },
         .{ .file = "ResolvedSourceTag.zig", .import = "ResolvedSourceTag" },
         .{ .file = "ErrorCode.zig", .import = "ErrorCode" },
         .{ .file = "runtime.out.js", .enable = opts.shouldEmbedCode() },
````

**bun.lock** — 12 lines changed

````diff
@@ -8,14 +8,14 @@
       "@lezer/cpp": "^1.1.3",
       "@types/bun": "workspace:*",
       "bun-tracestrings": "github:oven-sh/bun.report#912ca63e26c51429d3e6799aa2a6ab079b188fd8",
-      "esbuild": "^0.21.5",
-      "mitata": "^0.1.14",
+      "esbuild": "^0.21.4",
+      "mitata": "^0.1.11",
       "peechy": "0.4.34",
-      "prettier": "^3.6.2",
-      "prettier-plugin-organize-imports": "^4.3.0",
+      "prettier": "^3.5.3",
+      "prettier-plugin-organize-imports": "^4.0.0",
       "react": "^18.3.1",
       "react-dom": "^18.3.1",
-      "source-map-js": "^1.2.1",
+      "source-map-js": "^1.2.0",
       "typescript": "5.9.2",
     },
   },
@@ -284,7 +284,7 @@
   "prettier": ["prettier@3.6.2", "", { "bin": { "prettier": "bin/prettier.cjs" } }, "sha512-I7AIg5boAr5R0FFtJ6rCfD+LFsWHp81dolrFD8S79U9tb8Az2nGrJncnMSnys+bpQJfRUzqs9hnA81OAA3hCuQ=="],

-  "prettier-plugin-organize-imports": ["prettier-plugin-organize-imports@4.3.0", "", { "peerDependencies": { "prettier": ">=2.0", "typescript": ">=2.9", "vue-tsc": "^2.1.0 || 3" }, "optionalPeers": ["vue-tsc"] }, "sha512-FxFz0qFhyBsGdIsb697f/EkvHzi5SZOhWAjxcx2dLt+Q532bAlhswcXGYB1yzjZ69kW8UoadFBw7TyNwlq96Iw=="],
+  "prettier-plugin-organize-imports": ["prettier-plugin-organize-imports@4.2.0", "", { "peerDependencies": { "prettier": ">=2.0", "typescript": ">=2.9", "vue-tsc": "^2.1.0 || 3" }, "optionalPeers": ["vue-tsc"] }, "sha512-Zdy27UhlmyvATZi67BTnLcKTo8fm6Oik59Sz6H64PgZJVs6NJpPD1mT240mmJn62c98/QaL+r3kx9Q3gRpDajg=="],

   "react": ["react@18.3.1", "", { "dependencies": { "loose-envify": "^1.1.0" } }, "sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ=="],
````

````diff
@@ -10,4 +10,3 @@ preload = "./test/preload.ts"

 [install]
 linker = "isolated"
-minimumReleaseAge = 1
````
````diff
@@ -86,20 +86,11 @@ elseif(APPLE)
 endif()

 if(UNIX)
-  # Nix LLVM doesn't support zstd compression, use zlib instead
-  if(DEFINED ENV{NIX_CC})
-    register_compiler_flags(
-      DESCRIPTION "Enable debug symbols (zlib-compressed for Nix)"
-      -g3 -gz=zlib ${DEBUG}
-      -g1 ${RELEASE}
-    )
-  else()
-    register_compiler_flags(
-      DESCRIPTION "Enable debug symbols (zstd-compressed)"
-      -g3 -gz=zstd ${DEBUG}
-      -g1 ${RELEASE}
-    )
-  endif()
+  register_compiler_flags(
+    DESCRIPTION "Enable debug symbols"
+    -g3 -gz=zstd ${DEBUG}
+    -g1 ${RELEASE}
+  )

   register_compiler_flags(
     DESCRIPTION "Optimize debug symbols for LLDB"
@@ -223,13 +214,10 @@ if(ENABLE_ASSERTIONS)
     _LIBCPP_HARDENING_MODE=_LIBCPP_HARDENING_MODE_DEBUG ${DEBUG}
   )

-  # Nix glibc already sets _FORTIFY_SOURCE, don't override it
-  if(NOT DEFINED ENV{NIX_CC})
-    register_compiler_definitions(
-      DESCRIPTION "Enable fortified sources (Release only)"
-      _FORTIFY_SOURCE=3 ${RELEASE}
-    )
-  endif()
+  register_compiler_definitions(
+    DESCRIPTION "Enable fortified sources"
+    _FORTIFY_SOURCE=3
+  )

   if(LINUX)
     register_compiler_definitions(
````

````diff
@@ -202,9 +202,4 @@ optionx(USE_WEBKIT_ICU BOOL "Use the ICU libraries from WebKit" DEFAULT ${DEFAUL
 optionx(ERROR_LIMIT STRING "Maximum number of errors to show when compiling C++ code" DEFAULT "100")

-# This is not an `option` because setting this variable to OFF is experimental
-# and unsupported. This replaces the `use_mimalloc` variable previously in
-# bun.zig, and enables C++ code to also be aware of the option.
-set(USE_MIMALLOC_AS_DEFAULT_ALLOCATOR ON)
-
 list(APPEND CMAKE_ARGS -DCMAKE_EXPORT_COMPILE_COMMANDS=ON)
````

````diff
@@ -31,14 +31,6 @@
     "output": "BindgenSources.txt",
     "paths": ["src/**/*.bind.ts"]
   },
-  {
-    "output": "BindgenV2Sources.txt",
-    "paths": ["src/**/*.bindv2.ts"]
-  },
-  {
-    "output": "BindgenV2InternalSources.txt",
-    "paths": ["src/codegen/bindgenv2/**/*.ts"]
-  },
   {
     "output": "ZigSources.txt",
     "paths": ["src/**/*.zig"]
````

````diff
@@ -395,54 +395,6 @@ register_command(
   ${BUN_BAKE_RUNTIME_OUTPUTS}
 )

-set(BUN_BINDGENV2_SCRIPT ${CWD}/src/codegen/bindgenv2/script.ts)
-
-absolute_sources(BUN_BINDGENV2_SOURCES ${CWD}/cmake/sources/BindgenV2Sources.txt)
-# These sources include the script itself.
-absolute_sources(BUN_BINDGENV2_INTERNAL_SOURCES
-  ${CWD}/cmake/sources/BindgenV2InternalSources.txt)
-string(REPLACE ";" "," BUN_BINDGENV2_SOURCES_COMMA_SEPARATED
-  "${BUN_BINDGENV2_SOURCES}")
-
-execute_process(
-  COMMAND ${BUN_EXECUTABLE} run ${BUN_BINDGENV2_SCRIPT}
-    --command=list-outputs
-    --sources=${BUN_BINDGENV2_SOURCES_COMMA_SEPARATED}
-    --codegen-path=${CODEGEN_PATH}
-  RESULT_VARIABLE bindgen_result
-  OUTPUT_VARIABLE bindgen_outputs
-)
-if(${bindgen_result})
-  message(FATAL_ERROR "bindgenv2/script.ts exited with non-zero status")
-endif()
-foreach(output IN LISTS bindgen_outputs)
-  if(output MATCHES "\.cpp$")
-    list(APPEND BUN_BINDGENV2_CPP_OUTPUTS ${output})
-  elseif(output MATCHES "\.zig$")
-    list(APPEND BUN_BINDGENV2_ZIG_OUTPUTS ${output})
-  else()
-    message(FATAL_ERROR "unexpected bindgen output: [${output}]")
-  endif()
-endforeach()
-
-register_command(
-  TARGET
-    bun-bindgen-v2
-  COMMENT
-    "Generating bindings (v2)"
-  COMMAND
-    ${BUN_EXECUTABLE} run ${BUN_BINDGENV2_SCRIPT}
-      --command=generate
-      --codegen-path=${CODEGEN_PATH}
-      --sources=${BUN_BINDGENV2_SOURCES_COMMA_SEPARATED}
-  SOURCES
-    ${BUN_BINDGENV2_SOURCES}
-    ${BUN_BINDGENV2_INTERNAL_SOURCES}
-  OUTPUTS
-    ${BUN_BINDGENV2_CPP_OUTPUTS}
-    ${BUN_BINDGENV2_ZIG_OUTPUTS}
-)
-
 set(BUN_BINDGEN_SCRIPT ${CWD}/src/codegen/bindgen.ts)

 absolute_sources(BUN_BINDGEN_SOURCES ${CWD}/cmake/sources/BindgenSources.txt)
@@ -621,7 +573,6 @@ set(BUN_ZIG_GENERATED_SOURCES
   ${BUN_ZIG_GENERATED_CLASSES_OUTPUTS}
   ${BUN_JAVASCRIPT_OUTPUTS}
   ${BUN_CPP_OUTPUTS}
-  ${BUN_BINDGENV2_ZIG_OUTPUTS}
 )

 # In debug builds, these are not embedded, but rather referenced at runtime.
@@ -685,7 +636,6 @@ register_command(
   -Denable_logs=$<IF:$<BOOL:${ENABLE_LOGS}>,true,false>
   -Denable_asan=$<IF:$<BOOL:${ENABLE_ZIG_ASAN}>,true,false>
   -Denable_valgrind=$<IF:$<BOOL:${ENABLE_VALGRIND}>,true,false>
-  -Duse_mimalloc=$<IF:$<BOOL:${USE_MIMALLOC_AS_DEFAULT_ALLOCATOR}>,true,false>
   -Dllvm_codegen_threads=${LLVM_ZIG_CODEGEN_THREADS}
   -Dversion=${VERSION}
   -Dreported_nodejs_version=${NODEJS_VERSION}
@@ -762,7 +712,6 @@ list(APPEND BUN_CPP_SOURCES
   ${BUN_JAVASCRIPT_OUTPUTS}
   ${BUN_OBJECT_LUT_OUTPUTS}
   ${BUN_BINDGEN_CPP_OUTPUTS}
-  ${BUN_BINDGENV2_CPP_OUTPUTS}
 )

 if(WIN32)
@@ -819,7 +768,7 @@ set_target_properties(${bun} PROPERTIES
   CXX_STANDARD_REQUIRED YES
   CXX_EXTENSIONS YES
   CXX_VISIBILITY_PRESET hidden
-  C_STANDARD 17 # Cannot uprev to C23 because MSVC doesn't have support.
+  C_STANDARD 17
   C_STANDARD_REQUIRED YES
   VISIBILITY_INLINES_HIDDEN YES
 )
@@ -900,10 +849,6 @@ if(WIN32)
   )
 endif()

-if(USE_MIMALLOC_AS_DEFAULT_ALLOCATOR)
-  target_compile_definitions(${bun} PRIVATE USE_MIMALLOC=1)
-endif()
-
 target_compile_definitions(${bun} PRIVATE
   _HAS_EXCEPTIONS=0
   LIBUS_USE_OPENSSL=1
@@ -944,7 +889,7 @@ if(NOT WIN32)
   if (NOT ABI STREQUAL "musl")
     target_compile_options(${bun} PUBLIC
       -fsanitize=null
-      -fno-sanitize-recover=all
+      -fsanitize-recover=all
      -fsanitize=bounds
      -fsanitize=return
      -fsanitize=nullability-arg
@@ -999,20 +944,6 @@ if(NOT WIN32)
   )

   if(ENABLE_ASAN)
-    target_compile_options(${bun} PUBLIC
-      -fsanitize=null
-      -fno-sanitize-recover=all
-      -fsanitize=bounds
-      -fsanitize=return
-      -fsanitize=nullability-arg
-      -fsanitize=nullability-assign
-      -fsanitize=nullability-return
-      -fsanitize=returns-nonnull-attribute
-      -fsanitize=unreachable
-    )
-    target_link_libraries(${bun} PRIVATE
-      -fsanitize=null
-    )
     target_compile_options(${bun} PUBLIC -fsanitize=address)
     target_link_libraries(${bun} PUBLIC -fsanitize=address)
   endif()
@@ -1059,6 +990,7 @@ if(APPLE)
   -Wl,-no_compact_unwind
   -Wl,-stack_size,0x1200000
   -fno-keep-static-consts
+  -Wl,-map,${bun}.linker-map
 )

 if(DEBUG)
@@ -1078,7 +1010,6 @@ if(APPLE)
   target_link_options(${bun} PUBLIC
     -dead_strip
     -dead_strip_dylibs
-    -Wl,-map,${bun}.linker-map
   )
 endif()
 endif()
@@ -1112,17 +1043,6 @@ if(LINUX)
   )
 endif()

-if (ENABLE_LTO)
-  # We are optimizing for size at a slight debug-ability cost
-  target_link_options(${bun} PUBLIC
-    -Wl,--no-eh-frame-hdr
-  )
-else()
-  target_link_options(${bun} PUBLIC
-    -Wl,--eh-frame-hdr
-  )
-endif()
-
 target_link_options(${bun} PUBLIC
   --ld-path=${LLD_PROGRAM}
   -fno-pic
@@ -1137,9 +1057,11 @@
   # make debug info faster to load
   -Wl,--gdb-index
   -Wl,-z,combreloc
+  -Wl,--no-eh-frame-hdr
   -Wl,--sort-section=name
   -Wl,--hash-style=both
   -Wl,--build-id=sha1 # Better for debugging than default
+  -Wl,-Map=${bun}.linker-map
 )

 # don't strip in debug, this seems to be needed so that the Zig std library
@@ -1154,7 +1076,6 @@ if(LINUX)
 if (NOT DEBUG AND NOT ENABLE_ASAN AND NOT ENABLE_VALGRIND)
   target_link_options(${bun} PUBLIC
     -Wl,-icf=safe
-    -Wl,-Map=${bun}.linker-map
   )
 endif()
@@ -1476,7 +1397,7 @@ if(NOT BUN_CPP_ONLY)
   list(APPEND bunFiles ${bun}.dSYM)
 endif()

-if((APPLE OR LINUX) AND NOT ENABLE_ASAN)
+if(APPLE OR LINUX)
   list(APPEND bunFiles ${bun}.linker-map)
 endif()
````
````diff
@@ -4,7 +4,7 @@ register_repository(
   REPOSITORY
     google/highway
   COMMIT
-    ac0d5d297b13ab1b89f48484fc7911082d76a93f
+    12b325bc1793dee68ab2157995a690db859fe9e0
 )

 set(HIGHWAY_CMAKE_ARGS
````

````diff
@@ -4,8 +4,8 @@ register_repository(
   REPOSITORY
     libuv/libuv
   COMMIT
-    # Latest HEAD (includes recursion bug fix #4784)
-    f3ce527ea940d926c40878ba5de219640c362811
+    # Corresponds to v1.51.0
+    5152db2cbfeb5582e9c27c5ea1dba2cd9e10759b
 )

 if(WIN32)
````

````diff
@@ -2,7 +2,7 @@ option(WEBKIT_VERSION "The version of WebKit to use")
 option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading")

 if(NOT WEBKIT_VERSION)
-  set(WEBKIT_VERSION 6d0f3aac0b817cc01a846b3754b21271adedac12)
+  set(WEBKIT_VERSION 69fa2714ab5f917c2d15501ff8cfdccfaea78882)
 endif()

 string(SUBSTRING ${WEBKIT_VERSION} 0 16 WEBKIT_VERSION_PREFIX)
````

````diff
@@ -1,4 +1,4 @@
-FROM alpine:3.22 AS build
+FROM alpine:3.20 AS build

 # https://github.com/oven-sh/bun/releases
 ARG BUN_VERSION=latest
@@ -44,7 +44,7 @@ RUN apk --no-cache add ca-certificates curl dirmngr gpg gpg-agent unzip \
   && rm -f "bun-linux-$build.zip" SHASUMS256.txt.asc SHASUMS256.txt \
   && chmod +x /usr/local/bin/bun

-FROM alpine:3.22
+FROM alpine:3.20

 # Disable the runtime transpiler cache by default inside Docker containers.
 # On ephemeral containers, the cache is not useful
````
````diff
@@ -536,7 +536,7 @@ You can also access the `Server` object from the `fetch` handler. It's the secon
 const server = Bun.serve({
   fetch(req, server) {
     const ip = server.requestIP(req);
-    return new Response(`Your IP is ${ip.address}`);
+    return new Response(`Your IP is ${ip}`);
   },
 });
 ```
````

````diff
@@ -42,7 +42,6 @@ await client.incr("counter");
 By default, the client reads connection information from the following environment variables (in order of precedence):

 - `REDIS_URL`
-- `VALKEY_URL`
 - If not set, defaults to `"redis://localhost:6379"`

 ### Connection Lifecycle
````

````diff
@@ -107,8 +107,6 @@ Bun.serve({
 Contextual `data` can be attached to a new WebSocket in the `.upgrade()` call. This data is made available on the `ws.data` property inside the WebSocket handlers.

-To strongly type `ws.data`, add a `data` property to the `websocket` handler object. This types `ws.data` across all lifecycle hooks.
-
 ```ts
 type WebSocketData = {
   createdAt: number;
@@ -116,7 +114,8 @@ type WebSocketData = {
   authToken: string;
 };

-Bun.serve({
+// TypeScript: specify the type of `data`
+Bun.serve<WebSocketData>({
   fetch(req, server) {
     const cookies = new Bun.CookieMap(req.headers.get("cookie")!);
@@ -132,12 +131,8 @@ Bun.serve({
     return undefined;
   },
   websocket: {
-    // TypeScript: specify the type of ws.data like this
-    data: {} as WebSocketData,
-
     // handler called when a message is received
     async message(ws, message) {
-      // ws.data is now properly typed as WebSocketData
       const user = getUserFromToken(ws.data.authToken);

       await saveMessageToDatabase({
@@ -150,10 +145,6 @@ Bun.serve({
 });
 ```

-{% callout %}
-**Note:** Previously, you could specify the type of `ws.data` using a type parameter on `Bun.serve`, like `Bun.serve<MyData>({...})`. This pattern was removed due to [a limitation in TypeScript](https://github.com/microsoft/TypeScript/issues/26242) in favor of the `data` property shown above.
-{% /callout %}
-
 To connect to this server from the browser, create a new `WebSocket`.

 ```ts#browser.js
@@ -173,7 +164,7 @@ socket.addEventListener("message", event => {
 Bun's `ServerWebSocket` implementation implements a native publish-subscribe API for topic-based broadcasting. Individual sockets can `.subscribe()` to a topic (specified with a string identifier) and `.publish()` messages to all other subscribers to that topic (excluding itself). This topic-based broadcast API is similar to [MQTT](https://en.wikipedia.org/wiki/MQTT) and [Redis Pub/Sub](https://redis.io/topics/pubsub).

 ```ts
-const server = Bun.serve({
+const server = Bun.serve<{ username: string }>({
   fetch(req, server) {
     const url = new URL(req.url);
     if (url.pathname === "/chat") {
@@ -188,9 +179,6 @@ const server = Bun.serve({
     return new Response("Hello world");
   },
   websocket: {
-    // TypeScript: specify the type of ws.data like this
-    data: {} as { username: string },
-
     open(ws) {
       const msg = `${ws.data.username} has entered the chat`;
       ws.subscribe("the-group-chat");
````
````diff
@@ -586,41 +586,12 @@ Codesign support requires Bun v1.2.4 or newer.
 {% /callout %}

-## Code splitting
-
-Standalone executables support code splitting. Use `--compile` with `--splitting` to create an executable that loads code-split chunks at runtime.
-
-```bash
-$ bun build --compile --splitting ./src/entry.ts --outdir ./build
-```
-
-{% codetabs %}
-
-```ts#src/entry.ts
-console.log("Entrypoint loaded");
-const lazy = await import("./lazy.ts");
-lazy.hello();
-```
-
-```ts#src/lazy.ts
-export function hello() {
-  console.log("Lazy module loaded");
-}
-```
-
-{% /codetabs %}
-
-```bash
-$ ./build/entry
-Entrypoint loaded
-Lazy module loaded
-```
-
 ## Unsupported CLI arguments

 Currently, the `--compile` flag can only accept a single entrypoint at a time and does not support the following flags:

-- `--outdir` — use `outfile` instead (except when using with `--splitting`).
+- `--outdir` — use `outfile` instead.
+- `--splitting`
 - `--public-path`
 - `--target=node` or `--target=browser`
 - `--no-bundle` - we always bundle everything into the executable.
````
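Given the single-entrypoint constraint above, a standalone build is typically invoked as (paths and output name hypothetical):

```bash
# Bundle one entrypoint into a self-contained executable
bun build --compile ./src/cli.ts --outfile mycli
./mycli
```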
````diff
@@ -1600,7 +1600,7 @@ interface BuildConfig {
   publicPath?: string;
   define?: Record<string, string>;
   loader?: { [k in string]: Loader };
-  sourcemap?: "none" | "linked" | "inline" | "external" | boolean; // default: "none", true -> "inline"
+  sourcemap?: "none" | "linked" | "inline" | "external" | "linked" | boolean; // default: "none", true -> "inline"
   /**
    * package.json `exports` conditions used when resolving imports
    *
````

````diff
@@ -2,29 +2,20 @@ Scaffold an empty Bun project with the interactive `bun init` command.
 ```bash
 $ bun init
-bun init helps you get started with a minimal project and tries to
-guess sensible defaults. Press ^C anytime to quit.
-
-? Select a project template - Press return to submit.
-❯   Blank
-    React
-    Library
+package name (quickstart):
+entry point (index.ts):

-✓ Select a project template: Blank
-
-+ .gitignore
-+ index.ts
-+ tsconfig.json (for editor autocomplete)
-+ README.md
-Done! A package.json file was saved in the current directory.
++ index.ts
++ .gitignore
++ tsconfig.json (for editor auto-complete)
++ README.md

 To get started, run:

-bun run index.ts
-
-bun install v$BUN_LATEST_VERSION
-
-+ @types/bun@$BUN_LATEST_VERSION
-+ typescript@5.9.2
-
-7 packages installed
+bun run index.ts
 ```

 Press `enter` to accept the default answer for each prompt, or pass the `-y` flag to auto-accept the defaults.
````
|
||||
|
||||
@@ -221,38 +221,6 @@ Bun uses a global cache at `~/.bun/install/cache/` to minimize disk usage. Packa
|
||||
|
||||
For complete documentation refer to [Package manager > Global cache](https://bun.com/docs/install/cache).
|
||||
|
||||
## Minimum release age
|
||||
|
||||
To protect against supply chain attacks where malicious packages are quickly published, you can configure a minimum age requirement for npm packages. Package versions published more recently than the specified threshold (in seconds) will be filtered out during installation.
|
||||
|
||||
```bash
|
||||
# Only install package versions published at least 3 days ago
|
||||
$ bun add @types/bun --minimum-release-age 259200 # seconds
|
||||
```
|
||||
|
||||
You can also configure this in `bunfig.toml`:
|
||||
|
||||
```toml
|
||||
[install]
|
||||
# Only install package versions published at least 3 days ago
|
||||
minimumReleaseAge = 259200 # seconds
|
||||
|
||||
# Exclude trusted packages from the age gate
|
||||
minimumReleaseAgeExcludes = ["@types/node", "typescript"]
|
||||
```
|
||||
|
||||
When the minimum age filter is active:
|
||||
|
||||
- Only affects new package resolution - existing packages in `bun.lock` remain unchanged
|
||||
- All dependencies (direct and transitive) are filtered to meet the age requirement when being resolved
|
||||
- When versions are blocked by the age gate, a stability check detects rapid bugfix patterns
|
||||
- If multiple versions were published close together just outside your age gate, it extends the filter to skip those potentially unstable versions and selects an older, more mature version
|
||||
- Searches up to 7 days after the age gate, however if still finding rapid releases it ignores stability check
|
||||
- Exact version requests (like `package@1.1.1`) still respect the age gate but bypass the stability check
|
||||
- Versions without a `time` field are treated as passing the age check (npm registry should always provide timestamps)
|
||||
|
||||
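The core of the age-gate rule can be sketched as follows (a hypothetical helper, not Bun's internals; the stability-check extension described above is omitted):

```ts
// Hypothetical sketch of the age gate described above; not Bun's implementation.
const MINIMUM_RELEASE_AGE_SECONDS = 259_200; // 3 days

function passesAgeGate(publishedAt: string | undefined, now = Date.now()): boolean {
  // Versions without a `time` field are treated as passing the age check.
  if (!publishedAt) return true;
  const ageSeconds = (now - new Date(publishedAt).getTime()) / 1000;
  return ageSeconds >= MINIMUM_RELEASE_AGE_SECONDS;
}

// A version published 2 days ago is filtered out:
const twoDaysAgo = new Date(Date.now() - 2 * 86_400 * 1000).toISOString();
console.log(passesAgeGate(twoDaysAgo)); // false
```
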
For more advanced security scanning, including integration with services & custom filtering, see [Package manager > Security Scanner API](https://bun.com/docs/install/security-scanner-api).

## Configuration

The default behavior of `bun install` can be configured in `bunfig.toml`. The default values are shown below.
@@ -287,10 +255,6 @@ concurrentScripts = 16 # (cpu count or GOMAXPROCS) x2
# installation strategy: "hoisted" or "isolated"
# default: "hoisted"
linker = "hoisted"

# minimum age config
minimumReleaseAge = 259200 # seconds
minimumReleaseAgeExcludes = ["@types/node", "typescript"]
```

## CI/CD

@@ -84,12 +84,14 @@ $ bun publish --dry-run

### `--tolerate-republish`

Exit with code 0 instead of 1 if the package version already exists. Useful in CI/CD where jobs may be re-run.
The `--tolerate-republish` flag makes `bun publish` exit with code 0 instead of code 1 when attempting to republish over an existing version number. This is useful in automated workflows where republishing the same version might occur and should not be treated as an error.

```sh
$ bun publish --tolerate-republish
```

Without this flag, attempting to publish a version that already exists will result in an error and exit code 1. With this flag, the command will exit successfully even when trying to republish an existing version.

### `--gzip-level`

Specify the level of gzip compression to use when packing the package. Only applies to `bun publish` without a tarball path argument. Values range from `0` to `9` (default is `9`).

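For example, a lower level trades compression ratio for packing speed (a usage sketch of the flag described above):

```bash
$ bun publish --gzip-level 6
```
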
@@ -257,13 +257,12 @@ $ bun test --watch

Bun supports the following lifecycle hooks:

| Hook             | Description                                              |
| ---------------- | -------------------------------------------------------- |
| `beforeAll`      | Runs once before all tests.                              |
| `beforeEach`     | Runs before each test.                                   |
| `afterEach`      | Runs after each test.                                    |
| `afterAll`       | Runs once after all tests.                               |
| `onTestFinished` | Runs after a test finishes, including after `afterEach`. |
| Hook         | Description                 |
| ------------ | --------------------------- |
| `beforeAll`  | Runs once before all tests. |
| `beforeEach` | Runs before each test.      |
| `afterEach`  | Runs after each test.       |
| `afterAll`   | Runs once after all tests.  |

These hooks can be defined inside test files, or in a separate file that is preloaded with the `--preload` flag.

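A minimal sketch of the resulting execution order, using the hooks named in the table above:

```ts
import { afterAll, afterEach, beforeAll, beforeEach, test } from "bun:test";

beforeAll(() => console.log("1: once, before any test"));
beforeEach(() => console.log("2: before every test"));
afterEach(() => console.log("4: after every test"));
afterAll(() => console.log("5: once, after all tests"));

test("example", () => {
  console.log("3: test body");
});
```
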
@@ -24,8 +24,8 @@ import { watch } from "fs";
const watcher = watch(
  import.meta.dir,
  { recursive: true },
  (event, relativePath) => {
    console.log(`Detected ${event} in ${relativePath}`);
  (event, filename) => {
    console.log(`Detected ${event} in ${filename}`);
  },
);
```

@@ -7,7 +7,7 @@ When building a WebSocket server, it's typically necessary to store some identif
With [Bun.serve()](https://bun.com/docs/api/websockets#contextual-data), this "contextual data" is set when the connection is initially upgraded by passing a `data` parameter in the `server.upgrade()` call.

```ts
Bun.serve({
Bun.serve<{ socketId: number }>({
  fetch(req, server) {
    const success = server.upgrade(req, {
      data: {
@@ -20,9 +20,6 @@ Bun.serve({
    // ...
  },
  websocket: {
    // TypeScript: specify the type of ws.data like this
    data: {} as { socketId: number },

    // define websocket handlers
    async message(ws, message) {
      // the contextual data is available as the `data` property
@@ -44,7 +41,8 @@ type WebSocketData = {
  userId: string;
};

Bun.serve({
// TypeScript: specify the type of `data`
Bun.serve<WebSocketData>({
  async fetch(req, server) {
    // use a library to parse cookies
    const cookies = parseCookies(req.headers.get("Cookie"));
@@ -62,9 +60,6 @@ Bun.serve({
    if (upgraded) return undefined;
  },
  websocket: {
    // TypeScript: specify the type of ws.data like this
    data: {} as WebSocketData,

    async message(ws, message) {
      // save the message to a database
      await saveMessageToDatabase({

@@ -7,7 +7,7 @@ Bun's server-side `WebSocket` API provides a native pub-sub API. Sockets can be
This code snippet implements a simple single-channel chat server.

```ts
const server = Bun.serve({
const server = Bun.serve<{ username: string }>({
  fetch(req, server) {
    const cookies = req.headers.get("cookie");
    const username = getUsernameFromCookies(cookies);
@@ -17,9 +17,6 @@ const server = Bun.serve({
    return new Response("Hello world");
  },
  websocket: {
    // TypeScript: specify the type of ws.data like this
    data: {} as { username: string },

    open(ws) {
      const msg = `${ws.data.username} has entered the chat`;
      ws.subscribe("the-group-chat");

@@ -7,7 +7,7 @@ Start a simple WebSocket server using [`Bun.serve`](https://bun.com/docs/api/htt
Inside `fetch`, we attempt to upgrade incoming `ws:` or `wss:` requests to WebSocket connections.

```ts
const server = Bun.serve({
const server = Bun.serve<{ authToken: string }>({
  fetch(req, server) {
    const success = server.upgrade(req);
    if (success) {

@@ -89,12 +89,6 @@ $ bun install --linker isolated

Isolated installs create strict dependency isolation similar to pnpm, preventing phantom dependencies and ensuring more deterministic builds. For complete documentation, see [Isolated installs](https://bun.com/docs/install/isolated).

To protect against supply chain attacks, set a minimum age (in seconds) for package versions:

```bash
$ bun install --minimum-release-age 259200 # 3 days
```

{% details summary="Configuring behavior" %}
The default behavior of `bun install` can be configured in `bunfig.toml`:

@@ -128,12 +122,6 @@ concurrentScripts = 16 # (cpu count or GOMAXPROCS) x2
# installation strategy: "hoisted" or "isolated"
# default: "hoisted"
linker = "hoisted"

# minimum package age in seconds (protects against supply chain attacks)
minimumReleaseAge = 259200 # 3 days

# exclude packages from age requirement
minimumReleaseAgeExcludes = ["@types/node", "typescript"]
```

{% /details %}

@@ -36,10 +36,7 @@ linker = "isolated"

### Default behavior

- **Workspaces**: Bun uses **isolated** installs by default to prevent hoisting-related bugs
- **Single projects**: Bun uses **hoisted** installs by default

To override the default, use `--linker hoisted` or `--linker isolated`, or set it in your configuration file.
By default, Bun uses the **hoisted** installation strategy for all projects. To use isolated installs, you must explicitly specify the `--linker isolated` flag or set it in your configuration file.

## How isolated installs work

@@ -177,13 +174,14 @@ The main difference is that Bun uses symlinks in `node_modules` while pnpm uses

## When to use isolated installs

**Isolated installs are the default for workspaces.** You may want to explicitly enable them for single projects when:
**Use isolated installs when:**

- Working in monorepos with multiple packages
- Strict dependency management is required
- Preventing phantom dependencies is important
- Building libraries that need deterministic dependencies

**Switch to hoisted installs (including for workspaces) when:**
**Use hoisted installs when:**

- Working with legacy code that assumes flat `node_modules`
- Compatibility with existing build tools is required

@@ -38,21 +38,9 @@ In the root `package.json`, the `"workspaces"` key is used to indicate which sub
```

{% callout %}
**Glob support** — Bun supports full glob syntax in `"workspaces"`, including negative patterns (e.g. `!**/excluded/**`). See [here](https://bun.com/docs/api/glob#supported-glob-patterns) for a comprehensive list of supported syntax.
**Glob support** — Bun supports full glob syntax in `"workspaces"` (see [here](https://bun.com/docs/api/glob#supported-glob-patterns) for a comprehensive list of supported syntax), _except_ for exclusions (e.g. `!**/excluded/**`), which are not implemented yet.
{% /callout %}

```json
{
  "name": "my-project",
  "version": "1.0.0",
  "workspaces": [
    "packages/**",
    "!packages/**/test/**",
    "!packages/**/template/**"
  ]
}
```

Each workspace has its own `package.json`. When referencing other packages in the monorepo, semver or workspace protocols (e.g. `workspace:*`) can be used as the version field in your `package.json`.

```json

@@ -9,29 +9,20 @@ Run `bun init` to scaffold a new project. It's an interactive tool; for this tut

```bash
$ bun init
bun init helps you get started with a minimal project and tries to
guess sensible defaults. Press ^C anytime to quit.

? Select a project template - Press return to submit.
❯   Blank
    React
    Library
package name (quickstart):
entry point (index.ts):

✓ Select a project template: Blank

+ .gitignore
+ index.ts
+ tsconfig.json (for editor autocomplete)
+ README.md
Done! A package.json file was saved in the current directory.
+ index.ts
+ .gitignore
+ tsconfig.json (for editor auto-complete)
+ README.md

To get started, run:

  bun run index.ts

bun install v$BUN_LATEST_VERSION

+ @types/bun@$BUN_LATEST_VERSION
+ typescript@5.9.2

7 packages installed
bun run index.ts
```

Since our entry point is a `*.ts` file, Bun generates a `tsconfig.json` for you. If you're using plain JavaScript, it will generate a [`jsconfig.json`](https://code.visualstudio.com/docs/languages/jsconfig) instead.
@@ -97,15 +88,11 @@ Bun can also execute `"scripts"` from your `package.json`. Add the following scr
  "name": "quickstart",
  "module": "index.ts",
  "type": "module",
  "private": true,
+ "scripts": {
+   "start": "bun run index.ts"
+ },
  "devDependencies": {
    "@types/bun": "latest"
  },
  "peerDependencies": {
    "typescript": "^5"
  }
}
```

@@ -249,81 +249,6 @@ This is useful for:

The `--concurrent` CLI flag will override this setting when specified.

### `test.onlyFailures`

When enabled, only failed tests are displayed in the output. This helps reduce noise in large test suites by hiding passing tests. Default `false`.

```toml
[test]
onlyFailures = true
```

This is equivalent to using the `--only-failures` flag when running `bun test`.

### `test.reporter`

Configure the test reporter settings.

#### `test.reporter.dots`

Enable the dots reporter, which displays a compact output showing a dot for each test. Default `false`.

```toml
[test.reporter]
dots = true
```

#### `test.reporter.junit`

Enable JUnit XML reporting and specify the output file path.

```toml
[test.reporter]
junit = "test-results.xml"
```

This generates a JUnit XML report that can be consumed by CI systems and other tools.

### `test.randomize`

Run tests in random order. Default `false`.

```toml
[test]
randomize = true
```

This helps catch bugs related to test interdependencies by running tests in a different order each time. When combined with `seed`, the random order becomes reproducible.

The `--randomize` CLI flag will override this setting when specified.

### `test.seed`

Set the random seed for test randomization. This option requires `randomize` to be `true`.

```toml
[test]
randomize = true
seed = 2444615283
```

Using a seed makes the randomized test order reproducible across runs, which is useful for debugging flaky tests. When you encounter a test failure with randomization enabled, you can use the same seed to reproduce the exact test order.

The `--seed` CLI flag will override this setting when specified.

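For instance, to reproduce a failing randomized run, pass the seed reported for that run back on the command line (a usage sketch of the flags described above):

```bash
$ bun test --randomize --seed 2444615283
```
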
### `test.rerunEach`

Re-run each test file a specified number of times. Default `0` (run once).

```toml
[test]
rerunEach = 3
```

This is useful for catching flaky tests or non-deterministic behavior. Each test file will be executed the specified number of times.

The `--rerun-each` CLI flag will override this setting when specified.

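Equivalently, on the command line (a usage sketch of the flag named above):

```bash
$ bun test --rerun-each 3
```
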
## Package manager

Package management is a complex issue; to support a range of use cases, the behavior of `bun install` can be configured under the `[install]` section.
@@ -645,20 +570,6 @@ Valid values are:

{% /table %}

### `install.minimumReleaseAge`

Configure a minimum age (in seconds) for npm package versions. Package versions published more recently than this threshold will be filtered out during installation. Default is `null` (disabled).

```toml
[install]
# Only install package versions published at least 3 days ago
minimumReleaseAge = 259200
# These packages will bypass the 3-day minimum age requirement
minimumReleaseAgeExcludes = ["@types/bun", "typescript"]
```

For more details see [Minimum release age](https://bun.com/docs/cli/install#minimum-release-age) in the install documentation.

<!-- ## Debugging -->

<!--
@@ -686,7 +597,7 @@ editor = "code"

The `bun run` command can be configured under the `[run]` section. These apply to the `bun run` command, and to the `bun` command when running a file, executable, or script.

Currently, `bunfig.toml` is only automatically loaded for `bun run` in a local project (it doesn't check for a global `.bunfig.toml`).
Currently, `bunfig.toml` isn't always automatically loaded for `bun run` in a local project (it does check for a global `bunfig.toml`), so you might still need to pass `-c` or `-c=bunfig.toml` to use these settings.

### `run.shell` - use the system shell or Bun's shell

@@ -174,29 +174,6 @@ import { stuff } from "foo";

The full specification of this algorithm is officially documented in the [Node.js documentation](https://nodejs.org/api/modules.html); we won't rehash it here. Briefly: if you import `from "foo"`, Bun scans up the file system for a `node_modules` directory containing the package `foo`.

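As a sketch of that upward scan (the paths here are hypothetical):

```ts
// Importing "foo" from /app/src/index.ts checks, in order:
//   /app/src/node_modules/foo
//   /app/node_modules/foo
//   /node_modules/foo
// The first node_modules directory that contains the package wins.
import { stuff } from "foo";
```
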
### NODE_PATH

Bun supports `NODE_PATH` for additional module resolution directories:

```bash
NODE_PATH=./packages bun run src/index.js
```

```ts
// packages/foo/index.js
export const hello = "world";

// src/index.js
import { hello } from "foo";
```

Multiple paths use the platform's delimiter (`:` on Unix, `;` on Windows):

```bash
NODE_PATH=./packages:./lib bun run src/index.js # Unix/macOS
NODE_PATH=./packages;./lib bun run src/index.js # Windows
```

Once it finds the `foo` package, Bun reads the `package.json` to determine how the package should be imported. To determine the package's entrypoint, Bun first reads the `exports` field and checks for the following conditions.

```jsonc#package.json

@@ -65,34 +65,6 @@ Test files matching this pattern will behave as if the `--concurrent` flag was p

The `--concurrent` CLI flag will override this setting when specified, forcing all tests to run concurrently regardless of the glob pattern.

#### randomize

Run tests in random order to identify tests with hidden dependencies:

```toml
[test]
randomize = true
```

#### seed

Specify a seed for reproducible random test order. Requires `randomize = true`:

```toml
[test]
randomize = true
seed = 2444615283
```

#### rerunEach

Re-run each test file multiple times to identify flaky tests:

```toml
[test]
rerunEach = 3
```

### Coverage options

In addition to the options documented in the [coverage documentation](./coverage.md), the following options are available:

@@ -34,15 +34,6 @@ test/package-json-lint.test.ts:
Ran 4 tests across 1 files. [0.66ms]
```

### Dots Reporter

The dots reporter shows `.` for passing tests and `F` for failures—useful for large test suites.

```sh
$ bun test --dots
$ bun test --reporter=dots
```

### JUnit XML Reporter

For CI/CD environments, Bun supports generating JUnit XML reports. JUnit XML is a widely-adopted format for test results that can be parsed by many CI/CD systems, including GitLab, Jenkins, and others.

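One common invocation writes the report to a file (a sketch; check `bun test --help` for the exact flag names in your version):

```bash
$ bun test --reporter=junit --reporter-outfile=./test-results.xml
```
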
flake.lock (61 lines, generated)
@@ -1,61 +0,0 @@
{
  "nodes": {
    "flake-utils": {
      "inputs": {
        "systems": "systems"
      },
      "locked": {
        "lastModified": 1731533236,
        "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
        "owner": "numtide",
        "repo": "flake-utils",
        "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
        "type": "github"
      },
      "original": {
        "owner": "numtide",
        "repo": "flake-utils",
        "type": "github"
      }
    },
    "nixpkgs": {
      "locked": {
        "lastModified": 1759831965,
        "narHash": "sha256-vgPm2xjOmKdZ0xKA6yLXPJpjOtQPHfaZDRtH+47XEBo=",
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "c9b6fb798541223bbb396d287d16f43520250518",
        "type": "github"
      },
      "original": {
        "owner": "NixOS",
        "ref": "nixos-unstable",
        "repo": "nixpkgs",
        "type": "github"
      }
    },
    "root": {
      "inputs": {
        "flake-utils": "flake-utils",
        "nixpkgs": "nixpkgs"
      }
    },
    "systems": {
      "locked": {
        "lastModified": 1681028828,
        "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
        "owner": "nix-systems",
        "repo": "default",
        "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
        "type": "github"
      },
      "original": {
        "owner": "nix-systems",
        "repo": "default",
        "type": "github"
      }
    }
  },
  "root": "root",
  "version": 7
}

flake.nix (175 lines)
@@ -1,175 +0,0 @@
{
  description = "Bun - A fast all-in-one JavaScript runtime";

  # Uncomment this when you set up Cachix to enable automatic binary cache
  # nixConfig = {
  #   extra-substituters = [
  #     "https://bun-dev.cachix.org"
  #   ];
  #   extra-trusted-public-keys = [
  #     "bun-dev.cachix.org-1:REPLACE_WITH_YOUR_PUBLIC_KEY"
  #   ];
  # };

  inputs = {
    nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
    flake-utils.url = "github:numtide/flake-utils";
  };

  outputs = { self, nixpkgs, flake-utils }:
    flake-utils.lib.eachDefaultSystem (system:
      let
        pkgs = import nixpkgs {
          inherit system;
          config = {
            allowUnfree = true;
          };
        };

        # LLVM 19 - matching the bootstrap script (targets 19.1.7, actual version from nixpkgs-unstable)
        llvm = pkgs.llvm_19;
        clang = pkgs.clang_19;
        lld = pkgs.lld_19;

        # Node.js 24 - matching the bootstrap script (targets 24.3.0, actual version from nixpkgs-unstable)
        nodejs = pkgs.nodejs_24;

        # Build tools and dependencies
        packages = [
          # Core build tools
          pkgs.cmake # Expected: 3.30+ on nixos-unstable as of 2025-10
          pkgs.ninja
          pkgs.pkg-config
          pkgs.ccache

          # Compilers and toolchain - version pinned to LLVM 19
          clang
          llvm
          lld
          pkgs.gcc
          pkgs.rustc
          pkgs.cargo
          pkgs.go

          # Bun itself (for running build scripts via `bun bd`)
          pkgs.bun

          # Node.js - version pinned to 24
          nodejs

          # Python for build scripts
          pkgs.python3

          # Other build dependencies from bootstrap.sh
          pkgs.libtool
          pkgs.ruby
          pkgs.perl

          # Libraries
          pkgs.openssl
          pkgs.zlib
          pkgs.libxml2
          pkgs.libiconv

          # Development tools
          pkgs.git
          pkgs.curl
          pkgs.wget
          pkgs.unzip
          pkgs.xz

          # Additional dependencies for Linux
        ] ++ pkgs.lib.optionals pkgs.stdenv.isLinux [
          pkgs.gdb # for debugging core dumps (from bootstrap.sh line 1535)

          # Chromium dependencies for Puppeteer testing (from bootstrap.sh lines 1397-1483)
          # X11 and graphics libraries
          pkgs.xorg.libX11
          pkgs.xorg.libxcb
          pkgs.xorg.libXcomposite
          pkgs.xorg.libXcursor
          pkgs.xorg.libXdamage
          pkgs.xorg.libXext
          pkgs.xorg.libXfixes
          pkgs.xorg.libXi
          pkgs.xorg.libXrandr
          pkgs.xorg.libXrender
          pkgs.xorg.libXScrnSaver
          pkgs.xorg.libXtst
          pkgs.libxkbcommon
          pkgs.mesa
          pkgs.nspr
          pkgs.nss
          pkgs.cups
          pkgs.dbus
          pkgs.expat
          pkgs.fontconfig
          pkgs.freetype
          pkgs.glib
          pkgs.gtk3
          pkgs.pango
          pkgs.cairo
          pkgs.alsa-lib
          pkgs.at-spi2-atk
          pkgs.at-spi2-core
          pkgs.libgbm # for hardware acceleration
          pkgs.liberation_ttf # fonts-liberation
          pkgs.atk
          pkgs.libdrm
          pkgs.xorg.libxshmfence
          pkgs.gdk-pixbuf
        ] ++ pkgs.lib.optionals pkgs.stdenv.isDarwin [
          # macOS specific dependencies
          pkgs.darwin.apple_sdk.frameworks.CoreFoundation
          pkgs.darwin.apple_sdk.frameworks.CoreServices
          pkgs.darwin.apple_sdk.frameworks.Security
        ];

      in
      {
        devShells.default = (pkgs.mkShell.override {
          stdenv = pkgs.clangStdenv;
        }) {
          inherit packages;

          shellHook = ''
            # Set up build environment
            export CC="${pkgs.lib.getExe clang}"
            export CXX="${pkgs.lib.getExe' clang "clang++"}"
            export AR="${llvm}/bin/llvm-ar"
            export RANLIB="${llvm}/bin/llvm-ranlib"
            export CMAKE_C_COMPILER="$CC"
            export CMAKE_CXX_COMPILER="$CXX"
            export CMAKE_AR="$AR"
            export CMAKE_RANLIB="$RANLIB"
            export CMAKE_SYSTEM_PROCESSOR="$(uname -m)"
            export TMPDIR="''${TMPDIR:-/tmp}"
          '' + pkgs.lib.optionalString pkgs.stdenv.isLinux ''
            export LD="${pkgs.lib.getExe' lld "ld.lld"}"
            export NIX_CFLAGS_LINK="''${NIX_CFLAGS_LINK:+$NIX_CFLAGS_LINK }-fuse-ld=lld"
            export LD_LIBRARY_PATH="${pkgs.lib.makeLibraryPath packages}''${LD_LIBRARY_PATH:+:$LD_LIBRARY_PATH}"
          '' + ''

            # Print welcome message
            echo "====================================="
            echo "Bun Development Environment"
            echo "====================================="
            echo "Node.js: $(node --version 2>/dev/null || echo 'not found')"
            echo "Bun: $(bun --version 2>/dev/null || echo 'not found')"
            echo "Clang: $(clang --version 2>/dev/null | head -n1 || echo 'not found')"
            echo "CMake: $(cmake --version 2>/dev/null | head -n1 || echo 'not found')"
            echo "LLVM: ${llvm.version}"
            echo ""
            echo "Quick start:"
            echo "  bun bd                   # Build debug binary"
            echo "  bun bd test <test-file>  # Run tests"
            echo "====================================="
          '';

          # Additional environment variables
          CMAKE_BUILD_TYPE = "Debug";
          ENABLE_CCACHE = "1";
        };
      }
    );
}

@@ -8,8 +8,6 @@
# Thread::initializePlatformThreading() in ThreadingPOSIX.cpp) to the JS thread to suspend or resume
# it. So stopping the process would just create noise when debugging any long-running script.
process handle -p true -s false -n false SIGPWR
process handle -p true -s false -n false SIGUSR1
process handle -p true -s false -n false SIGUSR2

command script import -c lldb_pretty_printers.py
type category enable zig.lang

package.json (15 lines)
@@ -1,7 +1,7 @@
{
  "private": true,
  "name": "bun",
  "version": "1.3.2",
  "version": "1.2.24",
  "workspaces": [
    "./packages/bun-types",
    "./packages/@types/bun"
@@ -11,14 +11,14 @@
    "@lezer/cpp": "^1.1.3",
    "@types/bun": "workspace:*",
    "bun-tracestrings": "github:oven-sh/bun.report#912ca63e26c51429d3e6799aa2a6ab079b188fd8",
    "esbuild": "^0.21.5",
    "mitata": "^0.1.14",
    "esbuild": "^0.21.4",
    "mitata": "^0.1.11",
    "peechy": "0.4.34",
    "prettier": "^3.6.2",
    "prettier-plugin-organize-imports": "^4.3.0",
    "prettier": "^3.5.3",
    "prettier-plugin-organize-imports": "^4.0.0",
    "react": "^18.3.1",
    "react-dom": "^18.3.1",
    "source-map-js": "^1.2.1",
    "source-map-js": "^1.2.0",
    "typescript": "5.9.2"
  },
  "resolutions": {
@@ -86,10 +86,9 @@
    "clean:zig": "rm -rf build/debug/cache/zig build/debug/CMakeCache.txt 'build/debug/*.o' .zig-cache zig-out || true",
    "machine:linux:ubuntu": "./scripts/machine.mjs ssh --cloud=aws --arch=x64 --instance-type c7i.2xlarge --os=linux --distro=ubuntu --release=25.04",
    "machine:linux:debian": "./scripts/machine.mjs ssh --cloud=aws --arch=x64 --instance-type c7i.2xlarge --os=linux --distro=debian --release=12",
    "machine:linux:alpine": "./scripts/machine.mjs ssh --cloud=aws --arch=x64 --instance-type c7i.2xlarge --os=linux --distro=alpine --release=3.22",
    "machine:linux:alpine": "./scripts/machine.mjs ssh --cloud=aws --arch=x64 --instance-type c7i.2xlarge --os=linux --distro=alpine --release=3.21",
    "machine:linux:amazonlinux": "./scripts/machine.mjs ssh --cloud=aws --arch=x64 --instance-type c7i.2xlarge --os=linux --distro=amazonlinux --release=2023",
    "machine:windows:2019": "./scripts/machine.mjs ssh --cloud=aws --arch=x64 --instance-type c7i.2xlarge --os=windows --release=2019",
    "machine:freebsd": "./scripts/machine.mjs ssh --cloud=aws --arch=x64 --instance-type c7i.large --os=freebsd --release=14.3",
    "sync-webkit-source": "bun ./scripts/sync-webkit-source.ts"
  }
}

packages/bun-types/bun.d.ts (vendored, 1471 lines): file diff suppressed because it is too large
packages/bun-types/bun.ns.d.ts (vendored, 2 lines)
@@ -3,3 +3,5 @@ import * as BunModule from "bun";
declare global {
  export import Bun = BunModule;
}

export {};

packages/bun-types/deprecated.d.ts (vendored, 5 lines)
@@ -98,11 +98,6 @@ declare module "bun" {
  ): void;
}

/**
 * @deprecated Use {@link Serve.Options Bun.Serve.Options<T, R>} instead
 */
type ServeOptions<T = undefined, R extends string = never> = Serve.Options<T, R>;

/** @deprecated Use {@link SQL.Query Bun.SQL.Query} */
type SQLQuery<T = any> = SQL.Query<T>;

packages/bun-types/globals.d.ts (vendored, 76 lines)
@@ -7,13 +7,6 @@ declare module "bun" {
type LibWorkerOrBunWorker = LibDomIsLoaded extends true ? {} : Bun.Worker;
type LibEmptyOrBunWebSocket = LibDomIsLoaded extends true ? {} : Bun.WebSocket;

type LibEmptyOrNodeStreamWebCompressionStream = LibDomIsLoaded extends true
  ? {}
  : import("node:stream/web").CompressionStream;
type LibEmptyOrNodeStreamWebDecompressionStream = LibDomIsLoaded extends true
  ? {}
  : import("node:stream/web").DecompressionStream;

type LibPerformanceOrNodePerfHooksPerformance = LibDomIsLoaded extends true ? {} : import("perf_hooks").Performance;
type LibEmptyOrPerformanceEntry = LibDomIsLoaded extends true ? {} : import("node:perf_hooks").PerformanceEntry;
type LibEmptyOrPerformanceMark = LibDomIsLoaded extends true ? {} : import("node:perf_hooks").PerformanceMark;
@@ -278,30 +271,6 @@ declare var Event: {
  new (type: string, eventInitDict?: Bun.EventInit): Event;
};

/**
 * Unimplemented in Bun
 */
interface CompressionStream extends Bun.__internal.LibEmptyOrNodeStreamWebCompressionStream {}
/**
 * Unimplemented in Bun
 */
declare var CompressionStream: Bun.__internal.UseLibDomIfAvailable<
  "CompressionStream",
  typeof import("node:stream/web").CompressionStream
>;

/**
 * Unimplemented in Bun
 */
interface DecompressionStream extends Bun.__internal.LibEmptyOrNodeStreamWebCompressionStream {}
/**
 * Unimplemented in Bun
 */
declare var DecompressionStream: Bun.__internal.UseLibDomIfAvailable<
  "DecompressionStream",
  typeof import("node:stream/web").DecompressionStream
>;

interface EventTarget {
  /**
   * Adds a new handler for the `type` event. Any given `listener` is added only once per `type` and per `capture` option value.
@@ -891,10 +860,7 @@ interface ErrnoException extends Error {
  syscall?: string | undefined;
}

/**
 * An abnormal event (called an exception) which occurs as a result of calling a
 * method or accessing a property of a web API
 */
/** An abnormal event (called an exception) which occurs as a result of calling a method or accessing a property of a web API. */
interface DOMException extends Error {
  readonly message: string;
  readonly name: string;
@@ -924,35 +890,11 @@ interface DOMException extends Error {
  readonly INVALID_NODE_TYPE_ERR: 24;
  readonly DATA_CLONE_ERR: 25;
}
declare var DOMException: {
  prototype: DOMException;
  new (message?: string, name?: string): DOMException;
  readonly INDEX_SIZE_ERR: 1;
  readonly DOMSTRING_SIZE_ERR: 2;
  readonly HIERARCHY_REQUEST_ERR: 3;
  readonly WRONG_DOCUMENT_ERR: 4;
  readonly INVALID_CHARACTER_ERR: 5;
  readonly NO_DATA_ALLOWED_ERR: 6;
  readonly NO_MODIFICATION_ALLOWED_ERR: 7;
  readonly NOT_FOUND_ERR: 8;
  readonly NOT_SUPPORTED_ERR: 9;
  readonly INUSE_ATTRIBUTE_ERR: 10;
  readonly INVALID_STATE_ERR: 11;
  readonly SYNTAX_ERR: 12;
  readonly INVALID_MODIFICATION_ERR: 13;
  readonly NAMESPACE_ERR: 14;
  readonly INVALID_ACCESS_ERR: 15;
  readonly VALIDATION_ERR: 16;
  readonly TYPE_MISMATCH_ERR: 17;
  readonly SECURITY_ERR: 18;
  readonly NETWORK_ERR: 19;
  readonly ABORT_ERR: 20;
  readonly URL_MISMATCH_ERR: 21;
  readonly QUOTA_EXCEEDED_ERR: 22;
  readonly TIMEOUT_ERR: 23;
  readonly INVALID_NODE_TYPE_ERR: 24;
  readonly DATA_CLONE_ERR: 25;
};

// declare var DOMException: {
//   prototype: DOMException;
//   new (message?: string, name?: string): DOMException;
// };

declare function alert(message?: string): void;
declare function confirm(message?: string): boolean;
@@ -1663,6 +1605,12 @@ declare var AbortSignal: Bun.__internal.UseLibDomIfAvailable<
  }
>;

interface DOMException {}
declare var DOMException: Bun.__internal.UseLibDomIfAvailable<
  "DOMException",
  { prototype: DOMException; new (): DOMException }
>;

interface FormData {
  /** [MDN Reference](https://developer.mozilla.org/docs/Web/API/FormData/append) */
  append(name: string, value: string | Blob): void;

packages/bun-types/index.d.ts (vendored, 1 line)
@@ -21,7 +21,6 @@
/// <reference path="./redis.d.ts" />
/// <reference path="./shell.d.ts" />
/// <reference path="./experimental.d.ts" />
/// <reference path="./serve.d.ts" />
/// <reference path="./sql.d.ts" />
/// <reference path="./security.d.ts" />

packages/bun-types/serve.d.ts (vendored, 1272 lines): file diff suppressed because it is too large
packages/bun-types/shell.d.ts (vendored, 3 lines)
@@ -112,9 +112,8 @@ declare module "bun" {
 * By default, the shell will write to the current process's stdout and stderr, as well as buffering that output.
 *
 * This configures the shell to only buffer the output.
 * @param isQuiet - Whether to suppress output. Defaults to true.
 */
quiet(isQuiet?: boolean): this;
quiet(): this;

/**
 * Read from stdout as a string, line by line

packages/bun-types/test-globals.d.ts (vendored, 4 lines)
@@ -15,8 +15,10 @@ declare var beforeAll: typeof import("bun:test").beforeAll;
declare var beforeEach: typeof import("bun:test").beforeEach;
declare var afterEach: typeof import("bun:test").afterEach;
declare var afterAll: typeof import("bun:test").afterAll;
declare var setDefaultTimeout: typeof import("bun:test").setDefaultTimeout;
declare var mock: typeof import("bun:test").mock;
declare var spyOn: typeof import("bun:test").spyOn;
declare var jest: typeof import("bun:test").jest;
declare var vi: typeof import("bun:test").vi;
declare var xit: typeof import("bun:test").xit;
declare var xtest: typeof import("bun:test").xtest;
declare var xdescribe: typeof import("bun:test").xdescribe;

packages/bun-types/test.d.ts (vendored, 55 lines)
@@ -358,28 +358,6 @@ declare module "bun:test" {
  fn: (() => void | Promise<unknown>) | ((done: (err?: unknown) => void) => void),
  options?: HookOptions,
): void;
/**
 * Runs a function after a test finishes, including after all afterEach hooks.
 *
 * This is useful for cleanup tasks that need to run at the very end of a test,
 * after all other hooks have completed.
 *
 * Can only be called inside a test, not in describe blocks.
 *
 * @example
 * test("my test", () => {
 *   onTestFinished(() => {
 *     // This runs after all afterEach hooks
 *     console.log("Test finished!");
 *   });
 * });
 *
 * @param fn the function to run
 */
export function onTestFinished(
  fn: (() => void | Promise<unknown>) | ((done: (err?: unknown) => void) => void),
  options?: HookOptions,
): void;
/**
 * Sets the default timeout for all tests in the current file. If a test specifies a timeout, it will
 * override this value. The default timeout is 5000ms (5 seconds).
@@ -412,20 +390,11 @@
 */
repeats?: number;
}

namespace __internal {
  type IsTuple<T> = T extends readonly unknown[]
    ? number extends T["length"]
      ? false // It's an array with unknown length, not a tuple
      : true // It's an array with a fixed length (a tuple)
    : false; // Not an array at all

  /**
   * Accepts `[1, 2, 3] | ["a", "b", "c"]` and returns `[1 | "a", 2 | "b", 3 | "c"]`
   */
  type Flatten<T, Copy extends T = T> = { [Key in keyof T]: Copy[Key] };
}

type IsTuple<T> = T extends readonly unknown[]
  ? number extends T["length"]
    ? false // It's an array with unknown length, not a tuple
    : true // It's an array with a fixed length (a tuple)
  : false; // Not an array at all
/**
 * Runs a test.
 *
@@ -449,16 +418,10 @@
 *
 * @category Testing
 */
export interface Test<T extends ReadonlyArray<unknown>> {
export interface Test<T extends Readonly<any[]>> {
  (
    label: string,

    fn: (
      ...args: __internal.IsTuple<T> extends true
        ? [...table: __internal.Flatten<T>, done: (err?: unknown) => void]
        : T
    ) => void | Promise<unknown>,

    fn: (...args: IsTuple<T> extends true ? [...T, (err?: unknown) => void] : T) => void | Promise<unknown>,
    /**
     * - If a `number`, sets the timeout for the test in milliseconds.
     * - If an `object`, sets the options for the test.
@@ -550,8 +513,8 @@
   *
   * @param table Array of Arrays with the arguments that are passed into the test fn for each row.
   */
  each<T extends Readonly<[unknown, ...unknown[]]>>(table: readonly T[]): Test<T>;
  each<T extends unknown[]>(table: readonly T[]): Test<T>;
  each<T extends Readonly<[any, ...any[]]>>(table: readonly T[]): Test<[...T]>;
  each<T extends any[]>(table: readonly T[]): Test<[...T]>;
  each<T>(table: T[]): Test<[T]>;
}
/**

@@ -717,25 +717,6 @@ LIBUS_SOCKET_DESCRIPTOR bsd_accept_socket(LIBUS_SOCKET_DESCRIPTOR fd, struct bsd
    return LIBUS_SOCKET_ERROR;
  }

#ifdef __APPLE__
  /* A bug in XNU (the macOS kernel) can cause accept() to return a socket but addrlen=0.
   * This happens when an IPv4 connection is made to an IPv6 dual-stack listener
   * and the connection is immediately aborted (sends RST packet).
   * However, there might be buffered data from connectx() before the abort. */
  if (addr->len == 0) {
    /* Check if there's any pending data before discarding the socket */
    char peek_buf[1];
    ssize_t has_data = recv(accepted_fd, peek_buf, 1, MSG_PEEK | MSG_DONTWAIT);

    if (has_data <= 0) {
      /* No data available, socket is truly dead - discard it */
      bsd_close_socket(accepted_fd);
      continue; /* Try to accept the next connection */
    }
    /* If has_data > 0, let the socket through - there's buffered data to read */
  }
#endif

  break;
}

@@ -226,11 +226,11 @@ struct us_bun_socket_context_options_t {
  const char *ca_file_name;
  const char *ssl_ciphers;
  int ssl_prefer_low_memory_usage; /* Todo: rename to prefer_low_memory_usage and apply for TCP as well */
  const char * const *key;
  const char **key;
  unsigned int key_count;
  const char * const *cert;
  const char **cert;
  unsigned int cert_count;
  const char * const *ca;
  const char **ca;
  unsigned int ca_count;
  unsigned int secure_options;
  int reject_unauthorized;

@@ -303,10 +303,10 @@ public:
  auto context = (struct us_socket_context_t *)this->httpContext;
  struct us_socket_t *s = context->head_sockets;
  while (s) {
    // no matter the type of socket will always contain the AsyncSocketData
    auto *data = ((AsyncSocket<SSL> *) s)->getAsyncSocketData();
    HttpResponseData<SSL> *httpResponseData = HttpResponse<SSL>::getHttpResponseDataS(s);
    httpResponseData->shouldCloseOnceIdle = true;
    struct us_socket_t *next = s->next;
    if (data->isIdle) {
    if (httpResponseData->isIdle) {
      us_socket_close(SSL, s, LIBUS_SOCKET_CLOSE_CODE_CLEAN_SHUTDOWN, 0);
    }
    s = next;
@@ -641,10 +641,6 @@ public:
  httpContext->getSocketContextData()->onClientError = std::move(onClientError);
}

void setOnSocketUpgraded(HttpContextData<SSL>::OnSocketUpgradedCallback onUpgraded) {
  httpContext->getSocketContextData()->onSocketUpgraded = onUpgraded;
}

TemplatedApp &&run() {
  uWS::run();
  return std::move(*this);

@@ -83,7 +83,6 @@ struct AsyncSocketData {

  /* Or empty */
  AsyncSocketData() = default;
  bool isIdle = false;
};

}

@@ -253,7 +253,6 @@ private:
  /* Mark that we are inside the parser now */
  httpContextData->flags.isParsingHttp = true;
  httpResponseData->isIdle = false;

  // clients need to know the cursor after http parse, not servers!
  // how far did we read then? we need to know to continue with websocket parsing data? or?

@@ -43,11 +43,11 @@ struct alignas(16) HttpContextData {
  template <bool> friend struct TemplatedApp;
private:
  std::vector<MoveOnlyFunction<void(HttpResponse<SSL> *, int)>> filterHandlers;
  using OnSocketClosedCallback = void (*)(void* userData, int is_ssl, struct us_socket_t *rawSocket);
  using OnSocketDataCallback = void (*)(void* userData, int is_ssl, struct us_socket_t *rawSocket, const char *data, int length, bool last);
  using OnSocketDrainCallback = void (*)(void* userData, int is_ssl, struct us_socket_t *rawSocket);
  using OnSocketUpgradedCallback = void (*)(void* userData, int is_ssl, struct us_socket_t *rawSocket);
  using OnClientErrorCallback = MoveOnlyFunction<void(int is_ssl, struct us_socket_t *rawSocket, uWS::HttpParserError errorCode, char *rawPacket, int rawPacketLength)>;
  using OnSocketClosedCallback = void (*)(void* userData, int is_ssl, struct us_socket_t *rawSocket);

  MoveOnlyFunction<void(const char *hostname)> missingServerNameHandler;

@@ -66,7 +66,6 @@ private:
  OnSocketClosedCallback onSocketClosed = nullptr;
  OnSocketDrainCallback onSocketDrain = nullptr;
  OnSocketDataCallback onSocketData = nullptr;
  OnSocketUpgradedCallback onSocketUpgraded = nullptr;
  OnClientErrorCallback onClientError = nullptr;

  uint64_t maxHeaderSize = 0; // 0 means no limit
@@ -79,7 +78,6 @@ private:
  }

public:

  HttpFlags flags;
};

@@ -316,20 +316,14 @@ public:
  HttpContext<SSL> *httpContext = (HttpContext<SSL> *) us_socket_context(SSL, (struct us_socket_t *) this);

  /* Move any backpressure out of HttpResponse */
  auto* responseData = getHttpResponseData();
  BackPressure backpressure(std::move(((AsyncSocketData<SSL> *) responseData)->buffer));

  auto* socketData = responseData->socketData;
  HttpContextData<SSL> *httpContextData = httpContext->getSocketContextData();

  BackPressure backpressure(std::move(((AsyncSocketData<SSL> *) getHttpResponseData())->buffer));

  /* Destroy HttpResponseData */
  responseData->~HttpResponseData();
  getHttpResponseData()->~HttpResponseData();

  /* Before we adopt and potentially change socket, check if we are corked */
  bool wasCorked = Super::isCorked();

  /* Adopting a socket invalidates it, do not rely on it directly to carry any data */
  us_socket_t *usSocket = us_socket_context_adopt_socket(SSL, (us_socket_context_t *) webSocketContext, (us_socket_t *) this, sizeof(WebSocketData) + sizeof(UserData));
  WebSocket<SSL, true, UserData> *webSocket = (WebSocket<SSL, true, UserData> *) usSocket;
@@ -340,12 +334,10 @@ public:
  }

  /* Initialize websocket with any moved backpressure intact */
  webSocket->init(perMessageDeflate, compressOptions, std::move(backpressure), socketData, httpContextData->onSocketClosed);
  if (httpContextData->onSocketUpgraded) {
    httpContextData->onSocketUpgraded(socketData, SSL, usSocket);
  }
  webSocket->init(perMessageDeflate, compressOptions, std::move(backpressure));

  /* We should only mark this if inside the parser; if upgrading "async" we cannot set this */
  HttpContextData<SSL> *httpContextData = httpContext->getSocketContextData();
  if (httpContextData->flags.isParsingHttp) {
    /* We need to tell the Http parser that we changed socket */
    httpContextData->upgradedWebSocket = webSocket;
@@ -359,6 +351,7 @@ public:

  /* Move construct the UserData right before calling open handler */
  new (webSocket->getUserData()) UserData(std::forward<UserData>(userData));

  /* Emit open event and start the timeout */
  if (webSocketContextData->openHandler) {
@@ -478,7 +471,7 @@ public:
  return internalEnd({nullptr, 0}, 0, false, false, closeConnection);
}

void flushHeaders(bool flushImmediately = false) {
void flushHeaders() {

  writeStatus(HTTP_200_OK);

@@ -499,10 +492,6 @@ public:
    Super::write("\r\n", 2);
    httpResponseData->state |= HttpResponseData<SSL>::HTTP_WRITE_CALLED;
  }
  if (flushImmediately) {
    /* Uncork the socket to send data to the client immediately */
    this->uncork();
  }
}
/* Write parts of the response in chunking fashion. Starts timeout if failed. */
bool write(std::string_view data, size_t *writtenPtr = nullptr) {

@@ -109,6 +109,9 @@ struct HttpResponseData : AsyncSocketData<SSL>, HttpParser {
  uint8_t idleTimeout = 10; // default HTTP_TIMEOUT 10 seconds
  bool fromAncientRequest = false;
  bool isConnectRequest = false;
  bool isIdle = true;
  bool shouldCloseOnceIdle = false;

#ifdef UWS_WITH_PROXY
  ProxyParser proxyParser;

@@ -34,8 +34,8 @@ struct WebSocket : AsyncSocket<SSL> {
private:
  typedef AsyncSocket<SSL> Super;

  void *init(bool perMessageDeflate, CompressOptions compressOptions, BackPressure &&backpressure, void *socketData, WebSocketData::OnSocketClosedCallback onSocketClosed) {
    new (us_socket_ext(SSL, (us_socket_t *) this)) WebSocketData(perMessageDeflate, compressOptions, std::move(backpressure), socketData, onSocketClosed);
  void *init(bool perMessageDeflate, CompressOptions compressOptions, BackPressure &&backpressure) {
    new (us_socket_ext(SSL, (us_socket_t *) this)) WebSocketData(perMessageDeflate, compressOptions, std::move(backpressure));
    return this;
  }
public:

@@ -256,9 +256,6 @@ private:

  /* For whatever reason, if we already have emitted close event, do not emit it again */
  WebSocketData *webSocketData = (WebSocketData *) (us_socket_ext(SSL, s));
  if (webSocketData->socketData && webSocketData->onSocketClosed) {
    webSocketData->onSocketClosed(webSocketData->socketData, SSL, (us_socket_t *) s);
  }
  if (!webSocketData->isShuttingDown) {
    /* Emit close event */
    auto *webSocketContextData = (WebSocketContextData<SSL, USERDATA> *) us_socket_context_ext(SSL, us_socket_context(SSL, (us_socket_t *) s));

@@ -52,6 +52,7 @@ struct WebSocketContextData {
private:

public:

  /* This one points to the App's shared topicTree */
  TopicTree<TopicTreeMessage, TopicTreeBigMessage> *topicTree;

@@ -38,7 +38,6 @@ private:
  unsigned int controlTipLength = 0;
  bool isShuttingDown = 0;
  bool hasTimedOut = false;

  enum CompressionStatus : char {
    DISABLED,
    ENABLED,
@@ -53,12 +52,7 @@ private:
  /* We could be a subscriber */
  Subscriber *subscriber = nullptr;
public:
  using OnSocketClosedCallback = void (*)(void* userData, int is_ssl, struct us_socket_t *rawSocket);
  void *socketData = nullptr;
  /* node http compatibility callbacks */
  OnSocketClosedCallback onSocketClosed = nullptr;

  WebSocketData(bool perMessageDeflate, CompressOptions compressOptions, BackPressure &&backpressure, void *socketData, OnSocketClosedCallback onSocketClosed) : AsyncSocketData<false>(std::move(backpressure)), WebSocketState<true>() {
  WebSocketData(bool perMessageDeflate, CompressOptions compressOptions, BackPressure &&backpressure) : AsyncSocketData<false>(std::move(backpressure)), WebSocketState<true>() {
    compressionStatus = perMessageDeflate ? ENABLED : DISABLED;

    /* Initialize the dedicated sliding window(s) */
@@ -70,10 +64,6 @@ public:
      inflationStream = new InflationStream(compressOptions);
    }
  }
  // never close websocket sockets when closing idle connections
  this->isIdle = false;
  this->socketData = socketData;
  this->onSocketClosed = onSocketClosed;
}

~WebSocketData() {

@@ -1,347 +0,0 @@
#!/usr/bin/env bun

declare global {
  var process: {
    env: Record<string, string | undefined>;
  };
}

interface GitHubIssue {
  number: number;
  title: string;
  user: { id: number };
  created_at: string;
  pull_request?: object;
}

interface GitHubComment {
  id: number;
  body: string;
  created_at: string;
  user: { type?: string; id: number };
}

interface GitHubReaction {
  user: { id: number };
  content: string;
}

async function sleep(ms: number): Promise<void> {
  return new Promise(resolve => setTimeout(resolve, ms));
}

async function githubRequest<T>(
  endpoint: string,
  token: string,
  method: string = "GET",
  body?: any,
  retryCount: number = 0,
): Promise<T> {
  const maxRetries = 3;

  const response = await fetch(`https://api.github.com${endpoint}`, {
    method,
    headers: {
      Authorization: `Bearer ${token}`,
      Accept: "application/vnd.github+json",
      "User-Agent": "auto-close-duplicates-script",
      ...(body && { "Content-Type": "application/json" }),
    },
    ...(body && { body: JSON.stringify(body) }),
  });

  // Check rate limit headers
  const rateLimitRemaining = response.headers.get("x-ratelimit-remaining");
  const rateLimitReset = response.headers.get("x-ratelimit-reset");

  if (rateLimitRemaining && parseInt(rateLimitRemaining) < 100) {
    console.warn(`[WARNING] GitHub API rate limit low: ${rateLimitRemaining} requests remaining`);

    if (parseInt(rateLimitRemaining) < 10) {
      const resetTime = rateLimitReset ? parseInt(rateLimitReset) * 1000 : Date.now() + 60000;
      const waitTime = Math.max(0, resetTime - Date.now());
      console.warn(`[WARNING] Rate limit critically low, waiting ${Math.ceil(waitTime / 1000)}s until reset`);
      await sleep(waitTime + 1000); // Add 1s buffer
    }
  }

  // Handle rate limit errors with retry
  if (response.status === 429 || response.status === 403) {
    if (retryCount >= maxRetries) {
      throw new Error(`GitHub API rate limit exceeded after ${maxRetries} retries`);
    }

    const retryAfter = response.headers.get("retry-after");
    const waitTime = retryAfter ? parseInt(retryAfter) * 1000 : Math.min(1000 * Math.pow(2, retryCount), 32000);

    console.warn(
      `[WARNING] Rate limited (${response.status}), retry ${retryCount + 1}/${maxRetries} after ${waitTime}ms`,
    );
    await sleep(waitTime);

    return githubRequest<T>(endpoint, token, method, body, retryCount + 1);
  }

  if (!response.ok) {
    throw new Error(`GitHub API request failed: ${response.status} ${response.statusText}`);
  }

  return response.json();
}

async function fetchAllComments(
  owner: string,
  repo: string,
  issueNumber: number,
  token: string,
): Promise<GitHubComment[]> {
  const allComments: GitHubComment[] = [];
  let page = 1;
  const perPage = 100;

  while (true) {
    const comments: GitHubComment[] = await githubRequest(
      `/repos/${owner}/${repo}/issues/${issueNumber}/comments?per_page=${perPage}&page=${page}`,
      token,
    );

    if (comments.length === 0) break;

    allComments.push(...comments);
    page++;

    // Safety limit
    if (page > 20) break;
  }

  return allComments;
}

async function fetchAllReactions(
  owner: string,
  repo: string,
  commentId: number,
  token: string,
  authorId?: number,
): Promise<GitHubReaction[]> {
  const allReactions: GitHubReaction[] = [];
  let page = 1;
  const perPage = 100;

  while (true) {
    const reactions: GitHubReaction[] = await githubRequest(
      `/repos/${owner}/${repo}/issues/comments/${commentId}/reactions?per_page=${perPage}&page=${page}`,
      token,
    );

    if (reactions.length === 0) break;

    allReactions.push(...reactions);

    // Early exit if we're looking for a specific author and found their -1 reaction
    if (authorId && reactions.some(r => r.user.id === authorId && r.content === "-1")) {
      console.log(`[DEBUG] Found author thumbs down reaction, short-circuiting pagination`);
      break;
    }

    page++;

    // Safety limit
    if (page > 20) break;
  }

  return allReactions;
}

function escapeRegExp(str: string): string {
  return str.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
}

function extractDuplicateIssueNumber(commentBody: string, owner: string, repo: string): number | null {
  // Escape owner and repo to prevent ReDoS attacks
  const escapedOwner = escapeRegExp(owner);
  const escapedRepo = escapeRegExp(repo);

  // Try to match same-repo GitHub issue URL format first: https://github.com/owner/repo/issues/123
  const repoUrlPattern = new RegExp(`github\\.com/${escapedOwner}/${escapedRepo}/issues/(\\d+)`);
  let match = commentBody.match(repoUrlPattern);
  if (match) {
    return parseInt(match[1], 10);
  }

  // Fallback to #123 format (assumes same repo)
  match = commentBody.match(/#(\d+)/);
  if (match) {
    return parseInt(match[1], 10);
  }

  return null;
}

async function closeIssueAsDuplicate(
  owner: string,
  repo: string,
  issueNumber: number,
  duplicateOfNumber: number,
  token: string,
): Promise<void> {
  // Close the issue as duplicate and add the duplicate label
  await githubRequest(`/repos/${owner}/${repo}/issues/${issueNumber}`, token, "PATCH", {
    state: "closed",
    state_reason: "duplicate",
    labels: ["duplicate"],
  });

  await githubRequest(`/repos/${owner}/${repo}/issues/${issueNumber}/comments`, token, "POST", {
    body: `This issue has been automatically closed as a duplicate of #${duplicateOfNumber}.

If this is incorrect, please re-open this issue or create a new one.

🤖 Generated with [Claude Code](https://claude.ai/code)`,
  });
}

async function autoCloseDuplicates(): Promise<void> {
  console.log("[DEBUG] Starting auto-close duplicates script");

  const token = process.env.GITHUB_TOKEN;
  if (!token) {
    throw new Error("GITHUB_TOKEN environment variable is required");
  }
  console.log("[DEBUG] GitHub token found");

  // Parse GITHUB_REPOSITORY (format: "owner/repo")
  const repository = process.env.GITHUB_REPOSITORY || "oven-sh/bun";
  const [owner, repo] = repository.split("/");
  if (!owner || !repo) {
    throw new Error(`Invalid GITHUB_REPOSITORY format: ${repository}`);
  }
  console.log(`[DEBUG] Repository: ${owner}/${repo}`);

  const threeDaysAgo = new Date();
  threeDaysAgo.setDate(threeDaysAgo.getDate() - 3);
  console.log(`[DEBUG] Checking for duplicate comments older than: ${threeDaysAgo.toISOString()}`);

  console.log("[DEBUG] Fetching open issues created more than 3 days ago...");
  const allIssues: GitHubIssue[] = [];
  let page = 1;
  const perPage = 100;

  while (true) {
    const pageIssues: GitHubIssue[] = await githubRequest(
      `/repos/${owner}/${repo}/issues?state=open&per_page=${perPage}&page=${page}`,
      token,
    );

    if (pageIssues.length === 0) break;

    // Filter for issues created more than 3 days ago and exclude pull requests
    const oldEnoughIssues = pageIssues.filter(
      issue => !issue.pull_request && new Date(issue.created_at) <= threeDaysAgo,
    );

    allIssues.push(...oldEnoughIssues);
    page++;

    // Safety limit to avoid infinite loops
    if (page > 20) break;
  }

  const issues = allIssues;
  console.log(`[DEBUG] Found ${issues.length} open issues`);

  let processedCount = 0;
  let candidateCount = 0;

  for (const issue of issues) {
    processedCount++;
    console.log(`[DEBUG] Processing issue #${issue.number} (${processedCount}/${issues.length}): ${issue.title}`);

    console.log(`[DEBUG] Fetching comments for issue #${issue.number}...`);
    const comments = await fetchAllComments(owner, repo, issue.number, token);
    console.log(`[DEBUG] Issue #${issue.number} has ${comments.length} comments`);

    const dupeComments = comments.filter(
      comment =>
        comment.body.includes("Found") &&
        comment.body.includes("possible duplicate") &&
        comment.user?.type === "Bot" &&
        comment.body.includes("<!-- dedupe-bot:marker -->"),
    );
    console.log(`[DEBUG] Issue #${issue.number} has ${dupeComments.length} duplicate detection comments`);

    if (dupeComments.length === 0) {
      console.log(`[DEBUG] Issue #${issue.number} - no duplicate comments found, skipping`);
      continue;
    }

    const lastDupeComment = dupeComments[dupeComments.length - 1];
    const dupeCommentDate = new Date(lastDupeComment.created_at);
    console.log(
      `[DEBUG] Issue #${issue.number} - most recent duplicate comment from: ${dupeCommentDate.toISOString()}`,
    );

    if (dupeCommentDate > threeDaysAgo) {
      console.log(`[DEBUG] Issue #${issue.number} - duplicate comment is too recent, skipping`);
      continue;
    }
    console.log(
      `[DEBUG] Issue #${issue.number} - duplicate comment is old enough (${Math.floor(
        (Date.now() - dupeCommentDate.getTime()) / (1000 * 60 * 60 * 24),
      )} days)`,
    );

    // Filter for human comments (not bot comments) after the duplicate comment
    const commentsAfterDupe = comments.filter(
      comment => new Date(comment.created_at) > dupeCommentDate && comment.user?.type !== "Bot",
    );
    console.log(
      `[DEBUG] Issue #${issue.number} - ${commentsAfterDupe.length} human comments after duplicate detection`,
    );

    if (commentsAfterDupe.length > 0) {
      console.log(`[DEBUG] Issue #${issue.number} - has human activity after duplicate comment, skipping`);
      continue;
    }

    console.log(`[DEBUG] Issue #${issue.number} - checking reactions on duplicate comment...`);
    const reactions = await fetchAllReactions(owner, repo, lastDupeComment.id, token, issue.user.id);
    console.log(`[DEBUG] Issue #${issue.number} - duplicate comment has ${reactions.length} reactions`);

    const authorThumbsDown = reactions.some(
      reaction => reaction.user.id === issue.user.id && reaction.content === "-1",
    );
    console.log(`[DEBUG] Issue #${issue.number} - author thumbs down reaction: ${authorThumbsDown}`);

    if (authorThumbsDown) {
      console.log(`[DEBUG] Issue #${issue.number} - author disagreed with duplicate detection, skipping`);
      continue;
    }

    const duplicateIssueNumber = extractDuplicateIssueNumber(lastDupeComment.body, owner, repo);
    if (!duplicateIssueNumber) {
      console.log(`[DEBUG] Issue #${issue.number} - could not extract duplicate issue number from comment, skipping`);
      continue;
    }

    candidateCount++;
    const issueUrl = `https://github.com/${owner}/${repo}/issues/${issue.number}`;

    try {
      console.log(`[INFO] Auto-closing issue #${issue.number} as duplicate of #${duplicateIssueNumber}: ${issueUrl}`);
      await closeIssueAsDuplicate(owner, repo, issue.number, duplicateIssueNumber, token);
      console.log(`[SUCCESS] Successfully closed issue #${issue.number} as duplicate of #${duplicateIssueNumber}`);
    } catch (error) {
      console.error(`[ERROR] Failed to close issue #${issue.number} as duplicate: ${error}`);
    }
  }

  console.log(
    `[DEBUG] Script completed. Processed ${processedCount} issues, found ${candidateCount} candidates for auto-close`,
  );
}

autoCloseDuplicates().catch(console.error);

// Make it a module
export {};
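For reference, a minimal sketch (mine, not from the repo) of the retry schedule that the githubRequest helper above implements: a Retry-After header wins when present, otherwise the wait grows exponentially from 1s and is capped at 32s across maxRetries = 3 attempts.

// Illustration only, mirroring the constants in githubRequest above.
function backoffMs(retryCount: number, retryAfter?: string): number {
  // Retry-After (in seconds) takes priority when the server sends it.
  if (retryAfter) return parseInt(retryAfter, 10) * 1000;
  // Otherwise: 1s, 2s, 4s, ... capped at 32s.
  return Math.min(1000 * Math.pow(2, retryCount), 32000);
}

console.log([0, 1, 2].map(n => backoffMs(n))); // [ 1000, 2000, 4000 ]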
@@ -244,7 +244,7 @@ function Install-NodeJs {
}

function Install-Bun {
  Install-Package bun -Version "1.3.1"
  Install-Package bun -Version "1.2.17"
}

function Install-Cygwin {
@@ -907,7 +907,7 @@ setup_node_gyp_cache() {
}

bun_version_exact() {
  print "1.3.1"
  print "1.2.17"
}

install_bun() {
@@ -1060,11 +1060,12 @@ install_llvm() {
    install_packages "llvm@$(llvm_version)"
    ;;
  apk)
    # alpine doesn't have a lld19 package on 3.21 atm so use bare one for now
    install_packages \
      "llvm$(llvm_version)" \
      "clang$(llvm_version)" \
      "scudo-malloc" \
      "lld$(llvm_version)" \
      "lld" \
      "llvm$(llvm_version)-dev" # Ensures llvm-symbolizer is installed
    ;;
  esac
@@ -15,11 +15,8 @@ interface CloseAction {

let closeAction: CloseAction | null = null;

// Compute lowercase once for performance
const bodyLower = body.toLowerCase();

// Check for workers_terminated
if (bodyLower.includes("workers_terminated")) {
if (body.includes("workers_terminated")) {
  closeAction = {
    reason: "not_planned",
    comment: `Duplicate of #15964
@@ -28,10 +25,7 @@ We are tracking worker stability issues in https://github.com/oven-sh/bun/issues
}

// Check for better-sqlite3 with RunCommand or AutoCommand
else if (
  bodyLower.includes("better-sqlite3") &&
  (bodyLower.includes("runcommand") || bodyLower.includes("autocommand"))
) {
else if (body.includes("better-sqlite3") && (body.includes("[RunCommand]") || body.includes("[AutoCommand]"))) {
  closeAction = {
    reason: "not_planned",
    comment: `Duplicate of #4290.
@@ -39,45 +33,12 @@ better-sqlite3 is not supported yet in Bun due to missing V8 C++ APIs. For now,
  };
}

// Check for ENOTCONN with Transport and standalone_executable on v1.2.23
else if (
  bodyLower.includes("enotconn") &&
  bodyLower.includes("transport") &&
  bodyLower.includes("standalone_executable") &&
  /\bv?1\.2\.23\b/i.test(bodyLower)
) {
  closeAction = {
    reason: "completed",
    comment: `Duplicate of #23342.
This issue was fixed in Bun v1.3. Please upgrade to the latest version:

\`\`\`sh
bun upgrade
\`\`\``,
  };
}

// Check for WASM IPInt 32 stack traces - be very specific to avoid false positives
else if (bodyLower.includes("wasm_trampoline_wasm_ipint_call_wide32")) {
  closeAction = {
    reason: "not_planned",
    comment: `Duplicate of #17841.
This is a known issue with JavaScriptCore's WASM In-place interpreter on Linux x64. You can work around it by:

1. Setting \`BUN_JSC_useWasmIPInt=0\` to disable IPInt (reverts to older Wasm interpreter)
2. Using an aarch64 CPU instead of x86_64
3. Using \`BUN_JSC_jitPolicyScale=0\` to force JIT compilation (may impact startup performance)

We've reported this to WebKit and are tracking the issue in #17841.`,
  };
}

// Check for CPU architecture issues (Segmentation Fault/Illegal Instruction with no_avx)
else if (
  (bodyLower.includes("segmentation fault") ||
    bodyLower.includes("illegal instruction") ||
    bodyLower.includes("illegalinstruction")) &&
  bodyLower.includes("no_avx")
  (body.includes("Segmentation Fault") ||
    body.includes("Illegal Instruction") ||
    body.includes("IllegalInstruction")) &&
  body.includes("no_avx")
) {
  let comment = `Bun requires a CPU with the micro-architecture [\`nehalem\`](https://en.wikipedia.org/wiki/Nehalem_(microarchitecture)) or later (released in 2008). If you're using a CPU emulator like qemu, then try enabling x86-64-v2.`;
@@ -389,9 +389,6 @@ const aws = {
      owner = "amazon";
      name = `Windows_Server-${release || "*"}-English-Full-Base-*`;
    }
  } else if (os === "freebsd") {
    owner = "782442783595"; // upstream member of FreeBSD team, likely Colin Percival
    name = `FreeBSD ${release}-STABLE-${{ "aarch64": "arm64", "x64": "amd64" }[arch] ?? "amd64"}-* UEFI-PREFERRED cloud-init UFS`;
  }

  if (!name) {
@@ -403,7 +400,6 @@ const aws = {
    "owner-alias": owner,
    "name": name,
  });
  // console.table(baseImages.map(v => v.Name));

  if (!baseImages.length) {
    throw new Error(`No base image found: ${inspect(options)}`);
@@ -429,8 +425,6 @@ const aws = {
  }

  const { ImageId, Name, RootDeviceName, BlockDeviceMappings } = image;
  // console.table({ os, arch, instanceType, Name, ImageId });

  const blockDeviceMappings = BlockDeviceMappings.map(device => {
    const { DeviceName } = device;
    if (DeviceName === RootDeviceName) {
@@ -626,7 +620,6 @@ const aws = {
 * @property {SshKey[]} [sshKeys]
 * @property {string} [username]
 * @property {string} [password]
 * @property {Os} [os]
 */

/**
@@ -655,7 +648,6 @@ function getCloudInit(cloudInit) {
  const authorizedKeys = cloudInit["sshKeys"]?.map(({ publicKey }) => publicKey) || [];

  let sftpPath = "/usr/lib/openssh/sftp-server";
  let shell = "/bin/bash";
  switch (cloudInit["distro"]) {
    case "alpine":
      sftpPath = "/usr/lib/ssh/sftp-server";
@@ -666,18 +658,6 @@ function getCloudInit(cloudInit) {
      sftpPath = "/usr/libexec/openssh/sftp-server";
      break;
  }
  switch (cloudInit["os"]) {
    case "linux":
    case "windows":
      // handled above
      break;
    case "freebsd":
      sftpPath = "/usr/libexec/openssh/sftp-server";
      shell = "/bin/csh";
      break;
    default:
      throw new Error(`Unsupported os: ${cloudInit["os"]}`);
  }

  let users;
  if (username === "root") {
@@ -691,7 +671,7 @@ function getCloudInit(cloudInit) {
users:
  - name: ${username}
    sudo: ALL=(ALL) NOPASSWD:ALL
    shell: ${shell}
    shell: /bin/bash
    ssh_authorized_keys:
${authorizedKeys.map(key => `      - ${key}`).join("\n")}

@@ -1070,7 +1050,7 @@ function getCloud(name) {
}

/**
 * @typedef {"linux" | "darwin" | "windows" | "freebsd"} Os
 * @typedef {"linux" | "darwin" | "windows"} Os
 * @typedef {"aarch64" | "x64"} Arch
 * @typedef {"macos" | "windowsserver" | "debian" | "ubuntu" | "alpine" | "amazonlinux"} Distro
 */
@@ -1224,7 +1204,6 @@ async function main() {
  };

  let { detached, bootstrap, ci, os, arch, distro, release, features } = options;
  if (os === "freebsd") bootstrap = false;

  let name = `${os}-${arch}-${(release || "").replace(/\./g, "")}`;
@@ -80,7 +80,6 @@ function getNodeParallelTestTimeout(testPath) {
  if (testPath.includes("test-dns")) {
    return 90_000;
  }
  if (!isCI) return 60_000; // everything slower in debug mode
  return 20_000;
}

@@ -450,7 +449,7 @@ async function runTests() {

  if (parallelism > 1) {
    console.log(grouptitle);
    result = await fn(index);
    result = await fn();
  } else {
    result = await startGroup(grouptitle, fn);
  }
@@ -470,7 +469,6 @@ async function runTests() {
  const label = `${getAnsi(color)}[${index}/${total}] ${title} - ${error}${getAnsi("reset")}`;
  startGroup(label, () => {
    if (parallelism > 1) return;
    if (!isCI) return;
    process.stderr.write(stdoutPreview);
  });

@@ -581,11 +579,8 @@ async function runTests() {
  const title = relative(cwd, absoluteTestPath).replaceAll(sep, "/");
  if (isNodeTest(testPath)) {
    const testContent = readFileSync(absoluteTestPath, "utf-8");
    let runWithBunTest = title.includes("needs-test") || testContent.includes("node:test");
    // don't wanna have a filter for includes("bun:test") but these need our mocks
    runWithBunTest ||= title === "test/js/node/test/parallel/test-fs-append-file-flush.js";
    runWithBunTest ||= title === "test/js/node/test/parallel/test-fs-write-file-flush.js";
    runWithBunTest ||= title === "test/js/node/test/parallel/test-fs-write-stream-flush.js";
    const runWithBunTest =
      title.includes("needs-test") || testContent.includes("bun:test") || testContent.includes("node:test");
    const subcommand = runWithBunTest ? "test" : "run";
    const env = {
      FORCE_COLOR: "0",
@@ -673,9 +668,7 @@ async function runTests() {
  const title = join(relative(cwd, vendorPath), testPath).replace(/\\/g, "/");

  if (testRunner === "bun") {
    await runTest(title, index =>
      spawnBunTest(execPath, testPath, { cwd: vendorPath, env: { TEST_SERIAL_ID: index } }),
    );
    await runTest(title, () => spawnBunTest(execPath, testPath, { cwd: vendorPath }));
  } else {
    const testRunnerPath = join(cwd, "test", "runners", `${testRunner}.ts`);
    if (!existsSync(testRunnerPath)) {
@@ -1302,7 +1295,6 @@ async function spawnBun(execPath, { args, cwd, timeout, env, stdout, stderr }) {
 * @param {object} [opts]
 * @param {string} [opts.cwd]
 * @param {string[]} [opts.args]
 * @param {object} [opts.env]
 * @returns {Promise<TestResult>}
 */
async function spawnBunTest(execPath, testPath, opts = { cwd }) {
@@ -1336,7 +1328,6 @@ async function spawnBunTest(execPath, testPath, opts = { cwd }) {

  const env = {
    GITHUB_ACTIONS: "true", // always true so annotations are parsed
    ...opts["env"],
  };
  if ((basename(execPath).includes("asan") || !isCI) && shouldValidateExceptions(relative(cwd, absPath))) {
    env.BUN_JSC_validateExceptionChecks = "1";
@@ -1,60 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail

# This script updates SQLite amalgamation files with the required compiler flags.
# It downloads the SQLite source, configures it with necessary flags, builds the
# amalgamation, and copies the generated files to the Bun source tree.
#
# Usage:
#   ./scripts/update-sqlite-amalgamation.sh <version_number> <year>
#
# Example:
#   ./scripts/update-sqlite-amalgamation.sh 3500400 2025
#
# The version number is a 7-digit SQLite version (e.g., 3500400 for 3.50.4)
# The year is the release year found in the download URL

if [ $# -ne 2 ]; then
  echo "Usage: $0 <version_number> <year>"
  echo "Example: $0 3500400 2025"
  exit 1
fi

VERSION_NUM="$1"
YEAR="$2"

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"

# Create temporary directory
TEMP_DIR=$(mktemp -d)
trap 'rm -rf "$TEMP_DIR"' EXIT

cd "$TEMP_DIR"

echo "Downloading SQLite source version $VERSION_NUM from year $YEAR..."
DOWNLOAD_URL="https://sqlite.org/$YEAR/sqlite-src-$VERSION_NUM.zip"
echo "URL: $DOWNLOAD_URL"

wget -q "$DOWNLOAD_URL"
unzip -q "sqlite-src-$VERSION_NUM.zip"
cd "sqlite-src-$VERSION_NUM"

echo "Configuring SQLite with required flags..."
# These flags must be set during amalgamation generation for them to take effect
# in the parser and other compile-time generated code
CFLAGS="-DSQLITE_ENABLE_UPDATE_DELETE_LIMIT=1 -DSQLITE_ENABLE_COLUMN_METADATA=1"
./configure CFLAGS="$CFLAGS" > /dev/null 2>&1

echo "Building amalgamation..."
make sqlite3.c > /dev/null 2>&1

echo "Copying files to Bun source tree..."
# Add clang-format off directive and copy the amalgamation
echo "// clang-format off" > "$REPO_ROOT/src/bun.js/bindings/sqlite/sqlite3.c"
cat sqlite3.c >> "$REPO_ROOT/src/bun.js/bindings/sqlite/sqlite3.c"

echo "// clang-format off" > "$REPO_ROOT/src/bun.js/bindings/sqlite/sqlite3_local.h"
cat sqlite3.h >> "$REPO_ROOT/src/bun.js/bindings/sqlite/sqlite3_local.h"

echo "✓ Successfully updated SQLite amalgamation files"
@@ -1538,7 +1538,7 @@ export function parseNumber(value) {

/**
 * @param {string} string
 * @returns {"darwin" | "linux" | "windows" | "freebsd"}
 * @returns {"darwin" | "linux" | "windows"}
 */
export function parseOs(string) {
  if (/darwin|apple|mac/i.test(string)) {
@@ -1550,9 +1550,6 @@ export function parseOs(string) {
  if (/win/i.test(string)) {
    return "windows";
  }
  if (/freebsd/i.test(string)) {
    return "freebsd";
  }
  throw new Error(`Unsupported operating system: ${string}`);
}

@@ -1903,21 +1900,22 @@ export function getUsernameForDistro(distro) {
  if (/windows/i.test(distro)) {
    return "administrator";
  }

  if (/alpine|centos/i.test(distro)) {
    return "root";
  }

  if (/debian/i.test(distro)) {
    return "admin";
  }

  if (/ubuntu/i.test(distro)) {
    return "ubuntu";
  }

  if (/amazon|amzn|al\d+|rhel/i.test(distro)) {
    return "ec2-user";
  }
  if (/freebsd/i.test(distro)) {
    return "root";
  }

  throw new Error(`Unsupported distro: ${distro}`);
}
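For reference, a hypothetical usage sketch of the two helpers in this hunk (the inputs are my own; only branches visible above are exercised):

console.log(parseOs("aarch64-apple-darwin"));     // "darwin"
console.log(parseOs("windows-2019"));             // "windows"
console.log(getUsernameForDistro("alpine"));      // "root"
console.log(getUsernameForDistro("amazonlinux")); // "ec2-user"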
shell.nix
@@ -1,103 +0,0 @@
# Simple shell.nix for users without flakes enabled
# For reproducible builds with locked dependencies, use: nix develop
# This uses unpinned <nixpkgs> for simplicity; flake.nix provides version pinning via flake.lock
{ pkgs ? import <nixpkgs> {} }:

pkgs.mkShell rec {
  packages = with pkgs; [
    # Core build tools (matching bootstrap.sh)
    cmake
    ninja
    clang_19
    llvm_19
    lld_19
    nodejs_24
    bun
    rustc
    cargo
    go
    python3
    ccache
    pkg-config
    gnumake
    libtool
    ruby
    perl

    # Libraries
    openssl
    zlib
    libxml2

    # Development tools
    git
    curl
    wget
    unzip
    xz

    # Linux-specific: gdb and Chromium deps for testing
  ] ++ pkgs.lib.optionals pkgs.stdenv.isLinux [
    gdb
    # Chromium dependencies for Puppeteer tests
    xorg.libX11
    xorg.libxcb
    xorg.libXcomposite
    xorg.libXcursor
    xorg.libXdamage
    xorg.libXext
    xorg.libXfixes
    xorg.libXi
    xorg.libXrandr
    xorg.libXrender
    xorg.libXScrnSaver
    xorg.libXtst
    libxkbcommon
    mesa
    nspr
    nss
    cups
    dbus
    expat
    fontconfig
    freetype
    glib
    gtk3
    pango
    cairo
    alsa-lib
    at-spi2-atk
    at-spi2-core
    libgbm
    liberation_ttf
    atk
    libdrm
    xorg.libxshmfence
    gdk-pixbuf
  ];

  shellHook = ''
    export CC="${pkgs.lib.getExe pkgs.clang_19}"
    export CXX="${pkgs.lib.getExe' pkgs.clang_19 "clang++"}"
    export AR="${pkgs.llvm_19}/bin/llvm-ar"
    export RANLIB="${pkgs.llvm_19}/bin/llvm-ranlib"
    export CMAKE_C_COMPILER="$CC"
    export CMAKE_CXX_COMPILER="$CXX"
    export CMAKE_AR="$AR"
    export CMAKE_RANLIB="$RANLIB"
    export CMAKE_SYSTEM_PROCESSOR=$(uname -m)
    export TMPDIR=''${TMPDIR:-/tmp}
  '' + pkgs.lib.optionalString pkgs.stdenv.isLinux ''
    export LD="${pkgs.lib.getExe' pkgs.lld_19 "ld.lld"}"
    export NIX_CFLAGS_LINK="''${NIX_CFLAGS_LINK:+$NIX_CFLAGS_LINK }-fuse-ld=lld"
    export LD_LIBRARY_PATH="${pkgs.lib.makeLibraryPath packages}''${LD_LIBRARY_PATH:+:$LD_LIBRARY_PATH}"
  '' + ''

    echo "====================================="
    echo "Bun Development Environment (Nix)"
    echo "====================================="
    echo "To build: bun bd"
    echo "To test: bun bd test <test-file>"
    echo "====================================="
  '';
}
@@ -1 +0,0 @@
CLAUDE.md
@@ -1,11 +0,0 @@
## Zig

Syntax reminders:

- Private fields are fully supported in Zig with the `#` prefix. `struct { #foo: u32 };` makes a struct with a private field named `#foo`.
- Decl literals in Zig are recommended. `const decl: Decl = .{ .binding = 0, .value = 0 };`

Conventions:

- Prefer `@import` at the **bottom** of the file, but the auto formatter will move them so you don't need to worry about it.
- You must be patient with the build.
@@ -114,9 +114,6 @@ pub fn exit(code: u32) noreturn {
        bun.debug_allocator_data.backing = null;
    }

    // Flush output before exiting to ensure all messages are visible
    Output.flush();

    switch (Environment.os) {
        .mac => std.c.exit(@bitCast(code)),
        .windows => {
@@ -431,27 +431,6 @@ pub const StandaloneModuleGraph = struct {
        }
    };

    if (comptime bun.Environment.is_canary or bun.Environment.isDebug) {
        if (bun.env_var.BUN_FEATURE_FLAG_DUMP_CODE.get()) |dump_code_dir| {
            const buf = bun.path_buffer_pool.get();
            defer bun.path_buffer_pool.put(buf);
            const dest_z = bun.path.joinAbsStringBufZ(dump_code_dir, buf, &.{dest_path}, .auto);

            // Scoped block to handle dump failures without skipping module emission
            dump: {
                const file = bun.sys.File.makeOpen(dest_z, bun.O.WRONLY | bun.O.CREAT | bun.O.TRUNC, 0o664).unwrap() catch |err| {
                    Output.prettyErrorln("<r><red>error<r><d>:<r> failed to open {s}: {s}", .{ dest_path, @errorName(err) });
                    break :dump;
                };
                defer file.close();
                file.writeAll(output_file.value.buffer.bytes).unwrap() catch |err| {
                    Output.prettyErrorln("<r><red>error<r><d>:<r> failed to write {s}: {s}", .{ dest_path, @errorName(err) });
                    break :dump;
                };
            }
        }
    }

    var module = CompiledModuleGraphFile{
        .name = string_builder.fmtAppendCountZ("{s}{s}", .{
            prefix,
@@ -525,58 +504,25 @@ pub const StandaloneModuleGraph = struct {

    pub const CompileResult = union(enum) {
        success: void,
        error_message: []const u8,

        err: Error,

        const Error = union(enum) {
            message: []const u8,
            reason: Reason,

            pub const Reason = enum {
                no_entry_point,
                no_output_files,

                pub fn message(this: Reason) []const u8 {
                    return switch (this) {
                        .no_entry_point => "No entry point found for compilation",
                        .no_output_files => "No output files to bundle",
                    };
                }
            };

            pub fn slice(this: *const Error) []const u8 {
                return switch (this.*) {
                    .message => this.message,
                    .reason => this.reason.message(),
                };
            }
        };

        pub fn fail(reason: Error.Reason) CompileResult {
            return .{ .err = .{ .reason = reason } };
        }

        pub fn failFmt(comptime fmt: []const u8, args: anytype) CompileResult {
            return .{ .err = .{ .message = bun.handleOom(std.fmt.allocPrint(bun.default_allocator, fmt, args)) } };
        pub fn fail(msg: []const u8) CompileResult {
            return .{ .error_message = msg };
        }

        pub fn deinit(this: *const @This()) void {
            switch (this.*) {
                .success => {},
                .err => switch (this.err) {
                    .message => bun.default_allocator.free(this.err.message),
                    .reason => {},
                },
            if (this.* == .error_message) {
                bun.default_allocator.free(this.error_message);
            }
        }
    };

    pub fn inject(bytes: []const u8, self_exe: [:0]const u8, inject_options: InjectOptions, target: *const CompileTarget) bun.FileDescriptor {
        var buf: bun.PathBuffer = undefined;
        var zname: [:0]const u8 = bun.fs.FileSystem.tmpname("bun-build", &buf, @as(u64, @bitCast(std.time.milliTimestamp()))) catch |err| {
        var zname: [:0]const u8 = bun.span(bun.fs.FileSystem.instance.tmpname("bun-build", &buf, @as(u64, @bitCast(std.time.milliTimestamp()))) catch |err| {
            Output.prettyErrorln("<r><red>error<r><d>:<r> failed to get temporary file name: {s}", .{@errorName(err)});
            return bun.invalid_fd;
        };
        });

        const cleanup = struct {
            pub fn toClean(name: [:0]const u8, fd: bun.FileDescriptor) void {
@@ -984,9 +930,9 @@ pub const StandaloneModuleGraph = struct {
        self_exe_path: ?[]const u8,
    ) !CompileResult {
        const bytes = toBytes(allocator, module_prefix, output_files, output_format, compile_exec_argv) catch |err| {
            return CompileResult.failFmt("failed to generate module graph bytes: {s}", .{@errorName(err)});
            return CompileResult.fail(std.fmt.allocPrint(allocator, "failed to generate module graph bytes: {s}", .{@errorName(err)}) catch "failed to generate module graph bytes");
        };
        if (bytes.len == 0) return CompileResult.fail(.no_output_files);
        if (bytes.len == 0) return CompileResult.fail("no output files to bundle");
        defer allocator.free(bytes);

        var free_self_exe = false;
@@ -995,26 +941,28 @@ pub const StandaloneModuleGraph = struct {
            break :brk bun.handleOom(allocator.dupeZ(u8, path));
        } else if (target.isDefault())
            bun.selfExePath() catch |err| {
                return CompileResult.failFmt("failed to get self executable path: {s}", .{@errorName(err)});
                return CompileResult.fail(std.fmt.allocPrint(allocator, "failed to get self executable path: {s}", .{@errorName(err)}) catch "failed to get self executable path");
            }
        else blk: {
            var exe_path_buf: bun.PathBuffer = undefined;
            const version_str = bun.handleOom(std.fmt.allocPrintZ(allocator, "{}", .{target}));
            defer allocator.free(version_str);

            var version_str_buf: [1024]u8 = undefined;
            const version_str = std.fmt.bufPrintZ(&version_str_buf, "{}", .{target}) catch {
                return CompileResult.fail("failed to format target version string");
            };
            var needs_download: bool = true;
            const dest_z = target.exePath(&exe_path_buf, version_str, env, &needs_download);

            if (needs_download) {
                target.downloadToPath(env, allocator, dest_z) catch |err| {
                    return switch (err) {
                        error.TargetNotFound => CompileResult.failFmt("Target platform '{}' is not available for download. Check if this version of Bun supports this target.", .{target}),
                        error.NetworkError => CompileResult.failFmt("Network error downloading executable for '{}'. Check your internet connection and proxy settings.", .{target}),
                        error.InvalidResponse => CompileResult.failFmt("Downloaded file for '{}' appears to be corrupted. Please try again.", .{target}),
                        error.ExtractionFailed => CompileResult.failFmt("Failed to extract executable for '{}'. The download may be incomplete.", .{target}),
                        error.UnsupportedTarget => CompileResult.failFmt("Target '{}' is not supported", .{target}),
                        else => CompileResult.failFmt("Failed to download '{}': {s}", .{ target, @errorName(err) }),
                    const msg = switch (err) {
                        error.TargetNotFound => std.fmt.allocPrint(allocator, "Target platform '{}' is not available for download. Check if this version of Bun supports this target.", .{target}) catch "Target platform not available for download",
                        error.NetworkError => std.fmt.allocPrint(allocator, "Network error downloading executable for '{}'. Check your internet connection and proxy settings.", .{target}) catch "Network error downloading executable",
                        error.InvalidResponse => std.fmt.allocPrint(allocator, "Downloaded file for '{}' appears to be corrupted. Please try again.", .{target}) catch "Downloaded file is corrupted",
                        error.ExtractionFailed => std.fmt.allocPrint(allocator, "Failed to extract executable for '{}'. The download may be incomplete.", .{target}) catch "Failed to extract downloaded executable",
                        error.UnsupportedTarget => std.fmt.allocPrint(allocator, "Target '{}' is not supported", .{target}) catch "Unsupported target",
                        else => std.fmt.allocPrint(allocator, "Failed to download '{}': {s}", .{ target, @errorName(err) }) catch "Download failed",
                    };
                    return CompileResult.fail(msg);
                };
            }

@@ -1044,7 +992,7 @@ pub const StandaloneModuleGraph = struct {
        // Get the current path of the temp file
        var temp_buf: bun.PathBuffer = undefined;
        const temp_path = bun.getFdPath(fd, &temp_buf) catch |err| {
            return CompileResult.failFmt("Failed to get temp file path: {s}", .{@errorName(err)});
            return CompileResult.fail(std.fmt.allocPrint(allocator, "Failed to get temp file path: {s}", .{@errorName(err)}) catch "Failed to get temp file path");
        };

        // Build the absolute destination path
@@ -1052,7 +1000,7 @@ pub const StandaloneModuleGraph = struct {
        // Get the current working directory and join with outfile
        var cwd_buf: bun.PathBuffer = undefined;
        const cwd_path = bun.getcwd(&cwd_buf) catch |err| {
            return CompileResult.failFmt("Failed to get current directory: {s}", .{@errorName(err)});
            return CompileResult.fail(std.fmt.allocPrint(allocator, "Failed to get current directory: {s}", .{@errorName(err)}) catch "Failed to get current directory");
        };
        const dest_path = if (std.fs.path.isAbsolute(outfile))
            outfile
@@ -1080,12 +1028,12 @@ pub const StandaloneModuleGraph = struct {
        const err = bun.windows.Win32Error.get();
        if (err.toSystemErrno()) |sys_err| {
            if (sys_err == .EISDIR) {
                return CompileResult.failFmt("{s} is a directory. Please choose a different --outfile or delete the directory", .{outfile});
                return CompileResult.fail(std.fmt.allocPrint(allocator, "{s} is a directory. Please choose a different --outfile or delete the directory", .{outfile}) catch "outfile is a directory");
            } else {
                return CompileResult.failFmt("failed to move executable to {s}: {s}", .{ dest_path, @tagName(sys_err) });
                return CompileResult.fail(std.fmt.allocPrint(allocator, "failed to move executable to {s}: {s}", .{ dest_path, @tagName(sys_err) }) catch "failed to move executable");
            }
        } else {
            return CompileResult.failFmt("failed to move executable to {s}", .{dest_path});
            return CompileResult.fail(std.fmt.allocPrint(allocator, "failed to move executable to {s}", .{dest_path}) catch "failed to move executable");
        }
    }

@@ -1107,7 +1055,7 @@ pub const StandaloneModuleGraph = struct {
            windows_options.description,
            windows_options.copyright,
        ) catch |err| {
            return CompileResult.failFmt("Failed to set Windows metadata: {s}", .{@errorName(err)});
            return CompileResult.fail(std.fmt.allocPrint(allocator, "Failed to set Windows metadata: {s}", .{@errorName(err)}) catch "Failed to set Windows metadata");
        };
    }
    return .success;
@@ -1115,14 +1063,14 @@ pub const StandaloneModuleGraph = struct {

    var buf: bun.PathBuffer = undefined;
    const temp_location = bun.getFdPath(fd, &buf) catch |err| {
        return CompileResult.failFmt("failed to get path for fd: {s}", .{@errorName(err)});
        return CompileResult.fail(std.fmt.allocPrint(allocator, "failed to get path for fd: {s}", .{@errorName(err)}) catch "failed to get path for file descriptor");
    };
    const temp_posix = std.posix.toPosixPath(temp_location) catch |err| {
        return CompileResult.failFmt("path too long: {s}", .{@errorName(err)});
        return CompileResult.fail(std.fmt.allocPrint(allocator, "path too long: {s}", .{@errorName(err)}) catch "path too long");
    };
    const outfile_basename = std.fs.path.basename(outfile);
    const outfile_posix = std.posix.toPosixPath(outfile_basename) catch |err| {
        return CompileResult.failFmt("outfile name too long: {s}", .{@errorName(err)});
        return CompileResult.fail(std.fmt.allocPrint(allocator, "outfile name too long: {s}", .{@errorName(err)}) catch "outfile name too long");
    };

    bun.sys.moveFileZWithHandle(
@@ -1138,9 +1086,9 @@ pub const StandaloneModuleGraph = struct {
        _ = Syscall.unlink(&temp_posix);

        if (err == error.IsDir or err == error.EISDIR) {
            return CompileResult.failFmt("{s} is a directory. Please choose a different --outfile or delete the directory", .{outfile});
            return CompileResult.fail(std.fmt.allocPrint(allocator, "{s} is a directory. Please choose a different --outfile or delete the directory", .{outfile}) catch "outfile is a directory");
        } else {
            return CompileResult.failFmt("failed to rename {s} to {s}: {s}", .{ temp_location, outfile, @errorName(err) });
            return CompileResult.fail(std.fmt.allocPrint(allocator, "failed to rename {s} to {s}: {s}", .{ temp_location, outfile, @errorName(err) }) catch "failed to rename file");
        }
    };

@@ -1328,7 +1276,7 @@ pub const StandaloneModuleGraph = struct {
    var whichbuf: bun.PathBuffer = undefined;
    if (bun.which(
        &whichbuf,
        bun.env_var.PATH.get() orelse return error.FileNotFound,
        bun.getenvZ("PATH") orelse return error.FileNotFound,
        "",
        bun.argv[0],
    )) |path| {
@@ -1551,7 +1499,7 @@ const w = std.os.windows;
const bun = @import("bun");
const Environment = bun.Environment;
const Output = bun.Output;
const SourceMap = bun.SourceMap;
const SourceMap = bun.sourcemap;
const StringPointer = bun.StringPointer;
const Syscall = bun.sys;
const macho = bun.macho;
src/Watcher.zig
@@ -95,18 +95,9 @@ pub fn init(comptime T: type, ctx: *T, fs: *bun.fs.FileSystem, allocator: std.me

    try Platform.init(&watcher.platform, fs.top_level_dir);

    // Initialize trace file if BUN_WATCHER_TRACE env var is set
    WatcherTrace.init();

    return watcher;
}

/// Write trace events to the trace file if enabled.
/// This runs on the watcher thread, so no locking is needed.
pub fn writeTraceEvents(this: *Watcher, events: []WatchEvent, changed_files: []?[:0]u8) void {
    WatcherTrace.writeEvents(this, events, changed_files);
}

pub fn start(this: *Watcher) !void {
    bun.assert(this.watchloop_handle == null);
    this.thread = try std.Thread.spawn(.{}, threadMain, .{this});
@@ -253,9 +244,6 @@ fn threadMain(this: *Watcher) !void {
    }
    this.watchlist.deinit(this.allocator);

    // Close trace file if open
    WatcherTrace.deinit();

    const allocator = this.allocator;
    allocator.destroy(this);
}
@@ -312,48 +300,6 @@ fn watchLoop(this: *Watcher) bun.sys.Maybe(void) {
    return .success;
}

/// Register a file descriptor with kqueue on macOS without validation.
///
/// Preconditions (caller must ensure):
/// - `fd` is a valid, open file descriptor
/// - `fd` is not already registered with this kqueue
/// - `watchlist_id` matches the entry's index in the watchlist
///
/// Note: This function does not propagate kevent registration errors.
/// If registration fails, the file will not be watched but no error is returned.
pub fn addFileDescriptorToKQueueWithoutChecks(this: *Watcher, fd: bun.FileDescriptor, watchlist_id: usize) void {
    const KEvent = std.c.Kevent;

    // https://developer.apple.com/library/archive/documentation/System/Conceptual/ManPages_iPhoneOS/man2/kqueue.2.html
    var event = std.mem.zeroes(KEvent);

    event.flags = std.c.EV.ADD | std.c.EV.CLEAR | std.c.EV.ENABLE;
    // we want to know about the vnode
    event.filter = std.c.EVFILT.VNODE;

    event.fflags = std.c.NOTE.WRITE | std.c.NOTE.RENAME | std.c.NOTE.DELETE;

    // id
    event.ident = @intCast(fd.native());

    // Store the index for fast filtering later
    event.udata = @as(usize, @intCast(watchlist_id));
    var events: [1]KEvent = .{event};

    // This took a lot of work to figure out the right permutation
    // Basically:
    // - We register the event here.
    // our while(true) loop above receives notification of changes to any of the events created here.
    _ = std.posix.system.kevent(
        this.platform.fd.unwrap().?.native(),
        @as([]KEvent, events[0..1]).ptr,
        1,
        @as([]KEvent, events[0..1]).ptr,
        0,
        null,
    );
}

fn appendFileAssumeCapacity(
    this: *Watcher,
    fd: bun.FileDescriptor,
@@ -392,7 +338,36 @@ fn appendFileAssumeCapacity(
    };

    if (comptime Environment.isMac) {
        this.addFileDescriptorToKQueueWithoutChecks(fd, watchlist_id);
        const KEvent = std.c.Kevent;

        // https://developer.apple.com/library/archive/documentation/System/Conceptual/ManPages_iPhoneOS/man2/kqueue.2.html
        var event = std.mem.zeroes(KEvent);

        event.flags = std.c.EV.ADD | std.c.EV.CLEAR | std.c.EV.ENABLE;
        // we want to know about the vnode
        event.filter = std.c.EVFILT.VNODE;

        event.fflags = std.c.NOTE.WRITE | std.c.NOTE.RENAME | std.c.NOTE.DELETE;

        // id
        event.ident = @intCast(fd.native());

        // Store the hash for fast filtering later
        event.udata = @as(usize, @intCast(watchlist_id));
        var events: [1]KEvent = .{event};

        // This took a lot of work to figure out the right permutation
        // Basically:
        // - We register the event here.
        // our while(true) loop above receives notification of changes to any of the events created here.
        _ = std.posix.system.kevent(
            this.platform.fd.unwrap().?.native(),
            @as([]KEvent, events[0..1]).ptr,
            1,
            @as([]KEvent, events[0..1]).ptr,
            0,
            null,
        );
    } else if (comptime Environment.isLinux) {
        // var file_path_to_use_ = std.mem.trimRight(u8, file_path_, "/");
        // var buf: [bun.MAX_PATH_BYTES+1]u8 = undefined;
@@ -625,78 +600,6 @@ pub fn addDirectory(
    return this.appendDirectoryAssumeCapacity(fd, file_path, hash, clone_file_path);
}

/// Lazily watch a file by path (slow path).
///
/// This function is used when a file needs to be watched but was not
/// encountered during the normal import graph traversal. On macOS, it
/// opens a file descriptor with O_EVTONLY to obtain an inode reference.
///
/// Thread-safe: uses internal locking to prevent race conditions.
///
/// Returns:
/// - true if the file is successfully added to the watchlist or already watched
/// - false if the file cannot be opened or added to the watchlist
pub fn addFileByPathSlow(
    this: *Watcher,
    file_path: string,
    loader: options.Loader,
) bool {
    if (file_path.len == 0) return false;
    const hash = getHash(file_path);

    // Check if already watched (with lock to avoid race with removal)
    {
        this.mutex.lock();
        const already_watched = this.indexOf(hash) != null;
        this.mutex.unlock();

        if (already_watched) {
            return true;
        }
    }

    // Only open fd if we might need it
    var fd: bun.FileDescriptor = bun.invalid_fd;
    if (Environment.isMac) {
        const path_z = std.posix.toPosixPath(file_path) catch return false;
        switch (bun.sys.open(&path_z, bun.c.O_EVTONLY, 0)) {
            .result => |opened| fd = opened,
            .err => return false,
        }
    }

    const res = this.addFile(fd, file_path, hash, loader, bun.invalid_fd, null, true);
    switch (res) {
        .result => {
            // On macOS, addFile may have found the file already watched (race)
            // and returned success without using our fd. Close it if unused.
            if ((comptime Environment.isMac) and fd.isValid()) {
                this.mutex.lock();
                const maybe_idx = this.indexOf(hash);
                const stored_fd = if (maybe_idx) |idx|
                    this.watchlist.items(.fd)[idx]
                else
                    bun.invalid_fd;
                this.mutex.unlock();

                // Only close if entry exists and stored fd differs from ours.
                // Race scenarios:
                // 1. Entry removed (maybe_idx == null): our fd was stored then closed by flushEvictions → don't close
                // 2. Entry exists with different fd: another thread added entry, addFile didn't use our fd → close ours
                // 3. Entry exists with same fd: our fd was stored → don't close
                if (maybe_idx != null and stored_fd.native() != fd.native()) {
                    fd.close();
                }
            }
            return true;
        },
        .err => {
            if (fd.isValid()) fd.close();
            return false;
        },
    }
}

pub fn addFile(
    this: *Watcher,
    fd: bun.FileDescriptor,
@@ -773,7 +676,6 @@ pub fn onMaybeWatchDirectory(watch: *Watcher, file_path: string, dir_fd: bun.Sto

const string = []const u8;

const WatcherTrace = @import("./watcher/WatcherTrace.zig");
const WindowsWatcher = @import("./watcher/WindowsWatcher.zig");
const options = @import("./options.zig");
const std = @import("std");
@@ -361,7 +361,7 @@ pub fn BSSStringList(comptime _count: usize, comptime _item_length: usize) type
        return instance;
    }

    pub fn deinit(self: *Self) void {
    pub fn deinit(self: *const Self) void {
        _ = self;
        bun.default_allocator.destroy(instance);
        loaded = false;
@@ -2,7 +2,7 @@
///
/// ```
/// // Either owned by the default allocator, or borrowed
/// const MaybeOwnedFoo = bun.ptr.OwnedIn(*Foo, bun.allocators.MaybeOwned(bun.DefaultAllocator));
/// const MaybeOwnedFoo = bun.ptr.Owned(*Foo, bun.allocators.MaybeOwned(bun.DefaultAllocator));
///
/// var owned_foo: MaybeOwnedFoo = .new(makeFoo());
/// var borrowed_foo: MaybeOwnedFoo = .fromRawIn(some_foo_ptr, .initBorrowed());
@@ -216,7 +216,8 @@ pub extern fn mi_new_reallocn(p: ?*anyopaque, newcount: usize, size: usize) ?*an
pub const MI_SMALL_WSIZE_MAX = @as(c_int, 128);
pub const MI_SMALL_SIZE_MAX = MI_SMALL_WSIZE_MAX * @import("std").zig.c_translation.sizeof(?*anyopaque);
pub const MI_ALIGNMENT_MAX = (@as(c_int, 16) * @as(c_int, 1024)) * @as(c_ulong, 1024);
pub const MI_MAX_ALIGN_SIZE = 16;

const MI_MAX_ALIGN_SIZE = 16;

pub fn mustUseAlignedAlloc(alignment: std.mem.Alignment) bool {
    return alignment.toByteUnits() > MI_MAX_ALIGN_SIZE;
@@ -12,10 +12,12 @@ pub fn isEnabled() bool {
        .no => false,
        .unknown => {
            enabled = detect: {
                if (bun.env_var.DO_NOT_TRACK.get()) {
                    break :detect .no;
                if (bun.getenvZ("DO_NOT_TRACK")) |x| {
                    if (x.len == 1 and x[0] == '1') {
                        break :detect .no;
                    }
                }
                if (bun.env_var.HYPERFINE_RANDOMIZED_ENVIRONMENT_OFFSET.get() != null) {
                if (bun.getenvZ("HYPERFINE_RANDOMIZED_ENVIRONMENT_OFFSET") != null) {
                    break :detect .no;
                }
                break :detect .yes;
@@ -26,6 +28,31 @@
    };
}

pub fn isCI() bool {
    return switch (is_ci) {
        .yes => true,
        .no => false,
        .unknown => {
            is_ci = detect: {
                inline for (.{
                    "CI",
                    "TDDIUM",
                    "GITHUB_ACTIONS",
                    "JENKINS_URL",
                    "bamboo.buildKey",
                }) |key| {
                    if (bun.getenvZ(key) != null) {
                        break :detect .yes;
                    }
                }
                break :detect .no;
            };
            bun.assert(is_ci == .yes or is_ci == .no);
            return is_ci == .yes;
        },
    };
}

/// This answers, "What parts of bun are people actually using?"
pub const Features = struct {
    pub var builtin_modules = std.enums.EnumSet(bun.jsc.ModuleLoader.HardcodedModule).initEmpty();
@@ -321,9 +321,8 @@ pub const ByteWriter = Writer(*std.io.FixedBufferStream([]u8));
|
||||
pub const FileWriter = Writer(std.fs.File);
|
||||
|
||||
pub const api = struct {
|
||||
// these are in sync with BunLoaderType in headers-handwritten.h
|
||||
pub const Loader = enum(u8) {
|
||||
_none = 254,
|
||||
_none = 255,
|
||||
jsx = 1,
|
||||
js = 2,
|
||||
ts = 3,
|
||||
@@ -2335,6 +2334,9 @@ pub const api = struct {
|
||||
/// line_text
|
||||
line_text: []const u8,
|
||||
|
||||
/// suggestion
|
||||
suggestion: []const u8,
|
||||
|
||||
/// offset
|
||||
offset: u32 = 0,
|
||||
|
||||
@@ -2346,6 +2348,7 @@ pub const api = struct {
|
||||
this.line = try reader.readValue(i32);
|
||||
this.column = try reader.readValue(i32);
|
||||
this.line_text = try reader.readValue([]const u8);
|
||||
this.suggestion = try reader.readValue([]const u8);
|
||||
this.offset = try reader.readValue(u32);
|
||||
return this;
|
||||
}
|
||||
@@ -2356,6 +2359,7 @@ pub const api = struct {
|
||||
try writer.writeInt(this.line);
|
||||
try writer.writeInt(this.column);
|
||||
try writer.writeValue(@TypeOf(this.line_text), this.line_text);
|
||||
try writer.writeValue(@TypeOf(this.suggestion), this.suggestion);
|
||||
try writer.writeInt(this.offset);
|
||||
}
|
||||
};
|
||||
@@ -2820,18 +2824,14 @@ pub const api = struct {
|
||||
/// token
|
||||
token: []const u8,
|
||||
|
||||
/// email
|
||||
email: []const u8,
|
||||
|
||||
pub fn dupe(this: NpmRegistry, allocator: std.mem.Allocator) NpmRegistry {
|
||||
const buf = bun.handleOom(allocator.alloc(u8, this.url.len + this.username.len + this.password.len + this.token.len + this.email.len));
|
||||
const buf = bun.handleOom(allocator.alloc(u8, this.url.len + this.username.len + this.password.len + this.token.len));
|
||||
|
||||
var out: NpmRegistry = .{
|
||||
.url = "",
|
||||
.username = "",
|
||||
.password = "",
|
||||
.token = "",
|
||||
.email = "",
|
||||
};
|
||||
|
||||
var i: usize = 0;
|
||||
@@ -2852,7 +2852,6 @@ pub const api = struct {
|
||||
this.username = try reader.readValue([]const u8);
|
||||
this.password = try reader.readValue([]const u8);
|
||||
this.token = try reader.readValue([]const u8);
|
||||
this.email = try reader.readValue([]const u8);
|
||||
return this;
|
||||
}
|
||||
|
||||
@@ -2861,7 +2860,6 @@ pub const api = struct {
|
||||
try writer.writeValue(@TypeOf(this.username), this.username);
|
||||
try writer.writeValue(@TypeOf(this.password), this.password);
|
||||
try writer.writeValue(@TypeOf(this.token), this.token);
|
||||
try writer.writeValue(@TypeOf(this.email), this.email);
|
||||
}
|
||||
|
||||
pub const Parser = struct {
|
||||
@@ -3054,11 +3052,173 @@ pub const api = struct {
|
||||
|
||||
security_scanner: ?[]const u8 = null,
|
||||
|
||||
minimum_release_age_ms: ?f64 = null,
|
||||
minimum_release_age_excludes: ?[]const []const u8 = null,
|
||||
pub fn decode(reader: anytype) anyerror!BunInstall {
|
||||
var this = std.mem.zeroes(BunInstall);
|
||||
|
||||
public_hoist_pattern: ?install.PnpmMatcher = null,
hoist_pattern: ?install.PnpmMatcher = null,

while (true) {
    switch (try reader.readByte()) {
        0 => {
            return this;
        },

        1 => {
            this.default_registry = try reader.readValue(NpmRegistry);
        },
        2 => {
            this.scoped = try reader.readValue(NpmRegistryMap);
        },
        3 => {
            this.lockfile_path = try reader.readValue([]const u8);
        },
        4 => {
            this.save_lockfile_path = try reader.readValue([]const u8);
        },
        5 => {
            this.cache_directory = try reader.readValue([]const u8);
        },
        6 => {
            this.dry_run = try reader.readValue(bool);
        },
        7 => {
            this.force = try reader.readValue(bool);
        },
        8 => {
            this.save_dev = try reader.readValue(bool);
        },
        9 => {
            this.save_optional = try reader.readValue(bool);
        },
        10 => {
            this.save_peer = try reader.readValue(bool);
        },
        11 => {
            this.save_lockfile = try reader.readValue(bool);
        },
        12 => {
            this.production = try reader.readValue(bool);
        },
        13 => {
            this.save_yarn_lockfile = try reader.readValue(bool);
        },
        14 => {
            this.native_bin_links = try reader.readArray([]const u8);
        },
        15 => {
            this.disable_cache = try reader.readValue(bool);
        },
        16 => {
            this.disable_manifest_cache = try reader.readValue(bool);
        },
        17 => {
            this.global_dir = try reader.readValue([]const u8);
        },
        18 => {
            this.global_bin_dir = try reader.readValue([]const u8);
        },
        19 => {
            this.frozen_lockfile = try reader.readValue(bool);
        },
        20 => {
            this.exact = try reader.readValue(bool);
        },
        21 => {
            this.concurrent_scripts = try reader.readValue(u32);
        },
        else => {
            return error.InvalidMessage;
        },
    }
}
unreachable;
}

pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
    if (this.default_registry) |default_registry| {
        try writer.writeFieldID(1);
        try writer.writeValue(@TypeOf(default_registry), default_registry);
    }
    if (this.scoped) |scoped| {
        try writer.writeFieldID(2);
        try writer.writeValue(@TypeOf(scoped), scoped);
    }
    if (this.lockfile_path) |lockfile_path| {
        try writer.writeFieldID(3);
        try writer.writeValue(@TypeOf(lockfile_path), lockfile_path);
    }
    if (this.save_lockfile_path) |save_lockfile_path| {
        try writer.writeFieldID(4);
        try writer.writeValue(@TypeOf(save_lockfile_path), save_lockfile_path);
    }
    if (this.cache_directory) |cache_directory| {
        try writer.writeFieldID(5);
        try writer.writeValue(@TypeOf(cache_directory), cache_directory);
    }
    if (this.dry_run) |dry_run| {
        try writer.writeFieldID(6);
        try writer.writeInt(@as(u8, @intFromBool(dry_run)));
    }
    if (this.force) |force| {
        try writer.writeFieldID(7);
        try writer.writeInt(@as(u8, @intFromBool(force)));
    }
    if (this.save_dev) |save_dev| {
        try writer.writeFieldID(8);
        try writer.writeInt(@as(u8, @intFromBool(save_dev)));
    }
    if (this.save_optional) |save_optional| {
        try writer.writeFieldID(9);
        try writer.writeInt(@as(u8, @intFromBool(save_optional)));
    }
    if (this.save_peer) |save_peer| {
        try writer.writeFieldID(10);
        try writer.writeInt(@as(u8, @intFromBool(save_peer)));
    }
    if (this.save_lockfile) |save_lockfile| {
        try writer.writeFieldID(11);
        try writer.writeInt(@as(u8, @intFromBool(save_lockfile)));
    }
    if (this.production) |production| {
        try writer.writeFieldID(12);
        try writer.writeInt(@as(u8, @intFromBool(production)));
    }
    if (this.save_yarn_lockfile) |save_yarn_lockfile| {
        try writer.writeFieldID(13);
        try writer.writeInt(@as(u8, @intFromBool(save_yarn_lockfile)));
    }
    if (this.native_bin_links) |native_bin_links| {
        try writer.writeFieldID(14);
        try writer.writeArray([]const u8, native_bin_links);
    }
    if (this.disable_cache) |disable_cache| {
        try writer.writeFieldID(15);
        try writer.writeInt(@as(u8, @intFromBool(disable_cache)));
    }
    if (this.disable_manifest_cache) |disable_manifest_cache| {
        try writer.writeFieldID(16);
        try writer.writeInt(@as(u8, @intFromBool(disable_manifest_cache)));
    }
    if (this.global_dir) |global_dir| {
        try writer.writeFieldID(17);
        try writer.writeValue(@TypeOf(global_dir), global_dir);
    }
    if (this.global_bin_dir) |global_bin_dir| {
        try writer.writeFieldID(18);
        try writer.writeValue(@TypeOf(global_bin_dir), global_bin_dir);
    }
    if (this.frozen_lockfile) |frozen_lockfile| {
        try writer.writeFieldID(19);
        try writer.writeInt(@as(u8, @intFromBool(frozen_lockfile)));
    }
    if (this.exact) |exact| {
        try writer.writeFieldID(20);
        try writer.writeInt(@as(u8, @intFromBool(exact)));
    }
    if (this.concurrent_scripts) |concurrent_scripts| {
        try writer.writeFieldID(21);
        try writer.writeInt(concurrent_scripts);
    }
    try writer.endMessage();
}
};
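
The decode/encode pair above round-trips an optional-fields message: every field that is set is written as a one-byte field ID followed by its payload, and a zero byte ends the message. The sketch below is a minimal standalone version of that scheme; the Example struct, its field IDs, and the test are illustrative assumptions rather than repo code, and it assumes Zig's pre-0.15 std.io.fixedBufferStream reader/writer interface.

const std = @import("std");

// Hypothetical struct; field IDs 6 and 7 mirror dry_run/force above.
const Example = struct {
    dry_run: ?bool = null,
    force: ?bool = null,

    fn encode(this: *const @This(), writer: anytype) !void {
        if (this.dry_run) |v| {
            try writer.writeByte(6); // field ID
            try writer.writeByte(@intFromBool(v)); // bool payload as one byte
        }
        if (this.force) |v| {
            try writer.writeByte(7);
            try writer.writeByte(@intFromBool(v));
        }
        try writer.writeByte(0); // a zero byte terminates the message
    }

    fn decode(reader: anytype) !@This() {
        var this = @This(){};
        while (true) {
            switch (try reader.readByte()) {
                0 => return this, // end of message
                6 => this.dry_run = (try reader.readByte()) != 0,
                7 => this.force = (try reader.readByte()) != 0,
                else => return error.InvalidMessage, // unknown field ID
            }
        }
    }
};

test "field-ID roundtrip" {
    var buf: [8]u8 = undefined;
    var stream = std.io.fixedBufferStream(&buf);
    const value = Example{ .dry_run = true };
    try value.encode(stream.writer());
    stream.reset(); // rewind and read back what was written
    const decoded = try Example.decode(stream.reader());
    try std.testing.expect(decoded.dry_run.? and decoded.force == null);
}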

pub const ClientServerModule = struct {

@@ -3221,5 +3381,4 @@ const std = @import("std");

const bun = @import("bun");
const OOM = bun.OOM;
const install = bun.install;
const js_ast = bun.ast;

13 src/ast.zig
@@ -345,18 +345,6 @@ pub const ExportsKind = enum {
    pub fn jsonStringify(self: @This(), writer: anytype) !void {
        return try writer.write(@tagName(self));
    }

    pub fn toModuleType(self: @This()) bun.options.ModuleType {
        return switch (self) {
            .none => .unknown,
            .cjs => .cjs,

            .esm_with_dynamic_fallback,
            .esm_with_dynamic_fallback_from_cjs,
            .esm,
            => .esm,
        };
    }
};

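The toModuleType switch above collapses both dynamic-fallback variants into plain esm. Here is a standalone sketch of that collapse; ModuleType is inlined because bun.options is repo-internal, and the test is illustrative.

const std = @import("std");

// Inlined stand-in; in the repo this is bun.options.ModuleType.
const ModuleType = enum { unknown, cjs, esm };

const ExportsKind = enum {
    none,
    cjs,
    esm,
    esm_with_dynamic_fallback,
    esm_with_dynamic_fallback_from_cjs,

    fn toModuleType(self: @This()) ModuleType {
        return switch (self) {
            .none => .unknown,
            .cjs => .cjs,
            // both dynamic-fallback variants normalize to esm
            .esm, .esm_with_dynamic_fallback, .esm_with_dynamic_fallback_from_cjs => .esm,
        };
    }
};

test "dynamic-fallback variants normalize to esm" {
    try std.testing.expectEqual(ModuleType.esm, ExportsKind.esm_with_dynamic_fallback_from_cjs.toModuleType());
    try std.testing.expectEqual(ModuleType.unknown, ExportsKind.none.toModuleType());
}
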
pub const DeclaredSymbol = struct {

@@ -622,7 +610,6 @@ pub const ToJSError = error{
    MacroError,
    OutOfMemory,
    JSError,
    JSTerminated,
};

/// Say you need to allocate a bunch of tiny arrays

@@ -1100,30 +1100,11 @@ pub const String = struct {
    }

    pub fn eqlComptime(s: *const String, comptime value: []const u8) bool {
        if (!s.isUTF8()) {
            bun.assertf(s.next == null, "transpiler: utf-16 string is a rope", .{}); // utf-16 strings are not ropes
            return strings.eqlComptimeUTF16(s.slice16(), value);
        }
        if (s.next == null) {
            // latin-1 or utf-8, non-rope
            return strings.eqlComptime(s.data, value);
        }

        // latin-1 or utf-8, rope
        return eql8Rope(s, value);
    }
    fn eql8Rope(s: *const String, value: []const u8) bool {
        bun.assertf(s.next != null and s.isUTF8(), "transpiler: bad call to eql8Rope", .{});
        if (s.rope_len != value.len) return false;
        var i: usize = 0;
        var next: ?*const String = s;
        while (next) |current| : (next = current.next) {
            if (!strings.eqlLong(current.data, value[i..][0..current.data.len], false)) return false;
            i += current.data.len;
        }
        bun.assertf(i == value.len, "transpiler: rope string length mismatch 1", .{});
        bun.assertf(i == s.rope_len, "transpiler: rope string length mismatch 2", .{});
        return true;
        bun.assert(s.next == null);
        return if (s.isUTF8())
            strings.eqlComptime(s.data, value)
        else
            strings.eqlComptimeUTF16(s.slice16(), value);
    }

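eql8Rope walks the rope's linked segments and compares each against the matching window of the flat value, after an up-front rope_len check rejects length mismatches in O(1). Below is a minimal standalone sketch of that walk; Segment and ropeEql are hypothetical names, and std.mem.eql stands in for the repo's strings.eqlLong.

const std = @import("std");

// Illustrative segment type; the repo's rope links whole String nodes.
const Segment = struct {
    data: []const u8,
    next: ?*const Segment = null,
};

fn ropeEql(head: *const Segment, rope_len: usize, value: []const u8) bool {
    if (rope_len != value.len) return false; // cheap length check first
    var i: usize = 0;
    var next: ?*const Segment = head;
    while (next) |current| : (next = current.next) {
        // Guard the slice so a corrupt rope_len cannot index out of bounds.
        if (current.data.len > value.len - i) return false;
        if (!std.mem.eql(u8, current.data, value[i..][0..current.data.len])) return false;
        i += current.data.len;
    }
    return i == value.len;
}

test "rope equality walks segments in order" {
    const tail = Segment{ .data = "bar" };
    const head = Segment{ .data = "foo", .next = &tail };
    try std.testing.expect(ropeEql(&head, 6, "foobar"));
    try std.testing.expect(!ropeEql(&head, 6, "foobaz"));
}
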
pub fn hasPrefixComptime(s: *const String, comptime value: anytype) bool {

@@ -51,7 +51,7 @@ pub const MacroContext = struct {
    bun.assert(!isMacroPath(import_record_path_without_macro_prefix));

    const input_specifier = brk: {
        if (jsc.ModuleLoader.HardcodedModule.Alias.get(import_record_path, .bun, .{})) |replacement| {
        if (jsc.ModuleLoader.HardcodedModule.Alias.get(import_record_path, .bun)) |replacement| {
            break :brk replacement.path;
        }

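The input_specifier block above uses Zig's labeled-block syntax: brk: names the block, and break :brk yields a value from it. A small self-contained sketch of the pattern, with hypothetical values standing in for the module-alias lookup:

const std = @import("std");

test "labeled block yields a value" {
    const replacement: ?[]const u8 = "bun:internal"; // stand-in for the alias lookup
    const specifier = brk: {
        if (replacement) |r| break :brk r;
        break :brk "fallback";
    };
    try std.testing.expectEqualStrings("bun:internal", specifier);
}
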
@@ -591,7 +591,9 @@ pub const Runner = struct {
    }
};

    return CallData.callWrapper(.{
    // TODO: can change back to `return CallData.callWrapper(.{`
    // when https://github.com/ziglang/zig/issues/16242 is fixed
    return CallData.callWrapper(CallArgs{
        macro,
        log,
        allocator,
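
The TODO above points at ziglang/zig#16242; per the comment, the anonymous .{ ... } literal cannot be passed directly until that issue is fixed, so the call names the argument tuple type (CallArgs) explicitly. A standalone sketch of the named-tuple pattern, assuming std.meta.ArgsTuple to derive the tuple type and a stand-in add function:

const std = @import("std");

fn add(a: i32, b: i32) i32 {
    return a + b;
}

// Naming the argument tuple type mirrors the CallArgs workaround above.
const CallArgs = std.meta.ArgsTuple(@TypeOf(add));

test "explicitly typed args tuple" {
    const args = CallArgs{ 1, 2 }; // instead of an anonymous .{ 1, 2 }
    try std.testing.expectEqual(@as(i32, 3), @call(.auto, add, args));
}
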
Some files were not shown because too many files have changed in this diff.