Compare commits

..

1 Commits

Author SHA1 Message Date
Claude Bot
e1afd658c7 fix(parser): function declarations inside labeled statements should hoist in sloppy mode
Per ECMAScript Annex B, function declarations inside labeled statements
in sloppy mode should hoist like regular function declarations, not like
block-scoped functions.

Previously, Bun incorrectly transformed:
```js
foo:
    function bar() { return "bar"; }
console.log(bar()); // ReferenceError: bar is not defined
```

Into:
```js
foo: {
  let bar = function() { return "bar"; };
}
console.log(bar()); // bar is undefined outside the block
```

Now the function declaration is preserved as-is, matching Node.js/V8 behavior.

Fixes #25737

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-12 12:20:47 +00:00
605 changed files with 4657 additions and 150600 deletions

View File

@@ -114,8 +114,6 @@ const buildPlatforms = [
{ os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.22" },
{ os: "windows", arch: "x64", release: "2019" },
{ os: "windows", arch: "x64", baseline: true, release: "2019" },
// TODO: Enable when Windows ARM64 CI runners are ready
// { os: "windows", arch: "aarch64", release: "2019" },
];
/**
@@ -138,8 +136,6 @@ const testPlatforms = [
{ os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.22", tier: "latest" },
{ os: "windows", arch: "x64", release: "2019", tier: "oldest" },
{ os: "windows", arch: "x64", release: "2019", baseline: true, tier: "oldest" },
// TODO: Enable when Windows ARM64 CI runners are ready
// { os: "windows", arch: "aarch64", release: "2019", tier: "oldest" },
];
/**

View File

@@ -36,20 +36,16 @@ function Log-Debug {
}
}
# Detect system architecture
$script:IsARM64 = [System.Runtime.InteropServices.RuntimeInformation]::OSArchitecture -eq [System.Runtime.InteropServices.Architecture]::Arm64
$script:VsArch = if ($script:IsARM64) { "arm64" } else { "amd64" }
# Load Visual Studio environment if not already loaded
function Ensure-VSEnvironment {
if ($null -eq $env:VSINSTALLDIR) {
Log-Info "Loading Visual Studio environment for $script:VsArch..."
Log-Info "Loading Visual Studio environment..."
$vswhere = "C:\Program Files (x86)\Microsoft Visual Studio\Installer\vswhere.exe"
if (!(Test-Path $vswhere)) {
throw "Command not found: vswhere (did you install Visual Studio?)"
}
$vsDir = & $vswhere -prerelease -latest -property installationPath
if ($null -eq $vsDir) {
$vsDir = Get-ChildItem -Path "C:\Program Files\Microsoft Visual Studio\2022" -Directory -ErrorAction SilentlyContinue
@@ -58,20 +54,20 @@ function Ensure-VSEnvironment {
}
$vsDir = $vsDir.FullName
}
Push-Location $vsDir
try {
$vsShell = Join-Path -Path $vsDir -ChildPath "Common7\Tools\Launch-VsDevShell.ps1"
. $vsShell -Arch $script:VsArch -HostArch $script:VsArch
. $vsShell -Arch amd64 -HostArch amd64
} finally {
Pop-Location
}
Log-Success "Visual Studio environment loaded"
}
if ($env:VSCMD_ARG_TGT_ARCH -eq "x86") {
throw "Visual Studio environment is targeting 32 bit x86, but only 64-bit architectures (x64/arm64) are supported."
throw "Visual Studio environment is targeting 32 bit, but only 64 bit is supported."
}
}
@@ -190,10 +186,8 @@ function Install-KeyLocker {
}
# Download MSI installer
# Note: KeyLocker tools currently only available for x64, but works on ARM64 via emulation
$msiArch = "x64"
$msiUrl = "https://bun-ci-assets.bun.sh/Keylockertools-windows-${msiArch}.msi"
$msiPath = Join-Path $env:TEMP "Keylockertools-windows-${msiArch}.msi"
$msiUrl = "https://bun-ci-assets.bun.sh/Keylockertools-windows-x64.msi"
$msiPath = Join-Path $env:TEMP "Keylockertools-windows-x64.msi"
Log-Info "Downloading MSI from: $msiUrl"
Log-Info "Downloading to: $msiPath"

View File

@@ -219,9 +219,6 @@ function create_release() {
bun-windows-x64-profile.zip
bun-windows-x64-baseline.zip
bun-windows-x64-baseline-profile.zip
# TODO: Enable when Windows ARM64 CI runners are ready
# bun-windows-aarch64.zip
# bun-windows-aarch64-profile.zip
)
function upload_artifact() {

View File

@@ -6,7 +6,8 @@ To do that:
- git fetch upstream
- git merge upstream main
- Fix the merge conflicts
- bun build.ts debug
- cd ../../ (back to bun)
- make jsc-build (this will take about 7 minutes)
- While it compiles, in another task review the JSC commits between the last version of Webkit and the new version. Write up a summary of the webkit changes in a file called "webkit-changes.md"
- bun run build:local (build a build of Bun with the new Webkit, make sure it compiles)
- After making sure it compiles, run some code to make sure things work. something like ./build/debug-local/bun-debug --print '42' should be all you need
@@ -20,7 +21,3 @@ To do that:
- commit + push (without adding the webkit-changes.md file)
- create PR titled "Upgrade Webkit to the <commit-sha>", paste your webkit-changes.md into the PR description
- delete the webkit-changes.md file
Things to check for a successful upgrade:
- Did JSType in vendor/WebKit/Source/JavaScriptCore have any recent changes? Does the enum values align with whats present in src/bun.js/bindings/JSType.zig?
- Were there any changes to the webcore code generator? If there are C++ compilation errors, check for differences in some of the generated code in like vendor/WebKit/source/WebCore/bindings/scripts/test/JS/

View File

@@ -88,7 +88,7 @@ jobs:
commit-message: "deps: update c-ares to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update c-ares to ${{ steps.check-version.outputs.tag }}"
delete-branch: true
branch: deps/update-cares
branch: deps/update-cares-${{ github.run_number }}
body: |
## What does this PR do?

View File

@@ -91,7 +91,7 @@ jobs:
commit-message: "deps: update hdrhistogram to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update hdrhistogram to ${{ steps.check-version.outputs.tag }}"
delete-branch: true
branch: deps/update-hdrhistogram
branch: deps/update-hdrhistogram-${{ github.run_number }}
body: |
## What does this PR do?

View File

@@ -107,7 +107,7 @@ jobs:
commit-message: "deps: update highway to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update highway to ${{ steps.check-version.outputs.tag }}"
delete-branch: true
branch: deps/update-highway
branch: deps/update-highway-${{ github.run_number }}
body: |
## What does this PR do?

View File

@@ -88,7 +88,7 @@ jobs:
commit-message: "deps: update libarchive to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update libarchive to ${{ steps.check-version.outputs.tag }}"
delete-branch: true
branch: deps/update-libarchive
branch: deps/update-libarchive-${{ github.run_number }}
body: |
## What does this PR do?

View File

@@ -88,7 +88,7 @@ jobs:
commit-message: "deps: update libdeflate to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update libdeflate to ${{ steps.check-version.outputs.tag }}"
delete-branch: true
branch: deps/update-libdeflate
branch: deps/update-libdeflate-${{ github.run_number }}
body: |
## What does this PR do?

View File

@@ -100,7 +100,7 @@ jobs:
commit-message: "deps: update lolhtml to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update lolhtml to ${{ steps.check-version.outputs.tag }}"
delete-branch: true
branch: deps/update-lolhtml
branch: deps/update-lolhtml-${{ github.run_number }}
body: |
## What does this PR do?

View File

@@ -105,7 +105,7 @@ jobs:
commit-message: "deps: update lshpack to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update lshpack to ${{ steps.check-version.outputs.tag }}"
delete-branch: true
branch: deps/update-lshpack
branch: deps/update-lshpack-${{ github.run_number }}
body: |
## What does this PR do?

View File

@@ -74,7 +74,7 @@ jobs:
```
${{ env.changed_files }}
```
branch: certs/update-root-certs
branch: certs/update-root-certs-${{ github.run_number }}
base: main
delete-branch: true
labels:

View File

@@ -83,7 +83,7 @@ jobs:
commit-message: "deps: update sqlite to ${{ steps.check-version.outputs.latest }}"
title: "deps: update sqlite to ${{ steps.check-version.outputs.latest }}"
delete-branch: true
branch: deps/update-sqlite
branch: deps/update-sqlite-${{ steps.check-version.outputs.latest }}
body: |
## What does this PR do?

View File

@@ -68,7 +68,7 @@ jobs:
commit-message: "deps: update ${{ matrix.package }} to ${{ steps.check-version.outputs.latest }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update ${{ matrix.package }} to ${{ steps.check-version.outputs.latest }}"
delete-branch: true
branch: deps/update-${{ matrix.package }}
branch: deps/update-${{ matrix.package }}-${{ github.run_number }}
body: |
## What does this PR do?

View File

@@ -88,7 +88,7 @@ jobs:
commit-message: "deps: update zstd to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update zstd to ${{ steps.check-version.outputs.tag }}"
delete-branch: true
branch: deps/update-zstd
branch: deps/update-zstd-${{ github.run_number }}
body: |
## What does this PR do?

1
.gitignore vendored
View File

@@ -1,5 +1,4 @@
.claude/settings.local.json
.direnv
.DS_Store
.env
.envrc

View File

@@ -10,8 +10,6 @@ This is the Bun repository - an all-in-one JavaScript runtime & toolkit designed
- **Run tests with your debug build**: `bun bd test <test-file>`
- **CRITICAL**: Never use `bun test` directly - it won't include your changes
- **Run any command with debug build**: `bun bd <command>`
- **Run with JavaScript exception scope verification**: `BUN_JSC_validateExceptionChecks=1
BUN_JSC_dumpSimulatedThrows=1 bun bd <command>`
Tip: Bun is already installed and in $PATH. The `bd` subcommand is a package.json script.
@@ -211,24 +209,3 @@ Built-in JavaScript modules use special syntax and are organized as:
12. **Branch names must start with `claude/`** - This is a requirement for the CI to work.
**ONLY** push up changes after running `bun bd test <file>` and ensuring your tests pass.
## Debugging CI Failures
Use `scripts/buildkite-failures.ts` to fetch and analyze CI build failures:
```bash
# View failures for current branch
bun run scripts/buildkite-failures.ts
# View failures for a specific build number
bun run scripts/buildkite-failures.ts 35051
# View failures for a GitHub PR
bun run scripts/buildkite-failures.ts #26173
bun run scripts/buildkite-failures.ts https://github.com/oven-sh/bun/pull/26173
# Wait for build to complete (polls every 10s until pass/fail)
bun run scripts/buildkite-failures.ts --wait
```
The script fetches logs from BuildKite's public API and saves complete logs to `/tmp/bun-build-{number}-{platform}-{step}.log`. It displays a summary of errors and the file path for each failed job. Use `--wait` to poll continuously until the build completes or fails.

2
LATEST
View File

@@ -1 +1 @@
1.3.6
1.3.5

View File

@@ -36,7 +36,6 @@ Bun statically links these libraries:
| [`libbase64`](https://github.com/aklomp/base64/blob/master/LICENSE) | BSD 2-Clause |
| [`libuv`](https://github.com/libuv/libuv) (on Windows) | MIT |
| [`libdeflate`](https://github.com/ebiggers/libdeflate) | MIT |
| [`uucode`](https://github.com/jacobsandlund/uucode) | MIT |
| A fork of [`uWebsockets`](https://github.com/jarred-sumner/uwebsockets) | Apache 2.0 licensed |
| Parts of [Tigerbeetle's IO code](https://github.com/tigerbeetle/tigerbeetle/blob/532c8b70b9142c17e07737ab6d3da68d7500cbca/src/io/windows.zig#L1) | Apache 2.0 licensed |

View File

@@ -25,7 +25,6 @@
"strip-ansi": "^7.1.0",
"tar": "^7.4.3",
"tinycolor2": "^1.6.0",
"wrap-ansi": "^9.0.0",
"zx": "^7.2.3",
},
"devDependencies": {
@@ -170,7 +169,7 @@
"ansi-regex": ["ansi-regex@6.0.1", "", {}, "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA=="],
"ansi-styles": ["ansi-styles@6.2.3", "https://artifactory.infra.ant.dev:443/artifactory/api/npm/npm-all/ansi-styles/-/ansi-styles-6.2.3.tgz", {}, "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg=="],
"ansi-styles": ["ansi-styles@3.2.1", "", { "dependencies": { "color-convert": "^1.9.0" } }, "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA=="],
"atomic-sleep": ["atomic-sleep@1.0.0", "", {}, "sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ=="],
@@ -494,8 +493,6 @@
"which": ["which@3.0.1", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "bin/which.js" } }, "sha512-XA1b62dzQzLfaEOSQFTCOd5KFf/1VSzZo7/7TUjnya6u0vGGKzU96UQBZTAThCb2j4/xjBAyii1OhRLJEivHvg=="],
"wrap-ansi": ["wrap-ansi@9.0.2", "https://artifactory.infra.ant.dev:443/artifactory/api/npm/npm-all/wrap-ansi/-/wrap-ansi-9.0.2.tgz", { "dependencies": { "ansi-styles": "^6.2.1", "string-width": "^7.0.0", "strip-ansi": "^7.1.0" } }, "sha512-42AtmgqjV+X1VpdOfyTGOYRi0/zsoLqtXQckTmqTeybT+BDIbM/Guxo7x3pE2vtpr1ok6xRqM9OpBe+Jyoqyww=="],
"yallist": ["yallist@5.0.0", "", {}, "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw=="],
"yaml": ["yaml@2.3.4", "", {}, "sha512-8aAvwVUSHpfEqTQ4w/KMlf3HcRdt50E5ODIQJBw1fQ5RL34xabzxtUlzTXVqc4rkZsPbvrXKWnABCD7kWSmocA=="],
@@ -506,6 +503,8 @@
"@babel/highlight/chalk": ["chalk@2.4.2", "", { "dependencies": { "ansi-styles": "^3.2.1", "escape-string-regexp": "^1.0.5", "supports-color": "^5.3.0" } }, "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ=="],
"ansi-styles/color-convert": ["color-convert@1.9.3", "", { "dependencies": { "color-name": "1.1.3" } }, "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg=="],
"avvio/fastq": ["fastq@1.19.1", "", { "dependencies": { "reusify": "^1.0.4" } }, "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ=="],
"cross-spawn/which": ["which@2.0.2", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "./bin/node-which" } }, "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA=="],
@@ -518,10 +517,6 @@
"npm-run-path/path-key": ["path-key@4.0.0", "", {}, "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ=="],
"@babel/highlight/chalk/ansi-styles": ["ansi-styles@3.2.1", "", { "dependencies": { "color-convert": "^1.9.0" } }, "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA=="],
"@babel/highlight/chalk/ansi-styles/color-convert": ["color-convert@1.9.3", "", { "dependencies": { "color-name": "1.1.3" } }, "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg=="],
"@babel/highlight/chalk/ansi-styles/color-convert/color-name": ["color-name@1.1.3", "", {}, "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw=="],
"ansi-styles/color-convert/color-name": ["color-name@1.1.3", "", {}, "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw=="],
}
}

View File

@@ -1,15 +0,0 @@
{
"lockfileVersion": 1,
"configVersion": 1,
"workspaces": {
"": {
"name": "json5-benchmark",
"dependencies": {
"json5": "^2.2.3",
},
},
},
"packages": {
"json5": ["json5@2.2.3", "", { "bin": { "json5": "lib/cli.js" } }, "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg=="],
}
}

View File

@@ -1,88 +0,0 @@
import JSON5 from "json5";
import { bench, group, run } from "../runner.mjs";
const isBun = typeof Bun !== "undefined" && Bun.JSON5;
function sizeLabel(n) {
if (n >= 1024 * 1024) return `${(n / 1024 / 1024).toFixed(1)}MB`;
if (n >= 1024) return `${(n / 1024).toFixed(0)}KB`;
return `${n}B`;
}
// -- parse inputs --
const smallJson5 = `{
// User profile
name: "John Doe",
age: 30,
email: 'john@example.com',
active: true,
}`;
function generateLargeJson5(count) {
const lines = ["{\n // Auto-generated dataset\n items: [\n"];
for (let i = 0; i < count; i++) {
lines.push(` {
id: ${i},
name: 'item_${i}',
value: ${(Math.random() * 1000).toFixed(2)},
hex: 0x${i.toString(16).toUpperCase()},
active: ${i % 2 === 0},
tags: ['tag_${i % 10}', 'category_${i % 5}',],
// entry ${i}
},\n`);
}
lines.push(" ],\n total: " + count + ",\n status: 'complete',\n}\n");
return lines.join("");
}
const largeJson5 = generateLargeJson5(6500);
// -- stringify inputs --
const smallObject = {
name: "John Doe",
age: 30,
email: "john@example.com",
active: true,
};
const largeObject = {
items: Array.from({ length: 10000 }, (_, i) => ({
id: i,
name: `item_${i}`,
value: +(Math.random() * 1000).toFixed(2),
active: i % 2 === 0,
tags: [`tag_${i % 10}`, `category_${i % 5}`],
})),
total: 10000,
status: "complete",
};
const stringify = isBun ? Bun.JSON5.stringify : JSON5.stringify;
// -- parse benchmarks --
group(`parse small (${sizeLabel(smallJson5.length)})`, () => {
if (isBun) bench("Bun.JSON5.parse", () => Bun.JSON5.parse(smallJson5));
bench("json5.parse", () => JSON5.parse(smallJson5));
});
group(`parse large (${sizeLabel(largeJson5.length)})`, () => {
if (isBun) bench("Bun.JSON5.parse", () => Bun.JSON5.parse(largeJson5));
bench("json5.parse", () => JSON5.parse(largeJson5));
});
// -- stringify benchmarks --
group(`stringify small (${sizeLabel(stringify(smallObject).length)})`, () => {
if (isBun) bench("Bun.JSON5.stringify", () => Bun.JSON5.stringify(smallObject));
bench("json5.stringify", () => JSON5.stringify(smallObject));
});
group(`stringify large (${sizeLabel(stringify(largeObject).length)})`, () => {
if (isBun) bench("Bun.JSON5.stringify", () => Bun.JSON5.stringify(largeObject));
bench("json5.stringify", () => JSON5.stringify(largeObject));
});
await run();

View File

@@ -1,7 +0,0 @@
{
"name": "json5-benchmark",
"version": "1.0.0",
"dependencies": {
"json5": "^2.2.3"
}
}

View File

@@ -18,7 +18,6 @@
"react": "^18.3.1",
"react-dom": "^18.3.1",
"string-width": "7.1.0",
"wrap-ansi": "^9.0.0",
"strip-ansi": "^7.1.0",
"tar": "^7.4.3",
"tinycolor2": "^1.6.0",

View File

@@ -14,4 +14,3 @@ export function run(opts = {}) {
export const bench = Mitata.bench;
export const group = Mitata.group;
export const summary = Mitata.summary;

View File

@@ -1,38 +0,0 @@
// @runtime bun,node
import { Buffer } from "node:buffer";
import { bench, group, run } from "../runner.mjs";
// Small arrays (common case)
const int32Array8 = [1, 2, 3, 4, 5, 6, 7, 8];
const doubleArray8 = [1.5, 2.5, 3.5, 4.5, 5.5, 6.5, 7.5, 8.5];
// Medium arrays
const int32Array64 = Array.from({ length: 64 }, (_, i) => i % 256);
const doubleArray64 = Array.from({ length: 64 }, (_, i) => i + 0.5);
// Large arrays
const int32Array1024 = Array.from({ length: 1024 }, (_, i) => i % 256);
// Array-like objects (fallback path)
const arrayLike8 = { 0: 1, 1: 2, 2: 3, 3: 4, 4: 5, 5: 6, 6: 7, 7: 8, length: 8 };
// Empty array
const emptyArray = [];
group("Buffer.from(array) - Int32 arrays", () => {
bench("Buffer.from(int32[8])", () => Buffer.from(int32Array8));
bench("Buffer.from(int32[64])", () => Buffer.from(int32Array64));
bench("Buffer.from(int32[1024])", () => Buffer.from(int32Array1024));
});
group("Buffer.from(array) - Double arrays", () => {
bench("Buffer.from(double[8])", () => Buffer.from(doubleArray8));
bench("Buffer.from(double[64])", () => Buffer.from(doubleArray64));
});
group("Buffer.from(array) - Edge cases", () => {
bench("Buffer.from([])", () => Buffer.from(emptyArray));
bench("Buffer.from(arrayLike[8])", () => Buffer.from(arrayLike8));
});
await run();

View File

@@ -1,20 +0,0 @@
// Benchmark for [...set] optimization (WebKit#56539)
// https://github.com/WebKit/WebKit/pull/56539
import { bench, run } from "../runner.mjs";
const intSet10 = new Set([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]);
const intSet100 = new Set(Array.from({ length: 100 }, (_, i) => i));
const strSet10 = new Set(Array.from({ length: 10 }, (_, i) => `key-${i}`));
const strSet100 = new Set(Array.from({ length: 100 }, (_, i) => `key-${i}`));
const objSet10 = new Set(Array.from({ length: 10 }, (_, i) => ({ id: i })));
const objSet100 = new Set(Array.from({ length: 100 }, (_, i) => ({ id: i })));
bench("[...set] - integers (10)", () => [...intSet10]);
bench("[...set] - integers (100)", () => [...intSet100]);
bench("[...set] - strings (10)", () => [...strSet10]);
bench("[...set] - strings (100)", () => [...strSet100]);
bench("[...set] - objects (10)", () => [...objSet10]);
bench("[...set] - objects (100)", () => [...objSet100]);
await run();

View File

@@ -1,103 +0,0 @@
import wrapAnsi from "wrap-ansi";
import { bench, run, summary } from "../runner.mjs";
// Test fixtures
const shortText = "The quick brown fox jumped over the lazy dog.";
const mediumText = "The quick brown fox jumped over the lazy dog and then ran away with the unicorn. ".repeat(10);
const longText = "The quick brown fox jumped over the lazy dog and then ran away with the unicorn. ".repeat(100);
// ANSI colored text
const red = s => `\u001B[31m${s}\u001B[39m`;
const green = s => `\u001B[32m${s}\u001B[39m`;
const blue = s => `\u001B[34m${s}\u001B[39m`;
const coloredShort = `The quick ${red("brown fox")} jumped over the ${green("lazy dog")}.`;
const coloredMedium =
`The quick ${red("brown fox jumped over")} the ${green("lazy dog and then ran away")} with the ${blue("unicorn")}. `.repeat(
10,
);
const coloredLong =
`The quick ${red("brown fox jumped over")} the ${green("lazy dog and then ran away")} with the ${blue("unicorn")}. `.repeat(
100,
);
// Full-width characters (Japanese)
const japaneseText = "日本語のテキストを折り返すテストです。全角文字は幅2としてカウントされます。".repeat(5);
// Emoji text
const emojiText = "Hello 👋 World 🌍! Let's test 🧪 some emoji 😀 wrapping 📦!".repeat(5);
// Hyperlink text
const hyperlinkText = "Check out \u001B]8;;https://bun.sh\u0007Bun\u001B]8;;\u0007, it's fast! ".repeat(10);
// Options
const hardOpts = { hard: true };
const noTrimOpts = { trim: false };
// Basic text benchmarks
summary(() => {
bench("Short text (45 chars) - npm", () => wrapAnsi(shortText, 20));
bench("Short text (45 chars) - Bun", () => Bun.wrapAnsi(shortText, 20));
});
summary(() => {
bench("Medium text (810 chars) - npm", () => wrapAnsi(mediumText, 40));
bench("Medium text (810 chars) - Bun", () => Bun.wrapAnsi(mediumText, 40));
});
summary(() => {
bench("Long text (8100 chars) - npm", () => wrapAnsi(longText, 80));
bench("Long text (8100 chars) - Bun", () => Bun.wrapAnsi(longText, 80));
});
// ANSI colored text benchmarks
summary(() => {
bench("Colored short - npm", () => wrapAnsi(coloredShort, 20));
bench("Colored short - Bun", () => Bun.wrapAnsi(coloredShort, 20));
});
summary(() => {
bench("Colored medium - npm", () => wrapAnsi(coloredMedium, 40));
bench("Colored medium - Bun", () => Bun.wrapAnsi(coloredMedium, 40));
});
summary(() => {
bench("Colored long - npm", () => wrapAnsi(coloredLong, 80));
bench("Colored long - Bun", () => Bun.wrapAnsi(coloredLong, 80));
});
// Hard wrap benchmarks
summary(() => {
bench("Hard wrap long - npm", () => wrapAnsi(longText, 80, hardOpts));
bench("Hard wrap long - Bun", () => Bun.wrapAnsi(longText, 80, hardOpts));
});
summary(() => {
bench("Hard wrap colored - npm", () => wrapAnsi(coloredLong, 80, hardOpts));
bench("Hard wrap colored - Bun", () => Bun.wrapAnsi(coloredLong, 80, hardOpts));
});
// Unicode benchmarks
summary(() => {
bench("Japanese (full-width) - npm", () => wrapAnsi(japaneseText, 40));
bench("Japanese (full-width) - Bun", () => Bun.wrapAnsi(japaneseText, 40));
});
summary(() => {
bench("Emoji text - npm", () => wrapAnsi(emojiText, 30));
bench("Emoji text - Bun", () => Bun.wrapAnsi(emojiText, 30));
});
// Hyperlink benchmarks
summary(() => {
bench("Hyperlink (OSC 8) - npm", () => wrapAnsi(hyperlinkText, 40));
bench("Hyperlink (OSC 8) - Bun", () => Bun.wrapAnsi(hyperlinkText, 40));
});
// No trim option
summary(() => {
bench("No trim long - npm", () => wrapAnsi(longText, 80, noTrimOpts));
bench("No trim long - Bun", () => Bun.wrapAnsi(longText, 80, noTrimOpts));
});
await run();

150
build.zig
View File

@@ -34,7 +34,6 @@ const BunBuildOptions = struct {
enable_asan: bool,
enable_fuzzilli: bool,
enable_valgrind: bool,
enable_tinycc: bool,
use_mimalloc: bool,
tracy_callstack_depth: u16,
reported_nodejs_version: Version,
@@ -85,7 +84,6 @@ const BunBuildOptions = struct {
opts.addOption(bool, "enable_asan", this.enable_asan);
opts.addOption(bool, "enable_fuzzilli", this.enable_fuzzilli);
opts.addOption(bool, "enable_valgrind", this.enable_valgrind);
opts.addOption(bool, "enable_tinycc", this.enable_tinycc);
opts.addOption(bool, "use_mimalloc", this.use_mimalloc);
opts.addOption([]const u8, "reported_nodejs_version", b.fmt("{f}", .{this.reported_nodejs_version}));
opts.addOption(bool, "zig_self_hosted_backend", this.no_llvm);
@@ -261,7 +259,6 @@ pub fn build(b: *Build) !void {
.enable_asan = b.option(bool, "enable_asan", "Enable asan") orelse false,
.enable_fuzzilli = b.option(bool, "enable_fuzzilli", "Enable fuzzilli instrumentation") orelse false,
.enable_valgrind = b.option(bool, "enable_valgrind", "Enable valgrind") orelse false,
.enable_tinycc = b.option(bool, "enable_tinycc", "Enable TinyCC for FFI JIT compilation") orelse true,
.use_mimalloc = b.option(bool, "use_mimalloc", "Use mimalloc as default allocator") orelse false,
.llvm_codegen_threads = b.option(u32, "llvm_codegen_threads", "Number of threads to use for LLVM codegen") orelse 1,
};
@@ -345,7 +342,6 @@ pub fn build(b: *Build) !void {
const step = b.step("check-debug", "Check for semantic analysis errors on some platforms");
addMultiCheck(b, step, build_options, &.{
.{ .os = .windows, .arch = .x86_64 },
.{ .os = .windows, .arch = .aarch64 },
.{ .os = .mac, .arch = .aarch64 },
.{ .os = .linux, .arch = .x86_64 },
}, &.{.Debug});
@@ -356,7 +352,6 @@ pub fn build(b: *Build) !void {
const step = b.step("check-all", "Check for semantic analysis errors on all supported platforms");
addMultiCheck(b, step, build_options, &.{
.{ .os = .windows, .arch = .x86_64 },
.{ .os = .windows, .arch = .aarch64 },
.{ .os = .mac, .arch = .x86_64 },
.{ .os = .mac, .arch = .aarch64 },
.{ .os = .linux, .arch = .x86_64 },
@@ -371,7 +366,6 @@ pub fn build(b: *Build) !void {
const step = b.step("check-all-debug", "Check for semantic analysis errors on all supported platforms in debug mode");
addMultiCheck(b, step, build_options, &.{
.{ .os = .windows, .arch = .x86_64 },
.{ .os = .windows, .arch = .aarch64 },
.{ .os = .mac, .arch = .x86_64 },
.{ .os = .mac, .arch = .aarch64 },
.{ .os = .linux, .arch = .x86_64 },
@@ -386,14 +380,12 @@ pub fn build(b: *Build) !void {
const step = b.step("check-windows", "Check for semantic analysis errors on Windows");
addMultiCheck(b, step, build_options, &.{
.{ .os = .windows, .arch = .x86_64 },
.{ .os = .windows, .arch = .aarch64 },
}, &.{ .Debug, .ReleaseFast });
}
{
const step = b.step("check-windows-debug", "Check for semantic analysis errors on Windows");
addMultiCheck(b, step, build_options, &.{
.{ .os = .windows, .arch = .x86_64 },
.{ .os = .windows, .arch = .aarch64 },
}, &.{.Debug});
}
{
@@ -430,7 +422,6 @@ pub fn build(b: *Build) !void {
const step = b.step("translate-c", "Copy generated translated-c-headers.zig to zig-out");
for ([_]TargetDescription{
.{ .os = .windows, .arch = .x86_64 },
.{ .os = .windows, .arch = .aarch64 },
.{ .os = .mac, .arch = .x86_64 },
.{ .os = .mac, .arch = .aarch64 },
.{ .os = .linux, .arch = .x86_64 },
@@ -459,146 +450,6 @@ pub fn build(b: *Build) !void {
// const run = b.addRunArtifact(exe);
// step.dependOn(&run.step);
}
// zig build generate-grapheme-tables
// Regenerates src/string/immutable/grapheme_tables.zig from the vendored uucode.
// Run this when updating src/deps/uucode. Normal builds use the committed file.
{
const step = b.step("generate-grapheme-tables", "Regenerate grapheme property tables from vendored uucode");
// --- Phase 1: Build uucode tables (separate module graph, no tables dependency) ---
const bt_config_mod = b.createModule(.{
.root_source_file = b.path("src/deps/uucode/src/config.zig"),
.target = b.graph.host,
});
const bt_types_mod = b.createModule(.{
.root_source_file = b.path("src/deps/uucode/src/types.zig"),
.target = b.graph.host,
});
bt_types_mod.addImport("config.zig", bt_config_mod);
bt_config_mod.addImport("types.zig", bt_types_mod);
const bt_config_x_mod = b.createModule(.{
.root_source_file = b.path("src/deps/uucode/src/x/config.x.zig"),
.target = b.graph.host,
});
const bt_types_x_mod = b.createModule(.{
.root_source_file = b.path("src/deps/uucode/src/x/types.x.zig"),
.target = b.graph.host,
});
bt_types_x_mod.addImport("config.x.zig", bt_config_x_mod);
bt_config_x_mod.addImport("types.x.zig", bt_types_x_mod);
bt_config_x_mod.addImport("types.zig", bt_types_mod);
bt_config_x_mod.addImport("config.zig", bt_config_mod);
const bt_build_config_mod = b.createModule(.{
.root_source_file = b.path("src/unicode/uucode/uucode_config.zig"),
.target = b.graph.host,
});
bt_build_config_mod.addImport("types.zig", bt_types_mod);
bt_build_config_mod.addImport("config.zig", bt_config_mod);
bt_build_config_mod.addImport("types.x.zig", bt_types_x_mod);
bt_build_config_mod.addImport("config.x.zig", bt_config_x_mod);
const build_tables_mod = b.createModule(.{
.root_source_file = b.path("src/deps/uucode/src/build/tables.zig"),
.target = b.graph.host,
.optimize = .Debug,
});
build_tables_mod.addImport("config.zig", bt_config_mod);
build_tables_mod.addImport("build_config", bt_build_config_mod);
build_tables_mod.addImport("types.zig", bt_types_mod);
const build_tables_exe = b.addExecutable(.{
.name = "uucode_build_tables",
.root_module = build_tables_mod,
.use_llvm = true,
});
const run_build_tables = b.addRunArtifact(build_tables_exe);
run_build_tables.setCwd(b.path("src/deps/uucode"));
const tables_path = run_build_tables.addOutputFileArg("tables.zig");
// --- Phase 2: Build grapheme-gen with full uucode (separate module graph) ---
const rt_config_mod = b.createModule(.{
.root_source_file = b.path("src/deps/uucode/src/config.zig"),
.target = b.graph.host,
});
const rt_types_mod = b.createModule(.{
.root_source_file = b.path("src/deps/uucode/src/types.zig"),
.target = b.graph.host,
});
rt_types_mod.addImport("config.zig", rt_config_mod);
rt_config_mod.addImport("types.zig", rt_types_mod);
const rt_config_x_mod = b.createModule(.{
.root_source_file = b.path("src/deps/uucode/src/x/config.x.zig"),
.target = b.graph.host,
});
const rt_types_x_mod = b.createModule(.{
.root_source_file = b.path("src/deps/uucode/src/x/types.x.zig"),
.target = b.graph.host,
});
rt_types_x_mod.addImport("config.x.zig", rt_config_x_mod);
rt_config_x_mod.addImport("types.x.zig", rt_types_x_mod);
rt_config_x_mod.addImport("types.zig", rt_types_mod);
rt_config_x_mod.addImport("config.zig", rt_config_mod);
const rt_build_config_mod = b.createModule(.{
.root_source_file = b.path("src/unicode/uucode/uucode_config.zig"),
.target = b.graph.host,
});
rt_build_config_mod.addImport("types.zig", rt_types_mod);
rt_build_config_mod.addImport("config.zig", rt_config_mod);
rt_build_config_mod.addImport("types.x.zig", rt_types_x_mod);
rt_build_config_mod.addImport("config.x.zig", rt_config_x_mod);
const rt_tables_mod = b.createModule(.{
.root_source_file = tables_path,
.target = b.graph.host,
});
rt_tables_mod.addImport("types.zig", rt_types_mod);
rt_tables_mod.addImport("types.x.zig", rt_types_x_mod);
rt_tables_mod.addImport("config.zig", rt_config_mod);
rt_tables_mod.addImport("build_config", rt_build_config_mod);
const rt_get_mod = b.createModule(.{
.root_source_file = b.path("src/deps/uucode/src/get.zig"),
.target = b.graph.host,
});
rt_get_mod.addImport("types.zig", rt_types_mod);
rt_get_mod.addImport("tables", rt_tables_mod);
rt_types_mod.addImport("get.zig", rt_get_mod);
const uucode_mod = b.createModule(.{
.root_source_file = b.path("src/deps/uucode/src/root.zig"),
.target = b.graph.host,
});
uucode_mod.addImport("types.zig", rt_types_mod);
uucode_mod.addImport("config.zig", rt_config_mod);
uucode_mod.addImport("types.x.zig", rt_types_x_mod);
uucode_mod.addImport("tables", rt_tables_mod);
uucode_mod.addImport("get.zig", rt_get_mod);
// grapheme_gen executable
const gen_exe = b.addExecutable(.{
.name = "grapheme-gen",
.root_module = b.createModule(.{
.root_source_file = b.path("src/unicode/uucode/grapheme_gen.zig"),
.target = b.graph.host,
.optimize = .Debug,
.imports = &.{
.{ .name = "uucode", .module = uucode_mod },
},
}),
.use_llvm = true,
});
const run_gen = b.addRunArtifact(gen_exe);
const gen_output = run_gen.captureStdOut();
const install = b.addInstallFile(gen_output, "../src/string/immutable/grapheme_tables.zig");
step.dependOn(&install.step);
}
}
const TargetDescription = struct {
@@ -642,7 +493,6 @@ fn addMultiCheck(
.no_llvm = root_build_options.no_llvm,
.enable_asan = root_build_options.enable_asan,
.enable_valgrind = root_build_options.enable_valgrind,
.enable_tinycc = root_build_options.enable_tinycc,
.enable_fuzzilli = root_build_options.enable_fuzzilli,
.use_mimalloc = root_build_options.use_mimalloc,
.override_no_export_cpp_apis = root_build_options.override_no_export_cpp_apis,

View File

@@ -21,10 +21,6 @@ endforeach()
if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm|ARM|arm64|ARM64|aarch64|AARCH64")
if(APPLE)
register_compiler_flags(-mcpu=apple-m1)
elseif(WIN32)
# Windows ARM64: use /clang: prefix for clang-cl, skip for MSVC cl.exe subprojects
# These flags are only understood by clang-cl, not MSVC cl.exe
register_compiler_flags(/clang:-march=armv8-a+crc /clang:-mtune=ampere1)
else()
register_compiler_flags(-march=armv8-a+crc -mtune=ampere1)
endif()
@@ -246,17 +242,10 @@ if(UNIX)
)
endif()
if(WIN32)
register_compiler_flags(
DESCRIPTION "Set C/C++ error limit"
/clang:-ferror-limit=${ERROR_LIMIT}
)
else()
register_compiler_flags(
DESCRIPTION "Set C/C++ error limit"
-ferror-limit=${ERROR_LIMIT}
)
endif()
register_compiler_flags(
DESCRIPTION "Set C/C++ error limit"
-ferror-limit=${ERROR_LIMIT}
)
# --- LTO ---
if(ENABLE_LTO)

View File

@@ -106,9 +106,9 @@ else()
endif()
if(CMAKE_HOST_SYSTEM_PROCESSOR MATCHES "arm64|ARM64|aarch64|AARCH64")
set(HOST_ARCH "aarch64")
set(HOST_OS "aarch64")
elseif(CMAKE_HOST_SYSTEM_PROCESSOR MATCHES "x86_64|X86_64|x64|X64|amd64|AMD64")
set(HOST_ARCH "x64")
set(HOST_OS "x64")
else()
unsupported(CMAKE_HOST_SYSTEM_PROCESSOR)
endif()
@@ -433,33 +433,6 @@ function(register_command)
list(APPEND CMD_EFFECTIVE_DEPENDS ${CMD_EXECUTABLE})
endif()
# SKIP_CODEGEN: Skip commands that use BUN_EXECUTABLE if all outputs exist
# This is used for Windows ARM64 builds where x64 bun crashes under emulation
if(SKIP_CODEGEN AND CMD_EXECUTABLE STREQUAL "${BUN_EXECUTABLE}")
set(ALL_OUTPUTS_EXIST TRUE)
foreach(output ${CMD_OUTPUTS})
if(NOT EXISTS ${output})
set(ALL_OUTPUTS_EXIST FALSE)
break()
endif()
endforeach()
if(ALL_OUTPUTS_EXIST AND CMD_OUTPUTS)
message(STATUS "SKIP_CODEGEN: Skipping ${CMD_TARGET} (outputs exist)")
if(CMD_TARGET)
add_custom_target(${CMD_TARGET})
endif()
return()
elseif(NOT CMD_OUTPUTS)
message(STATUS "SKIP_CODEGEN: Skipping ${CMD_TARGET} (no outputs)")
if(CMD_TARGET)
add_custom_target(${CMD_TARGET})
endif()
return()
else()
message(FATAL_ERROR "SKIP_CODEGEN: Cannot skip ${CMD_TARGET} - missing outputs. Run codegen on x64 first.")
endif()
endif()
foreach(target ${CMD_TARGETS})
if(target MATCHES "/|\\\\")
message(FATAL_ERROR "register_command: TARGETS contains \"${target}\", if it's a path add it to SOURCES instead")
@@ -677,7 +650,6 @@ function(register_bun_install)
${NPM_CWD}
COMMAND
${BUN_EXECUTABLE}
${BUN_FLAGS}
install
--frozen-lockfile
SOURCES
@@ -785,7 +757,7 @@ function(register_cmake_command)
set(MAKE_EFFECTIVE_ARGS -B${MAKE_BUILD_PATH} ${CMAKE_ARGS})
set(setFlags GENERATOR BUILD_TYPE)
set(appendFlags C_FLAGS CXX_FLAGS LINKER_FLAGS STATIC_LINKER_FLAGS EXE_LINKER_FLAGS SHARED_LINKER_FLAGS MODULE_LINKER_FLAGS)
set(appendFlags C_FLAGS CXX_FLAGS LINKER_FLAGS)
set(specialFlags POSITION_INDEPENDENT_CODE)
set(flags ${setFlags} ${appendFlags} ${specialFlags})
@@ -831,14 +803,6 @@ function(register_cmake_command)
list(APPEND MAKE_EFFECTIVE_ARGS "-DCMAKE_${flag}=${MAKE_${flag}}")
endforeach()
# Workaround for CMake 4.1.0 bug: Force correct machine type for Windows ARM64
# Use toolchain file and set CMP0197 policy to prevent duplicate /machine: flags
if(WIN32 AND CMAKE_SYSTEM_PROCESSOR MATCHES "ARM64|aarch64|AARCH64")
list(APPEND MAKE_EFFECTIVE_ARGS "-DCMAKE_TOOLCHAIN_FILE=${CWD}/cmake/toolchains/windows-aarch64.cmake")
list(APPEND MAKE_EFFECTIVE_ARGS "-DCMAKE_POLICY_DEFAULT_CMP0197=NEW")
list(APPEND MAKE_EFFECTIVE_ARGS "-DCMAKE_PROJECT_INCLUDE=${CWD}/cmake/arm64-static-lib-fix.cmake")
endif()
if(DEFINED FRESH)
list(APPEND MAKE_EFFECTIVE_ARGS --fresh)
endif()

View File

@@ -4,7 +4,6 @@ endif()
optionx(BUN_LINK_ONLY BOOL "If only the linking step should be built" DEFAULT OFF)
optionx(BUN_CPP_ONLY BOOL "If only the C++ part of Bun should be built" DEFAULT OFF)
optionx(SKIP_CODEGEN BOOL "Skip JavaScript codegen (for Windows ARM64 debug)" DEFAULT OFF)
optionx(BUILDKITE BOOL "If Buildkite is enabled" DEFAULT OFF)
optionx(GITHUB_ACTIONS BOOL "If GitHub Actions is enabled" DEFAULT OFF)
@@ -50,7 +49,7 @@ else()
message(FATAL_ERROR "Unsupported operating system: ${CMAKE_SYSTEM_NAME}")
endif()
if(CMAKE_SYSTEM_PROCESSOR MATCHES "aarch64|arm64|ARM64")
if(CMAKE_SYSTEM_PROCESSOR MATCHES "aarch64|arm64|arm")
setx(ARCH "aarch64")
elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "amd64|x86_64|x64|AMD64")
setx(ARCH "x64")
@@ -58,18 +57,6 @@ else()
message(FATAL_ERROR "Unsupported architecture: ${CMAKE_SYSTEM_PROCESSOR}")
endif()
# CMake 4.0+ policy CMP0197 controls how MSVC machine type flags are handled
# Setting to NEW prevents duplicate /machine: flags being added to linker commands
if(WIN32 AND ARCH STREQUAL "aarch64")
set(CMAKE_POLICY_DEFAULT_CMP0197 NEW)
set(CMAKE_MSVC_CMP0197 NEW)
# Set linker flags for exe/shared linking
set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} /machine:ARM64")
set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} /machine:ARM64")
set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} /machine:ARM64")
set(CMAKE_STATIC_LINKER_FLAGS "${CMAKE_STATIC_LINKER_FLAGS} /machine:ARM64")
endif()
# Windows Code Signing Option
if(WIN32)
optionx(ENABLE_WINDOWS_CODESIGNING BOOL "Enable Windows code signing with DigiCert KeyLocker" DEFAULT OFF)
@@ -212,16 +199,6 @@ optionx(USE_WEBKIT_ICU BOOL "Use the ICU libraries from WebKit" DEFAULT ${DEFAUL
optionx(ERROR_LIMIT STRING "Maximum number of errors to show when compiling C++ code" DEFAULT "100")
# TinyCC is used for FFI JIT compilation
# Disable on Windows ARM64 where it's not yet supported
if(WIN32 AND ARCH STREQUAL "aarch64")
set(DEFAULT_ENABLE_TINYCC OFF)
else()
set(DEFAULT_ENABLE_TINYCC ON)
endif()
optionx(ENABLE_TINYCC BOOL "Enable TinyCC for FFI JIT compilation" DEFAULT ${DEFAULT_ENABLE_TINYCC})
# This is not an `option` because setting this variable to OFF is experimental
# and unsupported. This replaces the `use_mimalloc` variable previously in
# bun.zig, and enables C++ code to also be aware of the option.

View File

@@ -1,8 +0,0 @@
# This file is included after project() via CMAKE_PROJECT_INCLUDE
# It fixes the static library creation command to use ARM64 machine type
if(WIN32 AND CMAKE_SYSTEM_PROCESSOR STREQUAL \"aarch64\")
# Override the static library creation commands to avoid spurious /machine:x64 flags
set(CMAKE_C_CREATE_STATIC_LIBRARY \"<CMAKE_AR> /nologo /machine:ARM64 /out:<TARGET> <OBJECTS>\" CACHE STRING \"\" FORCE)
set(CMAKE_CXX_CREATE_STATIC_LIBRARY \"<CMAKE_AR> /nologo /machine:ARM64 /out:<TARGET> <OBJECTS>\" CACHE STRING \"\" FORCE)
endif()

View File

@@ -21,12 +21,7 @@ if(NOT DEFINED CMAKE_HOST_SYSTEM_PROCESSOR)
endif()
if(CMAKE_HOST_SYSTEM_PROCESSOR MATCHES "arm64|ARM64|aarch64|AARCH64")
# Windows ARM64 can run x86_64 via emulation, and no native ARM64 Zig build exists yet
if(CMAKE_HOST_WIN32)
set(ZIG_ARCH "x86_64")
else()
set(ZIG_ARCH "aarch64")
endif()
set(ZIG_ARCH "aarch64")
elseif(CMAKE_HOST_SYSTEM_PROCESSOR MATCHES "amd64|AMD64|x86_64|X86_64|x64|X64")
set(ZIG_ARCH "x86_64")
else()

View File

@@ -1,34 +0,0 @@
@echo off
setlocal enabledelayedexpansion
REM Wrapper for llvm-lib that strips conflicting /machine:x64 flag for ARM64 builds
REM This is a workaround for CMake 4.1.0 bug
REM Find llvm-lib.exe - check LLVM_LIB env var, then PATH, then known locations
if defined LLVM_LIB (
set "LLVM_LIB_EXE=!LLVM_LIB!"
) else (
where llvm-lib.exe >nul 2>&1
if !ERRORLEVEL! equ 0 (
for /f "delims=" %%i in ('where llvm-lib.exe') do set "LLVM_LIB_EXE=%%i"
) else if exist "C:\Program Files\LLVM\bin\llvm-lib.exe" (
set "LLVM_LIB_EXE=C:\Program Files\LLVM\bin\llvm-lib.exe"
) else (
echo Error: Cannot find llvm-lib.exe. Set LLVM_LIB environment variable or add LLVM to PATH.
exit /b 1
)
)
set "ARGS="
for %%a in (%*) do (
set "ARG=%%a"
if /i "!ARG!"=="/machine:x64" (
REM Skip this argument
) else (
set "ARGS=!ARGS! %%a"
)
)
"!LLVM_LIB_EXE!" %ARGS%
exit /b %ERRORLEVEL%

View File

@@ -1,18 +0,0 @@
# Wrapper for llvm-lib that strips conflicting /machine:x64 flag for ARM64 builds
# This is a workaround for CMake 4.1.0 bug where both /machine:ARM64 and /machine:x64 are added
# Find llvm-lib.exe - check LLVM_LIB env var, then PATH, then known locations
if ($env:LLVM_LIB) {
$llvmLib = $env:LLVM_LIB
} elseif (Get-Command llvm-lib.exe -ErrorAction SilentlyContinue) {
$llvmLib = (Get-Command llvm-lib.exe).Source
} elseif (Test-Path "C:\Program Files\LLVM\bin\llvm-lib.exe") {
$llvmLib = "C:\Program Files\LLVM\bin\llvm-lib.exe"
} else {
Write-Error "Cannot find llvm-lib.exe. Set LLVM_LIB environment variable or add LLVM to PATH."
exit 1
}
$filteredArgs = $args | Where-Object { $_ -ne "/machine:x64" }
& $llvmLib @filteredArgs
exit $LASTEXITCODE

View File

@@ -1,34 +0,0 @@
@echo off
setlocal enabledelayedexpansion
REM Wrapper for llvm-lib that strips conflicting /machine:x64 flag for ARM64 builds
REM This is a workaround for CMake 4.1.0 bug
REM Find llvm-lib.exe - check LLVM_LIB env var, then PATH, then known locations
if defined LLVM_LIB (
set "LLVM_LIB_EXE=!LLVM_LIB!"
) else (
where llvm-lib.exe >nul 2>&1
if !ERRORLEVEL! equ 0 (
for /f "delims=" %%i in ('where llvm-lib.exe') do set "LLVM_LIB_EXE=%%i"
) else if exist "C:\Program Files\LLVM\bin\llvm-lib.exe" (
set "LLVM_LIB_EXE=C:\Program Files\LLVM\bin\llvm-lib.exe"
) else (
echo Error: Cannot find llvm-lib.exe. Set LLVM_LIB environment variable or add LLVM to PATH.
exit /b 1
)
)
set NEWARGS=
for %%a in (%*) do (
set "ARG=%%a"
if /i "!ARG!"=="/machine:x64" (
REM Skip /machine:x64 argument
) else (
set "NEWARGS=!NEWARGS! %%a"
)
)
"!LLVM_LIB_EXE!" %NEWARGS%
exit /b %ERRORLEVEL%

View File

@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
oven-sh/boringssl
COMMIT
4f4f5ef8ebc6e23cbf393428f0ab1b526773f7ac
f1ffd9e83d4f5c28a9c70d73f9a4e6fcf310062f
)
register_cmake_command(

View File

@@ -57,17 +57,13 @@ set(BUN_DEPENDENCIES
LolHtml
Lshpack
Mimalloc
TinyCC
Zlib
LibArchive # must be loaded after zlib
HdrHistogram # must be loaded after zlib
Zstd
)
# TinyCC is optional - disabled on Windows ARM64 where it's not supported
if(ENABLE_TINYCC)
list(APPEND BUN_DEPENDENCIES TinyCC)
endif()
include(CloneZstd)
# --- Codegen ---
@@ -189,7 +185,7 @@ register_command(
CWD
${BUN_NODE_FALLBACKS_SOURCE}
COMMAND
${BUN_EXECUTABLE} ${BUN_FLAGS} run build-fallbacks
${BUN_EXECUTABLE} run build-fallbacks
${BUN_NODE_FALLBACKS_OUTPUT}
${BUN_NODE_FALLBACKS_SOURCES}
SOURCES
@@ -210,7 +206,7 @@ register_command(
CWD
${BUN_NODE_FALLBACKS_SOURCE}
COMMAND
${BUN_EXECUTABLE} ${BUN_FLAGS} build
${BUN_EXECUTABLE} build
${BUN_NODE_FALLBACKS_SOURCE}/node_modules/react-refresh/cjs/react-refresh-runtime.development.js
--outfile=${BUN_REACT_REFRESH_OUTPUT}
--target=browser
@@ -247,7 +243,6 @@ register_command(
"Generating ErrorCode.{zig,h}"
COMMAND
${BUN_EXECUTABLE}
${BUN_FLAGS}
run
${BUN_ERROR_CODE_SCRIPT}
${CODEGEN_PATH}
@@ -283,7 +278,6 @@ register_command(
"Generating ZigGeneratedClasses.{zig,cpp,h}"
COMMAND
${BUN_EXECUTABLE}
${BUN_FLAGS}
run
${BUN_ZIG_GENERATED_CLASSES_SCRIPT}
${BUN_ZIG_GENERATED_CLASSES_SOURCES}
@@ -334,7 +328,6 @@ register_command(
"Generating C++ --> Zig bindings"
COMMAND
${BUN_EXECUTABLE}
${BUN_FLAGS}
${CWD}/src/codegen/cppbind.ts
${CWD}/src
${CODEGEN_PATH}
@@ -352,7 +345,6 @@ register_command(
"Generating CI info"
COMMAND
${BUN_EXECUTABLE}
${BUN_FLAGS}
${CWD}/src/codegen/ci_info.ts
${CODEGEN_PATH}/ci_info.zig
SOURCES
@@ -361,35 +353,24 @@ register_command(
${BUN_CI_INFO_OUTPUTS}
)
if(SKIP_CODEGEN)
# Skip JavaScript codegen - useful for Windows ARM64 debug builds where bun crashes
message(STATUS "SKIP_CODEGEN is ON - skipping bun-js-modules codegen")
foreach(output ${BUN_JAVASCRIPT_OUTPUTS})
if(NOT EXISTS ${output})
message(FATAL_ERROR "SKIP_CODEGEN is ON but ${output} does not exist. Run codegen manually first.")
endif()
endforeach()
else()
register_command(
TARGET
bun-js-modules
COMMENT
"Generating JavaScript modules"
COMMAND
${BUN_EXECUTABLE}
${BUN_FLAGS}
run
${BUN_JAVASCRIPT_CODEGEN_SCRIPT}
--debug=${DEBUG}
${BUILD_PATH}
SOURCES
${BUN_JAVASCRIPT_SOURCES}
${BUN_JAVASCRIPT_CODEGEN_SOURCES}
register_command(
TARGET
bun-js-modules
COMMENT
"Generating JavaScript modules"
COMMAND
${BUN_EXECUTABLE}
run
${BUN_JAVASCRIPT_CODEGEN_SCRIPT}
OUTPUTS
${BUN_JAVASCRIPT_OUTPUTS}
)
endif()
--debug=${DEBUG}
${BUILD_PATH}
SOURCES
${BUN_JAVASCRIPT_SOURCES}
${BUN_JAVASCRIPT_CODEGEN_SOURCES}
${BUN_JAVASCRIPT_CODEGEN_SCRIPT}
OUTPUTS
${BUN_JAVASCRIPT_OUTPUTS}
)
set(BUN_BAKE_RUNTIME_CODEGEN_SCRIPT ${CWD}/src/codegen/bake-codegen.ts)
@@ -411,7 +392,6 @@ register_command(
"Bundling Bake Runtime"
COMMAND
${BUN_EXECUTABLE}
${BUN_FLAGS}
run
${BUN_BAKE_RUNTIME_CODEGEN_SCRIPT}
--debug=${DEBUG}
@@ -435,7 +415,7 @@ string(REPLACE ";" "," BUN_BINDGENV2_SOURCES_COMMA_SEPARATED
"${BUN_BINDGENV2_SOURCES}")
execute_process(
COMMAND ${BUN_EXECUTABLE} ${BUN_FLAGS} run ${BUN_BINDGENV2_SCRIPT}
COMMAND ${BUN_EXECUTABLE} run ${BUN_BINDGENV2_SCRIPT}
--command=list-outputs
--sources=${BUN_BINDGENV2_SOURCES_COMMA_SEPARATED}
--codegen-path=${CODEGEN_PATH}
@@ -458,7 +438,7 @@ register_command(
COMMENT
"Generating bindings (v2)"
COMMAND
${BUN_EXECUTABLE} ${BUN_FLAGS} run ${BUN_BINDGENV2_SCRIPT}
${BUN_EXECUTABLE} run ${BUN_BINDGENV2_SCRIPT}
--command=generate
--codegen-path=${CODEGEN_PATH}
--sources=${BUN_BINDGENV2_SOURCES_COMMA_SEPARATED}
@@ -489,7 +469,6 @@ register_command(
"Processing \".bind.ts\" files"
COMMAND
${BUN_EXECUTABLE}
${BUN_FLAGS}
run
${BUN_BINDGEN_SCRIPT}
--debug=${DEBUG}
@@ -522,7 +501,6 @@ register_command(
"Generating JSSink.{cpp,h}"
COMMAND
${BUN_EXECUTABLE}
${BUN_FLAGS}
run
${BUN_JS_SINK_SCRIPT}
${CODEGEN_PATH}
@@ -595,7 +573,6 @@ foreach(i RANGE 0 ${BUN_OBJECT_LUT_SOURCES_MAX_INDEX})
${BUN_OBJECT_LUT_SOURCE}
COMMAND
${BUN_EXECUTABLE}
${BUN_FLAGS}
run
${BUN_OBJECT_LUT_SCRIPT}
${BUN_OBJECT_LUT_SOURCE}
@@ -679,10 +656,6 @@ endif()
if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm|ARM|arm64|ARM64|aarch64|AARCH64")
if(APPLE)
set(ZIG_CPU "apple_m1")
elseif(WIN32)
# Windows ARM64: use a specific CPU with NEON support
# Zig running under x64 emulation would detect wrong CPU with "native"
set(ZIG_CPU "cortex_a76")
else()
set(ZIG_CPU "native")
endif()
@@ -721,7 +694,6 @@ register_command(
-Denable_asan=$<IF:$<BOOL:${ENABLE_ZIG_ASAN}>,true,false>
-Denable_fuzzilli=$<IF:$<BOOL:${ENABLE_FUZZILLI}>,true,false>
-Denable_valgrind=$<IF:$<BOOL:${ENABLE_VALGRIND}>,true,false>
-Denable_tinycc=$<IF:$<BOOL:${ENABLE_TINYCC}>,true,false>
-Duse_mimalloc=$<IF:$<BOOL:${USE_MIMALLOC_AS_DEFAULT_ALLOCATOR}>,true,false>
-Dllvm_codegen_threads=${LLVM_ZIG_CODEGEN_THREADS}
-Dversion=${VERSION}
@@ -939,7 +911,7 @@ if(WIN32)
endif()
if(USE_MIMALLOC_AS_DEFAULT_ALLOCATOR)
target_compile_definitions(${bun} PRIVATE USE_BUN_MIMALLOC=1)
target_compile_definitions(${bun} PRIVATE USE_MIMALLOC=1)
endif()
target_compile_definitions(${bun} PRIVATE
@@ -1239,7 +1211,7 @@ if(BUN_LINK_ONLY)
WEBKIT_DOWNLOAD_URL=${WEBKIT_DOWNLOAD_URL}
WEBKIT_VERSION=${WEBKIT_VERSION}
ZIG_COMMIT=${ZIG_COMMIT}
${BUN_EXECUTABLE} ${BUN_FLAGS} ${CWD}/scripts/create-link-metadata.mjs ${BUILD_PATH} ${bun}
${BUN_EXECUTABLE} ${CWD}/scripts/create-link-metadata.mjs ${BUILD_PATH} ${bun}
SOURCES
${BUN_ZIG_OUTPUT}
${BUN_CPP_OUTPUT}
@@ -1253,7 +1225,6 @@ if(WIN32)
target_link_libraries(${bun} PRIVATE
${WEBKIT_LIB_PATH}/WTF.lib
${WEBKIT_LIB_PATH}/JavaScriptCore.lib
${WEBKIT_LIB_PATH}/bmalloc.lib
${WEBKIT_LIB_PATH}/sicudtd.lib
${WEBKIT_LIB_PATH}/sicuind.lib
${WEBKIT_LIB_PATH}/sicuucd.lib
@@ -1262,7 +1233,6 @@ if(WIN32)
target_link_libraries(${bun} PRIVATE
${WEBKIT_LIB_PATH}/WTF.lib
${WEBKIT_LIB_PATH}/JavaScriptCore.lib
${WEBKIT_LIB_PATH}/bmalloc.lib
${WEBKIT_LIB_PATH}/sicudt.lib
${WEBKIT_LIB_PATH}/sicuin.lib
${WEBKIT_LIB_PATH}/sicuuc.lib
@@ -1340,9 +1310,6 @@ if(WIN32)
wsock32 # ws2_32 required by TransmitFile aka sendfile on windows
delayimp.lib
)
# Required for static ICU linkage - without this, ICU headers expect DLL linkage
# which causes ABI mismatch and crashes (STATUS_STACK_BUFFER_OVERRUN)
target_compile_definitions(${bun} PRIVATE U_STATIC_IMPLEMENTATION)
endif()
# --- Packaging ---

View File

@@ -20,15 +20,6 @@ set(HIGHWAY_CMAKE_ARGS
-DHWY_ENABLE_INSTALL=OFF
)
# On Windows ARM64 with clang-cl, the __ARM_NEON macro isn't defined by default
# but NEON intrinsics are supported. Define it so Highway can detect NEON support.
if(WIN32 AND CMAKE_SYSTEM_PROCESSOR MATCHES "ARM64|aarch64|AARCH64")
list(APPEND HIGHWAY_CMAKE_ARGS
-DCMAKE_C_FLAGS=-D__ARM_NEON=1
-DCMAKE_CXX_FLAGS=-D__ARM_NEON=1
)
endif()
register_cmake_command(
TARGET
highway

View File

@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
cloudflare/lol-html
COMMIT
e9e16dca48dd4a8ffbc77642bc4be60407585f11
d64457d9ff0143deef025d5df7e8586092b9afb7
)
set(LOLHTML_CWD ${VENDOR_PATH}/lolhtml/c-api)
@@ -33,37 +33,6 @@ if (NOT WIN32)
set(RUSTFLAGS "-Cpanic=abort-Cdebuginfo=0-Cforce-unwind-tables=no-Copt-level=s")
endif()
# On Windows, ensure MSVC link.exe is used instead of Git's link.exe
set(LOLHTML_ENV
CARGO_TERM_COLOR=always
CARGO_TERM_VERBOSE=true
CARGO_TERM_DIAGNOSTIC=true
CARGO_ENCODED_RUSTFLAGS=${RUSTFLAGS}
CARGO_HOME=${CARGO_HOME}
RUSTUP_HOME=${RUSTUP_HOME}
)
if(WIN32)
# On Windows, tell Rust to use MSVC link.exe directly via the target-specific linker env var.
# This avoids Git's /usr/bin/link being found first in PATH.
# Find the MSVC link.exe from Visual Studio installation
file(GLOB MSVC_VERSIONS "C:/Program Files/Microsoft Visual Studio/2022/*/VC/Tools/MSVC/*")
if(MSVC_VERSIONS)
list(GET MSVC_VERSIONS -1 MSVC_LATEST) # Get the latest version
if(CMAKE_SYSTEM_PROCESSOR MATCHES "ARM64|aarch64")
set(MSVC_LINK_PATH "${MSVC_LATEST}/bin/HostARM64/arm64/link.exe")
set(CARGO_LINKER_VAR "CARGO_TARGET_AARCH64_PC_WINDOWS_MSVC_LINKER")
else()
set(MSVC_LINK_PATH "${MSVC_LATEST}/bin/Hostx64/x64/link.exe")
set(CARGO_LINKER_VAR "CARGO_TARGET_X86_64_PC_WINDOWS_MSVC_LINKER")
endif()
if(EXISTS "${MSVC_LINK_PATH}")
list(APPEND LOLHTML_ENV "${CARGO_LINKER_VAR}=${MSVC_LINK_PATH}")
message(STATUS "lolhtml: Using MSVC link.exe: ${MSVC_LINK_PATH}")
endif()
endif()
endif()
register_command(
TARGET
lolhtml
@@ -76,7 +45,12 @@ register_command(
ARTIFACTS
${LOLHTML_LIBRARY}
ENVIRONMENT
${LOLHTML_ENV}
CARGO_TERM_COLOR=always
CARGO_TERM_VERBOSE=true
CARGO_TERM_DIAGNOSTIC=true
CARGO_ENCODED_RUSTFLAGS=${RUSTFLAGS}
CARGO_HOME=${CARGO_HOME}
RUSTUP_HOME=${RUSTUP_HOME}
)
target_link_libraries(${bun} PRIVATE ${LOLHTML_LIBRARY})

View File

@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
oven-sh/mimalloc
COMMIT
989115cefb6915baa13788cb8252d83aac5330ad
1beadf9651a7bfdec6b5367c380ecc3fe1c40d1a
)
set(MIMALLOC_CMAKE_ARGS
@@ -14,7 +14,7 @@ set(MIMALLOC_CMAKE_ARGS
-DMI_BUILD_TESTS=OFF
-DMI_USE_CXX=ON
-DMI_SKIP_COLLECT_ON_EXIT=ON
# ```
# mimalloc_allow_large_os_pages=0 BUN_PORT=3004 mem bun http-hello.js
# Started development server: http://localhost:3004
@@ -51,7 +51,7 @@ if(ENABLE_ASAN)
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_DEBUG_UBSAN=ON)
elseif(APPLE OR LINUX)
if(APPLE)
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OVERRIDE=OFF)
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OVERRIDE=OFF)
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OSX_ZONE=OFF)
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OSX_INTERPOSE=OFF)
else()
@@ -77,9 +77,9 @@ endif()
if(WIN32)
if(DEBUG)
set(MIMALLOC_LIBRARY mimalloc-debug)
set(MIMALLOC_LIBRARY mimalloc-static-debug)
else()
set(MIMALLOC_LIBRARY mimalloc)
set(MIMALLOC_LIBRARY mimalloc-static)
endif()
elseif(DEBUG)
if (ENABLE_ASAN)

View File

@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
oven-sh/tinycc
COMMIT
12882eee073cfe5c7621bcfadf679e1372d4537b
29985a3b59898861442fa3b43f663fc1af2591d7
)
register_cmake_command(

View File

@@ -1,20 +0,0 @@
set(CMAKE_SYSTEM_NAME Windows)
set(CMAKE_SYSTEM_PROCESSOR aarch64)
set(CMAKE_C_COMPILER_WORKS ON)
set(CMAKE_CXX_COMPILER_WORKS ON)
# Force ARM64 architecture ID - this is what CMake uses to determine /machine: flag
set(MSVC_C_ARCHITECTURE_ID ARM64 CACHE INTERNAL "")
set(MSVC_CXX_ARCHITECTURE_ID ARM64 CACHE INTERNAL "")
# CMake 4.0+ policy CMP0197 controls how MSVC machine type flags are handled
set(CMAKE_POLICY_DEFAULT_CMP0197 NEW CACHE INTERNAL "")
# Clear any inherited static linker flags that might have wrong machine types
set(CMAKE_STATIC_LINKER_FLAGS "" CACHE STRING "" FORCE)
# Use wrapper script for llvm-lib that strips /machine:x64 flags
# This works around CMake 4.1.0 bug where both ARM64 and x64 machine flags are added
get_filename_component(_TOOLCHAIN_DIR "${CMAKE_CURRENT_LIST_DIR}" DIRECTORY)
set(CMAKE_AR "${_TOOLCHAIN_DIR}/scripts/llvm-lib-wrapper.bat" CACHE FILEPATH "" FORCE)

View File

@@ -6,8 +6,7 @@ endif()
optionx(BUILDKITE_ORGANIZATION_SLUG STRING "The organization slug to use on Buildkite" DEFAULT "bun")
optionx(BUILDKITE_PIPELINE_SLUG STRING "The pipeline slug to use on Buildkite" DEFAULT "bun")
optionx(BUILDKITE_BUILD_ID STRING "The build ID (UUID) to use on Buildkite")
optionx(BUILDKITE_BUILD_NUMBER STRING "The build number to use on Buildkite")
optionx(BUILDKITE_BUILD_ID STRING "The build ID to use on Buildkite")
optionx(BUILDKITE_GROUP_ID STRING "The group ID to use on Buildkite")
if(ENABLE_BASELINE)
@@ -33,13 +32,7 @@ if(NOT BUILDKITE_BUILD_ID)
return()
endif()
# Use BUILDKITE_BUILD_NUMBER for the URL if available, as the UUID format causes a 302 redirect
# that CMake's file(DOWNLOAD) doesn't follow, resulting in empty response.
if(BUILDKITE_BUILD_NUMBER)
setx(BUILDKITE_BUILD_URL https://buildkite.com/${BUILDKITE_ORGANIZATION_SLUG}/${BUILDKITE_PIPELINE_SLUG}/builds/${BUILDKITE_BUILD_NUMBER})
else()
setx(BUILDKITE_BUILD_URL https://buildkite.com/${BUILDKITE_ORGANIZATION_SLUG}/${BUILDKITE_PIPELINE_SLUG}/builds/${BUILDKITE_BUILD_ID})
endif()
setx(BUILDKITE_BUILD_URL https://buildkite.com/${BUILDKITE_ORGANIZATION_SLUG}/${BUILDKITE_PIPELINE_SLUG}/builds/${BUILDKITE_BUILD_ID})
setx(BUILDKITE_BUILD_PATH ${BUILDKITE_BUILDS_PATH}/builds/${BUILDKITE_BUILD_ID})
file(
@@ -55,16 +48,8 @@ if(NOT BUILDKITE_BUILD_STATUS EQUAL 0)
endif()
file(READ ${BUILDKITE_BUILD_PATH}/build.json BUILDKITE_BUILD)
# CMake's string(JSON ...) interprets escape sequences like \n, \r, \t.
# We need to escape these specific sequences while preserving valid JSON escapes like \" and \\.
# Strategy: Use a unique placeholder to protect \\ sequences, escape \n/\r/\t, then restore \\.
# This prevents \\n (literal backslash + n) from being corrupted to \\\n.
set(BKSLASH_PLACEHOLDER "___BKSLASH_PLACEHOLDER_7f3a9b2c___")
string(REPLACE "\\\\" "${BKSLASH_PLACEHOLDER}" BUILDKITE_BUILD "${BUILDKITE_BUILD}")
string(REPLACE "\\n" "\\\\n" BUILDKITE_BUILD "${BUILDKITE_BUILD}")
string(REPLACE "\\r" "\\\\r" BUILDKITE_BUILD "${BUILDKITE_BUILD}")
string(REPLACE "\\t" "\\\\t" BUILDKITE_BUILD "${BUILDKITE_BUILD}")
string(REPLACE "${BKSLASH_PLACEHOLDER}" "\\\\" BUILDKITE_BUILD "${BUILDKITE_BUILD}")
# Escape backslashes so CMake doesn't interpret JSON escape sequences (e.g., \n in commit messages)
string(REPLACE "\\" "\\\\" BUILDKITE_BUILD "${BUILDKITE_BUILD}")
string(JSON BUILDKITE_BUILD_UUID GET ${BUILDKITE_BUILD} id)
string(JSON BUILDKITE_JOBS GET ${BUILDKITE_BUILD} jobs)

View File

@@ -17,14 +17,6 @@ if (NOT CI)
set(BUN_EXECUTABLE ${BUN_EXECUTABLE} CACHE FILEPATH "Bun executable" FORCE)
endif()
# On Windows ARM64, we need to add --smol flag to avoid crashes when running
# x64 bun under WoW64 emulation
if(WIN32 AND ARCH STREQUAL "aarch64")
set(BUN_FLAGS "--smol" CACHE STRING "Extra flags for bun executable")
else()
set(BUN_FLAGS "" CACHE STRING "Extra flags for bun executable")
endif()
# If this is not set, some advanced features are not checked.
# https://github.com/oven-sh/bun/blob/cd7f6a1589db7f1e39dc4e3f4a17234afbe7826c/src/bun.js/javascript.zig#L1069-L1072
setenv(BUN_GARBAGE_COLLECTOR_LEVEL 1)

View File

@@ -12,13 +12,7 @@ if(NOT ENABLE_LLVM)
return()
endif()
# LLVM 21 is required for Windows ARM64 (first version with ARM64 Windows builds)
# Other platforms use LLVM 19.1.7
if(WIN32 AND CMAKE_SYSTEM_PROCESSOR MATCHES "ARM64|aarch64|AARCH64")
set(DEFAULT_LLVM_VERSION "21.1.8")
else()
set(DEFAULT_LLVM_VERSION "19.1.7")
endif()
set(DEFAULT_LLVM_VERSION "19.1.7")
optionx(LLVM_VERSION STRING "The version of LLVM to use" DEFAULT ${DEFAULT_LLVM_VERSION})

View File

@@ -2,14 +2,10 @@ option(WEBKIT_VERSION "The version of WebKit to use")
option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading")
if(NOT WEBKIT_VERSION)
set(WEBKIT_VERSION 0e6527f24783ea832fa58f696437829cdcbc3c7c)
set(WEBKIT_VERSION 1d0216219a3c52cb85195f48f19ba7d5db747ff7)
endif()
# Use preview build URL for Windows ARM64 until the fix is merged to main
set(WEBKIT_PREVIEW_PR 140)
string(SUBSTRING ${WEBKIT_VERSION} 0 16 WEBKIT_VERSION_PREFIX)
string(SUBSTRING ${WEBKIT_VERSION} 0 8 WEBKIT_VERSION_SHORT)
if(WEBKIT_LOCAL)
set(DEFAULT_WEBKIT_PATH ${VENDOR_PATH}/WebKit/WebKitBuild/${CMAKE_BUILD_TYPE})
@@ -37,25 +33,9 @@ if(WEBKIT_LOCAL)
${WEBKIT_PATH}/JavaScriptCore/PrivateHeaders
${WEBKIT_PATH}/bmalloc/Headers
${WEBKIT_PATH}/WTF/Headers
${WEBKIT_PATH}/JavaScriptCore/PrivateHeaders/JavaScriptCore
${WEBKIT_PATH}/JavaScriptCore/DerivedSources/inspector
${WEBKIT_PATH}/JavaScriptCore/PrivateHeaders/JavaScriptCore
)
# On Windows, add ICU include path from vcpkg
if(WIN32)
# Auto-detect vcpkg triplet
set(VCPKG_ARM64_PATH ${VENDOR_PATH}/WebKit/vcpkg_installed/arm64-windows-static)
set(VCPKG_X64_PATH ${VENDOR_PATH}/WebKit/vcpkg_installed/x64-windows-static)
if(EXISTS ${VCPKG_ARM64_PATH})
set(VCPKG_ICU_PATH ${VCPKG_ARM64_PATH})
else()
set(VCPKG_ICU_PATH ${VCPKG_X64_PATH})
endif()
if(EXISTS ${VCPKG_ICU_PATH}/include)
include_directories(${VCPKG_ICU_PATH}/include)
message(STATUS "Using ICU from vcpkg: ${VCPKG_ICU_PATH}/include")
endif()
endif()
endif()
# After this point, only prebuilt WebKit is supported
@@ -72,7 +52,7 @@ else()
message(FATAL_ERROR "Unsupported operating system: ${CMAKE_SYSTEM_NAME}")
endif()
if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm64|ARM64|aarch64|AARCH64")
if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm64|aarch64")
set(WEBKIT_ARCH "arm64")
elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "amd64|x86_64|x64|AMD64")
set(WEBKIT_ARCH "amd64")
@@ -101,14 +81,7 @@ endif()
setx(WEBKIT_NAME bun-webkit-${WEBKIT_OS}-${WEBKIT_ARCH}${WEBKIT_SUFFIX})
set(WEBKIT_FILENAME ${WEBKIT_NAME}.tar.gz)
if(WEBKIT_VERSION MATCHES "^autobuild-")
set(WEBKIT_TAG ${WEBKIT_VERSION})
else()
set(WEBKIT_TAG autobuild-${WEBKIT_VERSION})
endif()
setx(WEBKIT_DOWNLOAD_URL https://github.com/oven-sh/WebKit/releases/download/${WEBKIT_TAG}/${WEBKIT_FILENAME})
setx(WEBKIT_DOWNLOAD_URL https://github.com/oven-sh/WebKit/releases/download/autobuild-${WEBKIT_VERSION}/${WEBKIT_FILENAME})
if(EXISTS ${WEBKIT_PATH}/package.json)
file(READ ${WEBKIT_PATH}/package.json WEBKIT_PACKAGE_JSON)

View File

@@ -1,4 +1,4 @@
if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm64|ARM64|aarch64|AARCH64")
if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm64|aarch64")
set(DEFAULT_ZIG_ARCH "aarch64")
elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "amd64|x86_64|x64|AMD64")
set(DEFAULT_ZIG_ARCH "x86_64")

View File

@@ -35,8 +35,8 @@ end
set -l bun_install_boolean_flags yarn production optional development no-save dry-run force no-cache silent verbose global
set -l bun_install_boolean_flags_descriptions "Write a yarn.lock file (yarn v1)" "Don't install devDependencies" "Add dependency to optionalDependencies" "Add dependency to devDependencies" "Don't update package.json or save a lockfile" "Don't install anything" "Always request the latest versions from the registry & reinstall all dependencies" "Ignore manifest cache entirely" "Don't output anything" "Excessively verbose logging" "Use global folder"
set -l bun_builtin_cmds_without_run dev create help bun upgrade discord install remove add update init pm x
set -l bun_builtin_cmds_accepting_flags create help bun upgrade discord run init link unlink pm x update
set -l bun_builtin_cmds_without_run dev create help bun upgrade discord install remove add init pm x
set -l bun_builtin_cmds_accepting_flags create help bun upgrade discord run init link unlink pm x
function __bun_complete_bins_scripts --inherit-variable bun_builtin_cmds_without_run -d "Emit bun completions for bins and scripts"
# Do nothing if we already have a builtin subcommand,
@@ -148,14 +148,14 @@ complete -c bun \
for i in (seq (count $bun_install_boolean_flags))
complete -c bun \
-n "__fish_seen_subcommand_from install add remove update" -l "$bun_install_boolean_flags[$i]" -d "$bun_install_boolean_flags_descriptions[$i]"
-n "__fish_seen_subcommand_from install add remove" -l "$bun_install_boolean_flags[$i]" -d "$bun_install_boolean_flags_descriptions[$i]"
end
complete -c bun \
-n "__fish_seen_subcommand_from install add remove update" -l 'cwd' -d 'Change working directory'
-n "__fish_seen_subcommand_from install add remove" -l 'cwd' -d 'Change working directory'
complete -c bun \
-n "__fish_seen_subcommand_from install add remove update" -l 'cache-dir' -d 'Choose a cache directory (default: $HOME/.bun/install/cache)'
-n "__fish_seen_subcommand_from install add remove" -l 'cache-dir' -d 'Choose a cache directory (default: $HOME/.bun/install/cache)'
complete -c bun \
-n "__fish_seen_subcommand_from add" -d 'Popular' -a '(__fish__get_bun_packages)'
@@ -183,5 +183,4 @@ complete -c bun -n "__fish_use_subcommand" -a "unlink" -d "Unregister a local np
complete -c bun -n "__fish_use_subcommand" -a "pm" -d "Additional package management utilities" -f
complete -c bun -n "__fish_use_subcommand" -a "x" -d "Execute a package binary, installing if needed" -f
complete -c bun -n "__fish_use_subcommand" -a "outdated" -d "Display the latest versions of outdated dependencies" -f
complete -c bun -n "__fish_use_subcommand" -a "update" -d "Update dependencies to their latest versions" -f
complete -c bun -n "__fish_use_subcommand" -a "publish" -d "Publish your package from local to npm" -f

View File

@@ -150,8 +150,6 @@
"/runtime/secrets",
"/runtime/console",
"/runtime/yaml",
"/runtime/json5",
"/runtime/jsonl",
"/runtime/html-rewriter",
"/runtime/hashing",
"/runtime/glob",
@@ -499,7 +497,6 @@
"/guides/runtime/import-json",
"/guides/runtime/import-toml",
"/guides/runtime/import-yaml",
"/guides/runtime/import-json5",
"/guides/runtime/import-html",
"/guides/util/import-meta-dir",
"/guides/util/import-meta-file",

View File

@@ -1,74 +0,0 @@
---
title: Import a JSON5 file
sidebarTitle: Import JSON5
mode: center
---
Bun natively supports `.json5` imports.
```json5 config.json5 icon="file-code"
{
// Comments are allowed
database: {
host: "localhost",
port: 5432,
name: "myapp",
},
server: {
port: 3000,
timeout: 30,
},
features: {
auth: true,
rateLimit: true,
},
}
```
---
Import the file like any other source file.
```ts config.ts icon="/icons/typescript.svg"
import config from "./config.json5";
config.database.host; // => "localhost"
config.server.port; // => 3000
config.features.auth; // => true
```
---
You can also use named imports to destructure top-level properties:
```ts config.ts icon="/icons/typescript.svg"
import { database, server, features } from "./config.json5";
console.log(database.name); // => "myapp"
console.log(server.timeout); // => 30
console.log(features.rateLimit); // => true
```
---
For parsing JSON5 strings at runtime, use `Bun.JSON5.parse()`:
```ts config.ts icon="/icons/typescript.svg"
const data = JSON5.parse(`{
name: 'John Doe',
age: 30,
hobbies: [
'reading',
'coding',
],
}`);
console.log(data.name); // => "John Doe"
console.log(data.hobbies); // => ["reading", "coding"]
```
---
See [Docs > API > JSON5](/runtime/json5) for complete documentation on JSON5 support in Bun.

View File

@@ -26,6 +26,21 @@ The `bun` CLI contains a Node.js-compatible package manager designed to be a dra
</Note>
<Accordion title="For Linux users">
The recommended minimum Linux Kernel version is 5.6. If you're on Linux kernel 5.1 - 5.5, `bun install` will work, but HTTP requests will be slow due to a lack of support for io_uring's `connect()` operation.
If you're using Ubuntu 20.04, here's how to install a [newer kernel](https://wiki.ubuntu.com/Kernel/LTSEnablementStack):
```bash terminal icon="terminal"
# If this returns a version >= 5.6, you don't need to do anything
uname -r
# Install the official Ubuntu hardware enablement kernel
sudo apt install --install-recommends linux-generic-hwe-20.04
```
</Accordion>
To install all dependencies of a project:
```bash terminal icon="terminal"

View File

@@ -35,7 +35,7 @@ winget install "Visual Studio Community 2022" --override "--add Microsoft.Visual
After Visual Studio, you need the following:
- LLVM (19.1.7 for x64, 21.1.8 for ARM64)
- LLVM 19.1.7
- Go
- Rust
- NASM
@@ -47,35 +47,25 @@ After Visual Studio, you need the following:
[Scoop](https://scoop.sh) can be used to install these remaining tools easily.
```ps1 Scoop (x64)
```ps1 Scoop
irm https://get.scoop.sh | iex
scoop install nodejs-lts go rust nasm ruby perl ccache
# scoop seems to be buggy if you install llvm and the rest at the same time
scoop install llvm@19.1.7
```
For Windows ARM64, download LLVM 21.1.8 directly from GitHub releases (first version with ARM64 Windows builds):
```ps1 ARM64
# Download and install LLVM for ARM64
Invoke-WebRequest -Uri "https://github.com/llvm/llvm-project/releases/download/llvmorg-21.1.8/LLVM-21.1.8-woa64.exe" -OutFile "$env:TEMP\LLVM-21.1.8-woa64.exe"
Start-Process -FilePath "$env:TEMP\LLVM-21.1.8-woa64.exe" -ArgumentList "/S" -Wait
```
<Note>
Please do not use WinGet or other package managers for these, as you will likely install Strawberry Perl instead of a more
minimal installation of Perl. Strawberry Perl includes many other utilities that get installed into `$Env:PATH`, which
will conflict with MSVC and break the build.
</Note>
If you intend on building WebKit locally (optional, x64 only), you should install these packages:
If you intend on building WebKit locally (optional), you should install these packages:
```ps1 Scoop
scoop install make cygwin python
```
<Note>Cygwin is not required for ARM64 builds as WebKit is provided as a pre-built binary.</Note>
From here on out, it is **expected that you use a PowerShell Terminal with `.\scripts\vs-shell.ps1` sourced**. This script is available in the Bun repository and can be loaded by executing it:
```ps1

View File

@@ -10,21 +10,21 @@ Bun provides a fast, native implementation for working with tar archives through
**Create an archive from files:**
```ts
const archive = new Bun.Archive({
const archive = Bun.Archive.from({
"hello.txt": "Hello, World!",
"data.json": JSON.stringify({ foo: "bar" }),
"nested/file.txt": "Nested content",
});
// Write to disk
await Bun.write("bundle.tar", archive);
await Bun.Archive.write("bundle.tar", archive);
```
**Extract an archive:**
```ts
const tarball = await Bun.file("package.tar.gz").bytes();
const archive = new Bun.Archive(tarball);
const archive = Bun.Archive.from(tarball);
const entryCount = await archive.extract("./output");
console.log(`Extracted ${entryCount} entries`);
```
@@ -33,7 +33,7 @@ console.log(`Extracted ${entryCount} entries`);
```ts
const tarball = await Bun.file("package.tar.gz").bytes();
const archive = new Bun.Archive(tarball);
const archive = Bun.Archive.from(tarball);
const files = await archive.files();
for (const [path, file] of files) {
@@ -43,11 +43,10 @@ for (const [path, file] of files) {
## Creating Archives
Use `new Bun.Archive()` to create an archive from an object where keys are file paths and values are file contents. By default, archives are uncompressed:
Use `Bun.Archive.from()` to create an archive from an object where keys are file paths and values are file contents:
```ts
// Creates an uncompressed tar archive (default)
const archive = new Bun.Archive({
const archive = Bun.Archive.from({
"README.md": "# My Project",
"src/index.ts": "console.log('Hello');",
"package.json": JSON.stringify({ name: "my-project" }),
@@ -65,7 +64,7 @@ File contents can be:
const data = "binary data";
const arrayBuffer = new ArrayBuffer(8);
const archive = new Bun.Archive({
const archive = Bun.Archive.from({
"text.txt": "Plain text",
"blob.bin": new Blob([data]),
"bytes.bin": new Uint8Array([1, 2, 3, 4]),
@@ -75,19 +74,18 @@ const archive = new Bun.Archive({
### Writing Archives to Disk
Use `Bun.write()` to write an archive to disk:
Use `Bun.Archive.write()` to create and write an archive in one operation:
```ts
// Write uncompressed tar (default)
const archive = new Bun.Archive({
// Write uncompressed tar
await Bun.Archive.write("output.tar", {
"file1.txt": "content1",
"file2.txt": "content2",
});
await Bun.write("output.tar", archive);
// Write gzipped tar
const compressed = new Bun.Archive({ "src/index.ts": "console.log('Hello');" }, { compress: "gzip" });
await Bun.write("output.tar.gz", compressed);
const files = { "src/index.ts": "console.log('Hello');" };
await Bun.Archive.write("output.tar.gz", files, "gzip");
```
### Getting Archive Bytes
@@ -95,7 +93,8 @@ await Bun.write("output.tar.gz", compressed);
Get the archive data as bytes or a Blob:
```ts
const archive = new Bun.Archive({ "hello.txt": "Hello, World!" });
const files = { "hello.txt": "Hello, World!" };
const archive = Bun.Archive.from(files);
// As Uint8Array
const bytes = await archive.bytes();
@@ -103,10 +102,9 @@ const bytes = await archive.bytes();
// As Blob
const blob = await archive.blob();
// With gzip compression (set at construction)
const gzipped = new Bun.Archive({ "hello.txt": "Hello, World!" }, { compress: "gzip" });
const gzippedBytes = await gzipped.bytes();
const gzippedBlob = await gzipped.blob();
// With gzip compression
const gzippedBytes = await archive.bytes("gzip");
const gzippedBlob = await archive.blob("gzip");
```
## Extracting Archives
@@ -118,13 +116,13 @@ Create an archive from existing tar/tar.gz data:
```ts
// From a file
const tarball = await Bun.file("package.tar.gz").bytes();
const archiveFromFile = new Bun.Archive(tarball);
const archiveFromFile = Bun.Archive.from(tarball);
```
```ts
// From a fetch response
const response = await fetch("https://example.com/archive.tar.gz");
const archiveFromFetch = new Bun.Archive(await response.blob());
const archiveFromFetch = Bun.Archive.from(await response.blob());
```
### Extracting to Disk
@@ -133,7 +131,7 @@ Use `.extract()` to write all files to a directory:
```ts
const tarball = await Bun.file("package.tar.gz").bytes();
const archive = new Bun.Archive(tarball);
const archive = Bun.Archive.from(tarball);
const count = await archive.extract("./extracted");
console.log(`Extracted ${count} entries`);
```
@@ -150,7 +148,7 @@ Use glob patterns to extract only specific files. Patterns are matched against a
```ts
const tarball = await Bun.file("package.tar.gz").bytes();
const archive = new Bun.Archive(tarball);
const archive = Bun.Archive.from(tarball);
// Extract only TypeScript files
const tsCount = await archive.extract("./extracted", { glob: "**/*.ts" });
@@ -183,7 +181,7 @@ Use `.files()` to get archive contents as a `Map` of `File` objects without extr
```ts
const tarball = await Bun.file("package.tar.gz").bytes();
const archive = new Bun.Archive(tarball);
const archive = Bun.Archive.from(tarball);
const files = await archive.files();
for (const [path, file] of files) {
@@ -208,7 +206,7 @@ Archive operations can fail due to corrupted data, I/O errors, or invalid paths.
```ts
try {
const tarball = await Bun.file("package.tar.gz").bytes();
const archive = new Bun.Archive(tarball);
const archive = Bun.Archive.from(tarball);
const count = await archive.extract("./output");
console.log(`Extracted ${count} entries`);
} catch (e: unknown) {
@@ -229,7 +227,7 @@ try {
Common error scenarios:
- **Corrupted/truncated archives** - `new Archive()` loads the archive data; errors may be deferred until read/extract operations
- **Corrupted/truncated archives** - `Archive.from()` loads the archive data; errors may be deferred until read/extract operations
- **Permission denied** - `extract()` throws if the target directory is not writable
- **Disk full** - `extract()` throws if there's insufficient space
- **Invalid paths** - Operations throw for malformed file paths
@@ -241,7 +239,7 @@ The count returned by `extract()` includes all successfully written entries (fil
For additional security with untrusted archives, you can enumerate and validate paths before extraction:
```ts
const archive = new Bun.Archive(untrustedData);
const archive = Bun.Archive.from(untrustedData);
const files = await archive.files();
// Optional: Custom validation for additional checks
@@ -300,28 +298,26 @@ See [Bun.Glob](/docs/api/glob) for the full glob syntax including escaping and a
## Compression
Bun.Archive creates uncompressed tar archives by default. Use `{ compress: "gzip" }` to enable gzip compression:
Bun.Archive supports gzip compression for both reading and writing:
```ts
// Default: uncompressed tar
const archive = new Bun.Archive({ "hello.txt": "Hello, World!" });
// Reading: automatically detects gzip
const gzippedTarball = await Bun.file("archive.tar.gz").bytes();
const readArchive = new Bun.Archive(gzippedTarball);
const archive = Bun.Archive.from(gzippedTarball);
// Enable gzip compression
const compressed = new Bun.Archive({ "hello.txt": "Hello, World!" }, { compress: "gzip" });
// Writing: specify compression
const files = { "hello.txt": "Hello, World!" };
await Bun.Archive.write("output.tar.gz", files, "gzip");
// Gzip with custom level (1-12)
const maxCompression = new Bun.Archive({ "hello.txt": "Hello, World!" }, { compress: "gzip", level: 12 });
// Getting bytes: specify compression
const gzippedBytes = await archive.bytes("gzip");
```
The options accept:
The compression argument accepts:
- No options or `undefined` - Uncompressed tar (default)
- `{ compress: "gzip" }` - Enable gzip compression at level 6
- `{ compress: "gzip", level: number }` - Gzip with custom level 1-12 (1 = fastest, 12 = smallest)
- `"gzip"` - Enable gzip compression
- `true` - Same as `"gzip"`
- `false` or `undefined` - No compression
## Examples
@@ -343,16 +339,15 @@ for await (const path of glob.scan(".")) {
// Add package.json
files["package.json"] = await Bun.file("package.json").text();
// Create compressed archive and write to disk
const archive = new Bun.Archive(files, { compress: "gzip" });
await Bun.write("bundle.tar.gz", archive);
// Create compressed archive
await Bun.Archive.write("bundle.tar.gz", files, "gzip");
```
### Extract and Process npm Package
```ts
const response = await fetch("https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz");
const archive = new Bun.Archive(await response.blob());
const archive = Bun.Archive.from(await response.blob());
// Get package.json
const files = await archive.files("package/package.json");
@@ -370,7 +365,7 @@ if (packageJson) {
import { readdir } from "node:fs/promises";
import { join } from "node:path";
async function archiveDirectory(dir: string, compress = false): Promise<Bun.Archive> {
async function archiveDirectory(dir: string): Promise<Bun.Archive> {
const files: Record<string, Blob> = {};
async function walk(currentDir: string, prefix: string = "") {
@@ -389,11 +384,11 @@ async function archiveDirectory(dir: string, compress = false): Promise<Bun.Arch
}
await walk(dir);
return new Bun.Archive(files, compress ? { compress: "gzip" } : undefined);
return Bun.Archive.from(files);
}
const archive = await archiveDirectory("./my-project", true);
await Bun.write("my-project.tar.gz", archive);
const archive = await archiveDirectory("./my-project");
await Bun.Archive.write("my-project.tar.gz", archive, "gzip");
```
## Reference
@@ -401,19 +396,14 @@ await Bun.write("my-project.tar.gz", archive);
> **Note**: The following type signatures are simplified for documentation purposes. See [`packages/bun-types/bun.d.ts`](https://github.com/oven-sh/bun/blob/main/packages/bun-types/bun.d.ts) for the full type definitions.
```ts
type ArchiveCompression = "gzip" | boolean;
type ArchiveInput =
| Record<string, string | Blob | Bun.ArrayBufferView | ArrayBufferLike>
| Blob
| Bun.ArrayBufferView
| ArrayBufferLike;
type ArchiveOptions = {
/** Compression algorithm. Currently only "gzip" is supported. */
compress?: "gzip";
/** Compression level 1-12 (default 6 when gzip is enabled). */
level?: number;
};
interface ArchiveExtractOptions {
/** Glob pattern(s) to filter extraction. Supports negative patterns with "!" prefix. */
glob?: string | readonly string[];
@@ -422,11 +412,13 @@ interface ArchiveExtractOptions {
class Archive {
/**
* Create an Archive from input data
* @param data - Files to archive (as object) or existing archive data (as bytes/blob)
* @param options - Compression options. Uncompressed by default.
* Pass { compress: "gzip" } to enable compression.
*/
constructor(data: ArchiveInput, options?: ArchiveOptions);
static from(data: ArchiveInput): Archive;
/**
* Write an archive directly to disk
*/
static write(path: string, data: ArchiveInput | Archive, compress?: ArchiveCompression): Promise<void>;
/**
* Extract archive to a directory
@@ -435,14 +427,14 @@ class Archive {
extract(path: string, options?: ArchiveExtractOptions): Promise<number>;
/**
* Get archive as a Blob (uses compression setting from constructor)
* Get archive as a Blob
*/
blob(): Promise<Blob>;
blob(compress?: ArchiveCompression): Promise<Blob>;
/**
* Get archive as a Uint8Array (uses compression setting from constructor)
* Get archive as a Uint8Array
*/
bytes(): Promise<Uint8Array<ArrayBuffer>>;
bytes(compress?: ArchiveCompression): Promise<Uint8Array<ArrayBuffer>>;
/**
* Get archive contents as File objects (regular files only, no directories)

View File

@@ -5,7 +5,7 @@ description: "File types and loaders supported by Bun's bundler and runtime"
The Bun bundler implements a set of default loaders out of the box. As a rule of thumb, the bundler and the runtime both support the same set of file types.
`.js` `.cjs` `.mjs` `.mts` `.cts` `.ts` `.tsx` `.jsx` `.css` `.json` `.jsonc` `.json5` `.toml` `.yaml` `.yml` `.txt` `.wasm` `.node` `.html` `.sh`
`.js` `.cjs` `.mjs` `.mts` `.cts` `.ts` `.tsx` `.jsx` `.css` `.json` `.jsonc` `.toml` `.yaml` `.yml` `.txt` `.wasm` `.node` `.html` `.sh`
Bun uses the file extension to determine which built-in _loader_ should be used to parse the file. Every loader has a name, such as `js`, `tsx`, or `json`. These names are used when building [plugins](/bundler/plugins) that extend Bun with custom loaders.
@@ -197,53 +197,6 @@ export default {
</CodeGroup>
### `json5`
**JSON5 loader**. Default for `.json5`.
JSON5 files can be directly imported. Bun will parse them with its fast native JSON5 parser. JSON5 is a superset of JSON that supports comments, trailing commas, unquoted keys, single-quoted strings, and more.
```ts
import config from "./config.json5";
console.log(config);
// via import attribute:
import data from "./data.txt" with { type: "json5" };
```
During bundling, the parsed JSON5 is inlined into the bundle as a JavaScript object.
```ts
var config = {
name: "my-app",
version: "1.0.0",
// ...other fields
};
```
If a `.json5` file is passed as an entrypoint, it will be converted to a `.js` module that `export default`s the parsed object.
<CodeGroup>
```json5 Input
{
// Configuration
name: "John Doe",
age: 35,
email: "johndoe@example.com",
}
```
```ts Output
export default {
name: "John Doe",
age: 35,
email: "johndoe@example.com",
};
```
</CodeGroup>
### `text`
**Text loader**. Default for `.txt`.

View File

@@ -1,271 +0,0 @@
---
title: JSON5
description: Use Bun's built-in support for JSON5 files through both runtime APIs and bundler integration
---
In Bun, JSON5 is a first-class citizen alongside JSON, TOML, and YAML. You can:
- Parse and stringify JSON5 with `Bun.JSON5.parse` and `Bun.JSON5.stringify`
- `import` & `require` JSON5 files as modules at runtime (including hot reloading & watch mode support)
- `import` & `require` JSON5 files in frontend apps via Bun's bundler
---
## Conformance
Bun's JSON5 parser passes 100% of the [official JSON5 test suite](https://github.com/json5/json5-tests). The parser is written in Zig for optimal performance. You can view our [translated test suite](https://github.com/oven-sh/bun/blob/main/test/js/bun/json5/json5-test-suite.test.ts) to see every test case.
---
## Runtime API
### `Bun.JSON5.parse()`
Parse a JSON5 string into a JavaScript value.
```ts
import { JSON5 } from "bun";
const data = JSON5.parse(`{
// JSON5 supports comments
name: 'my-app',
version: '1.0.0',
debug: true,
// trailing commas are allowed
tags: ['web', 'api',],
}`);
console.log(data);
// {
// name: "my-app",
// version: "1.0.0",
// debug: true,
// tags: ["web", "api"]
// }
```
#### Supported JSON5 Features
JSON5 is a superset of JSON based on ECMAScript 5.1 syntax. It supports:
- **Comments**: single-line (`//`) and multi-line (`/* */`)
- **Trailing commas**: in objects and arrays
- **Unquoted keys**: valid ECMAScript 5.1 identifiers can be used as keys
- **Single-quoted strings**: in addition to double-quoted strings
- **Multi-line strings**: using backslash line continuations
- **Hex numbers**: `0xFF`
- **Leading & trailing decimal points**: `.5` and `5.`
- **Infinity and NaN**: positive and negative
- **Explicit plus sign**: `+42`
```ts
const data = JSON5.parse(`{
// Unquoted keys
unquoted: 'keys work',
// Single and double quotes
single: 'single-quoted',
double: "double-quoted",
// Trailing commas
trailing: 'comma',
// Special numbers
hex: 0xDEADbeef,
half: .5,
to: Infinity,
nan: NaN,
// Multi-line strings
multiline: 'line 1 \
line 2',
}`);
```
#### Error Handling
`Bun.JSON5.parse()` throws a `SyntaxError` if the input is invalid JSON5:
```ts
try {
JSON5.parse("{invalid}");
} catch (error) {
console.error("Failed to parse JSON5:", error.message);
}
```
### `Bun.JSON5.stringify()`
Stringify a JavaScript value to a JSON5 string.
```ts
import { JSON5 } from "bun";
const str = JSON5.stringify({ name: "my-app", version: "1.0.0" });
console.log(str);
// {name:'my-app',version:'1.0.0'}
```
#### Pretty Printing
Pass a `space` argument to format the output with indentation:
```ts
const pretty = JSON5.stringify(
{
name: "my-app",
debug: true,
tags: ["web", "api"],
},
null,
2,
);
console.log(pretty);
// {
// name: 'my-app',
// debug: true,
// tags: [
// 'web',
// 'api',
// ],
// }
```
The `space` argument can be a number (number of spaces) or a string (used as the indentation string):
```ts
// Tab indentation
JSON5.stringify(data, null, "\t");
```
#### Special Values
Unlike `JSON.stringify`, `JSON5.stringify` preserves special numeric values:
```ts
JSON5.stringify({ inf: Infinity, ninf: -Infinity, nan: NaN });
// {inf:Infinity,ninf:-Infinity,nan:NaN}
```
---
## Module Import
### ES Modules
You can import JSON5 files directly as ES modules:
```json5 config.json5
{
// Database configuration
database: {
host: "localhost",
port: 5432,
name: "myapp",
},
features: {
auth: true,
rateLimit: true,
analytics: false,
},
}
```
#### Default Import
```ts app.ts icon="/icons/typescript.svg"
import config from "./config.json5";
console.log(config.database.host); // "localhost"
console.log(config.features.auth); // true
```
#### Named Imports
You can destructure top-level properties as named imports:
```ts app.ts icon="/icons/typescript.svg"
import { database, features } from "./config.json5";
console.log(database.host); // "localhost"
console.log(features.rateLimit); // true
```
### CommonJS
JSON5 files can also be required in CommonJS:
```ts app.ts icon="/icons/typescript.svg"
const config = require("./config.json5");
console.log(config.database.name); // "myapp"
// Destructuring also works
const { database, features } = require("./config.json5");
```
---
## Hot Reloading with JSON5
When you run your application with `bun --hot`, changes to JSON5 files are automatically detected and reloaded:
```json5 config.json5
{
server: {
port: 3000,
host: "localhost",
},
features: {
debug: true,
verbose: false,
},
}
```
```ts server.ts icon="/icons/typescript.svg"
import { server, features } from "./config.json5";
Bun.serve({
port: server.port,
hostname: server.host,
fetch(req) {
if (features.verbose) {
console.log(`${req.method} ${req.url}`);
}
return new Response("Hello World");
},
});
```
Run with hot reloading:
```bash terminal icon="terminal"
bun --hot server.ts
```
---
## Bundler Integration
When you import JSON5 files and bundle with Bun, the JSON5 is parsed at build time and included as a JavaScript module:
```bash terminal icon="terminal"
bun build app.ts --outdir=dist
```
This means:
- Zero runtime JSON5 parsing overhead in production
- Smaller bundle sizes
- Tree-shaking support for unused properties (named imports)
### Dynamic Imports
JSON5 files can be dynamically imported:
```ts
const config = await import("./config.json5");
```

View File

@@ -1,188 +0,0 @@
---
title: JSONL
description: Parse newline-delimited JSON (JSONL) with Bun's built-in streaming parser
---
Bun has built-in support for parsing [JSONL](https://jsonlines.org/) (newline-delimited JSON), where each line is a separate JSON value. The parser is implemented in C++ using JavaScriptCore's optimized JSON parser and supports streaming use cases.
```ts
const results = Bun.JSONL.parse('{"name":"Alice"}\n{"name":"Bob"}\n');
// [{ name: "Alice" }, { name: "Bob" }]
```
---
## `Bun.JSONL.parse()`
Parse a complete JSONL input and return an array of all parsed values.
```ts
import { JSONL } from "bun";
const input = '{"id":1,"name":"Alice"}\n{"id":2,"name":"Bob"}\n{"id":3,"name":"Charlie"}\n';
const records = JSONL.parse(input);
console.log(records);
// [
// { id: 1, name: "Alice" },
// { id: 2, name: "Bob" },
// { id: 3, name: "Charlie" }
// ]
```
Input can be a string or a `Uint8Array`:
```ts
const buffer = new TextEncoder().encode('{"a":1}\n{"b":2}\n');
const results = Bun.JSONL.parse(buffer);
// [{ a: 1 }, { b: 2 }]
```
When passed a `Uint8Array`, a UTF-8 BOM at the start of the buffer is automatically skipped.
### Error handling
If the input contains invalid JSON, `Bun.JSONL.parse()` throws a `SyntaxError`:
```ts
try {
Bun.JSONL.parse('{"valid":true}\n{invalid}\n');
} catch (error) {
console.error(error); // SyntaxError: Failed to parse JSONL
}
```
---
## `Bun.JSONL.parseChunk()`
For streaming scenarios, `parseChunk` parses as many complete values as possible from the input and reports how far it got. This is useful when receiving data incrementally (e.g., from a network stream) and you need to know where to resume parsing.
```ts
const chunk = '{"id":1}\n{"id":2}\n{"id":3';
const result = Bun.JSONL.parseChunk(chunk);
console.log(result.values); // [{ id: 1 }, { id: 2 }]
console.log(result.read); // 17 — characters consumed
console.log(result.done); // false — incomplete value remains
console.log(result.error); // null — no parse error
```
### Return value
`parseChunk` returns an object with four properties:
| Property | Type | Description |
| -------- | --------------------- | ----------------------------------------------------------------------- |
| `values` | `any[]` | Array of successfully parsed JSON values |
| `read` | `number` | Number of bytes (for `Uint8Array`) or characters (for strings) consumed |
| `done` | `boolean` | `true` if the entire input was consumed with no remaining data |
| `error` | `SyntaxError \| null` | Parse error, or `null` if no error occurred |
### Streaming example
Use `read` to slice off consumed input and carry forward the remainder:
```ts
let buffer = "";
async function processStream(stream: ReadableStream<string>) {
for await (const chunk of stream) {
buffer += chunk;
const result = Bun.JSONL.parseChunk(buffer);
for (const value of result.values) {
handleRecord(value);
}
// Keep only the unconsumed portion
buffer = buffer.slice(result.read);
}
// Handle any remaining data
if (buffer.length > 0) {
const final = Bun.JSONL.parseChunk(buffer);
for (const value of final.values) {
handleRecord(value);
}
if (final.error) {
console.error("Parse error in final chunk:", final.error.message);
}
}
}
```
### Byte offsets with `Uint8Array`
When the input is a `Uint8Array`, you can pass optional `start` and `end` byte offsets:
```ts
const buf = new TextEncoder().encode('{"a":1}\n{"b":2}\n{"c":3}\n');
// Parse starting from byte 8
const result = Bun.JSONL.parseChunk(buf, 8);
console.log(result.values); // [{ b: 2 }, { c: 3 }]
console.log(result.read); // 24
// Parse a specific range
const partial = Bun.JSONL.parseChunk(buf, 0, 8);
console.log(partial.values); // [{ a: 1 }]
```
The `read` value is always a byte offset into the original buffer, making it easy to use with `TypedArray.subarray()` for zero-copy streaming:
```ts
let buf = new Uint8Array(0);
async function processBinaryStream(stream: ReadableStream<Uint8Array>) {
for await (const chunk of stream) {
// Append chunk to buffer
const newBuf = new Uint8Array(buf.length + chunk.length);
newBuf.set(buf);
newBuf.set(chunk, buf.length);
buf = newBuf;
const result = Bun.JSONL.parseChunk(buf);
for (const value of result.values) {
handleRecord(value);
}
// Keep unconsumed bytes
buf = buf.slice(result.read);
}
}
```
### Error recovery
Unlike `parse()`, `parseChunk()` does not throw on invalid JSON. Instead, it returns the error in the `error` property, along with any values that were successfully parsed before the error:
```ts
const input = '{"a":1}\n{invalid}\n{"b":2}\n';
const result = Bun.JSONL.parseChunk(input);
console.log(result.values); // [{ a: 1 }] — values parsed before the error
console.log(result.error); // SyntaxError
console.log(result.read); // 7 — position up to last successful parse
```
---
## Supported value types
Each line can be any valid JSON value, not just objects:
```ts
const input = '42\n"hello"\ntrue\nnull\n[1,2,3]\n{"key":"value"}\n';
const values = Bun.JSONL.parse(input);
// [42, "hello", true, null, [1, 2, 3], { key: "value" }]
```
---
## Performance notes
- **ASCII fast path**: Pure ASCII input is parsed directly without copying, using a zero-allocation `StringView`.
- **UTF-8 support**: Non-ASCII `Uint8Array` input is decoded to UTF-16 using SIMD-accelerated conversion.
- **BOM handling**: UTF-8 BOM (`0xEF 0xBB 0xBF`) at the start of a `Uint8Array` is automatically skipped.
- **Pre-built object shape**: The result object from `parseChunk` uses a cached structure for fast property access.

View File

@@ -131,7 +131,6 @@
stdenv = pkgs.clangStdenv;
}) {
inherit packages;
hardeningDisable = [ "fortify" ];
shellHook = ''
# Set up build environment

View File

@@ -35,7 +35,7 @@ JSC_DEFINE_HOST_FUNCTION(jsFunctionWrite, (JSC::JSGlobalObject * globalObject,
JSValue arg1 = callframe->argument(0);
JSValue toWriteArg = callframe->argument(1);
auto &vm = globalObject->vm();
auto scope = DECLARE_TOP_EXCEPTION_SCOPE(vm);
auto scope = DECLARE_CATCH_SCOPE(vm);
int32_t fd = STDOUT_FILENO;
if (callframe->argumentCount() > 1) {

View File

@@ -1,7 +1,7 @@
{
"private": true,
"name": "bun",
"version": "1.3.7",
"version": "1.3.6",
"workspaces": [
"./packages/bun-types",
"./packages/@types/bun"

View File

@@ -71,23 +71,8 @@ async function buildRootModule(dryRun?: boolean) {
js: "// Source code: https://github.com/oven-sh/bun/blob/main/packages/bun-release/scripts/npm-postinstall.ts",
},
});
// Create placeholder scripts that print an error message if postinstall hasn't run.
// On Unix, these are executed as shell scripts despite the .exe extension.
// On Windows, npm creates .cmd wrappers that would fail anyway if the binary isn't valid.
const placeholderScript = `#!/bin/sh
echo "Error: Bun's postinstall script was not run." >&2
echo "" >&2
echo "This occurs when using --ignore-scripts during installation, or when using a" >&2
echo "package manager like pnpm that does not run postinstall scripts by default." >&2
echo "" >&2
echo "To fix this, run the postinstall script manually:" >&2
echo " cd node_modules/bun && node install.js" >&2
echo "" >&2
echo "Or reinstall bun without the --ignore-scripts flag." >&2
exit 1
`;
write(join(cwd, "bin", "bun.exe"), placeholderScript);
write(join(cwd, "bin", "bunx.exe"), placeholderScript);
write(join(cwd, "bin", "bun.exe"), "");
write(join(cwd, "bin", "bunx.exe"), "");
write(
join(cwd, "bin", "README.txt"),
`The 'bun.exe' file is a placeholder for the binary file, which

View File

@@ -610,97 +610,6 @@ declare module "bun" {
*/
function stripANSI(input: string): string;
interface WrapAnsiOptions {
/**
* If `true`, break words in the middle if they don't fit on a line.
* If `false`, only break at word boundaries.
*
* @default false
*/
hard?: boolean;
/**
* If `true`, wrap at word boundaries when possible.
* If `false`, don't perform word wrapping (only wrap at explicit newlines).
*
* @default true
*/
wordWrap?: boolean;
/**
* If `true`, trim leading and trailing whitespace from each line.
* If `false`, preserve whitespace.
*
* @default true
*/
trim?: boolean;
/**
* When it's ambiguous and `true`, count ambiguous width characters as 1 character wide.
* If `false`, count them as 2 characters wide.
*
* @default true
*/
ambiguousIsNarrow?: boolean;
}
/**
* Wrap a string to fit within the specified column width, preserving ANSI escape codes.
*
* This function is designed to be compatible with the popular "wrap-ansi" NPM package.
*
* Features:
* - Preserves ANSI escape codes (colors, styles) across line breaks
* - Supports SGR codes (colors, bold, italic, etc.) and OSC 8 hyperlinks
* - Respects Unicode display widths (full-width characters, emoji)
* - Word wrapping at word boundaries (configurable)
*
* @category Utilities
*
* @param input The string to wrap
* @param columns The maximum column width
* @param options Wrapping options
* @returns The wrapped string
*
* @example
* ```ts
* import { wrapAnsi } from "bun";
*
* console.log(wrapAnsi("hello world", 5));
* // Output:
* // hello
* // world
*
* // Preserves ANSI colors across line breaks
* console.log(wrapAnsi("\u001b[31mhello world\u001b[0m", 5));
* // Output:
* // \u001b[31mhello\u001b[0m
* // \u001b[31mworld\u001b[0m
*
* // Hard wrap long words
* console.log(wrapAnsi("abcdefghij", 3, { hard: true }));
* // Output:
* // abc
* // def
* // ghi
* // j
* ```
*/
function wrapAnsi(
/**
* The string to wrap
*/
input: string,
/**
* The maximum column width
*/
columns: number,
/**
* Wrapping options
*/
options?: WrapAnsiOptions,
): string;
/**
* TOML related APIs
*/
@@ -743,101 +652,6 @@ declare module "bun" {
export function parse(input: string): unknown;
}
/**
* JSONL (JSON Lines) related APIs.
*
* Each line of the input is expected to contain a single valid JSON value; values are separated by newlines.
*/
namespace JSONL {
/**
* The result of `Bun.JSONL.parseChunk`.
*/
interface ParseChunkResult {
/** The successfully parsed JSON values. */
values: unknown[];
/** How far into the input was consumed. When the input is a string, this is a character offset. When the input is a `TypedArray`, this is a byte offset. Use `input.slice(read)` or `input.subarray(read)` to get the unconsumed remainder. */
read: number;
/** `true` if all input was consumed successfully. `false` if the input ends with an incomplete value or a parse error occurred. */
done: boolean;
/** A `SyntaxError` if a parse error occurred, otherwise `null`. Values parsed before the error are still available in `values`. */
error: SyntaxError | null;
}
/**
* Parse a JSONL (JSON Lines) string into an array of JavaScript values.
*
* If a parse error occurs and no values were successfully parsed, throws
* a `SyntaxError`. If values were parsed before the error, returns the
* successfully parsed values without throwing.
*
* Incomplete trailing values (e.g. from a partial chunk) are silently
* ignored and not included in the result.
*
* When a `TypedArray` is passed, the bytes are parsed directly without
* copying if the content is ASCII.
*
* @param input The JSONL string or typed array to parse
* @returns An array of parsed values
* @throws {SyntaxError} If the input starts with invalid JSON and no values could be parsed
*
* @example
* ```js
* const items = Bun.JSONL.parse('{"a":1}\n{"b":2}\n');
* // [{ a: 1 }, { b: 2 }]
*
* // From a Uint8Array (zero-copy for ASCII):
* const buf = new TextEncoder().encode('{"a":1}\n{"b":2}\n');
* const items = Bun.JSONL.parse(buf);
* // [{ a: 1 }, { b: 2 }]
*
* // Partial results on error after valid values:
* const partial = Bun.JSONL.parse('{"a":1}\n{bad}\n');
* // [{ a: 1 }]
*
* // Throws when no valid values precede the error:
* Bun.JSONL.parse('{bad}\n'); // throws SyntaxError
* ```
*/
export function parse(input: string | NodeJS.TypedArray | DataView<ArrayBuffer> | ArrayBufferLike): unknown[];
/**
* Parse a JSONL chunk, designed for streaming use.
*
* Never throws on parse errors. Instead, returns whatever values were
* successfully parsed along with an `error` property containing the
* `SyntaxError` (or `null` on success). Use `read` to determine how
* much input was consumed and `done` to check if all input was parsed.
*
* When a `TypedArray` is passed, the bytes are parsed directly without
* copying if the content is ASCII. Optional `start` and `end` parameters
* allow slicing without copying, and `read` will be a byte offset into
* the original typed array.
*
* @param input The JSONL string or typed array to parse
* @param start Byte offset to start parsing from (typed array only, default: 0)
* @param end Byte offset to stop parsing at (typed array only, default: input.byteLength)
* @returns An object with `values`, `read`, `done`, and `error` properties
*
* @example
* ```js
* let buffer = new Uint8Array(0);
* for await (const chunk of stream) {
* buffer = Buffer.concat([buffer, chunk]);
* const { values, read, error } = Bun.JSONL.parseChunk(buffer);
* if (error) throw error;
* for (const value of values) handle(value);
* buffer = buffer.subarray(read);
* }
* ```
*/
export function parseChunk(input: string): ParseChunkResult;
export function parseChunk(
input: NodeJS.TypedArray | DataView<ArrayBuffer> | ArrayBufferLike,
start?: number,
end?: number,
): ParseChunkResult;
}
/**
* YAML related APIs
*/
@@ -905,69 +719,6 @@ declare module "bun" {
export function stringify(input: unknown, replacer?: undefined | null, space?: string | number): string;
}
/**
* JSON5 related APIs
*/
namespace JSON5 {
/**
* Parse a JSON5 string into a JavaScript value.
*
* JSON5 is a superset of JSON based on ECMAScript 5.1 that supports
* comments, trailing commas, unquoted keys, single-quoted strings,
* hex numbers, Infinity, NaN, and more.
*
* @category Utilities
*
* @param input The JSON5 string to parse
* @returns A JavaScript value
*
* @example
* ```ts
* import { JSON5 } from "bun";
*
* const result = JSON5.parse(`{
* // This is a comment
* name: 'my-app',
* version: '1.0.0', // trailing comma is allowed
* hex: 0xDEADbeef,
* half: .5,
* infinity: Infinity,
* }`);
* ```
*/
export function parse(input: string): unknown;
/**
* Convert a JavaScript value into a JSON5 string. Object keys that are
* valid identifiers are unquoted, strings use double quotes, `Infinity`
* and `NaN` are represented as literals, and indented output includes
* trailing commas.
*
* @category Utilities
*
* @param input The JavaScript value to stringify.
* @param replacer Currently not supported.
* @param space A number for how many spaces each level of indentation gets, or a string used as indentation.
* The number is clamped between 0 and 10, and the first 10 characters of the string are used.
* @returns A JSON5 string, or `undefined` if the input is `undefined`, a function, or a symbol.
*
* @example
* ```ts
* import { JSON5 } from "bun";
*
* console.log(JSON5.stringify({ a: 1, b: "two" }));
* // {a:1,b:"two"}
*
* console.log(JSON5.stringify({ a: 1, b: 2 }, null, 2));
* // {
* // a: 1,
* // b: 2,
* // }
* ```
*/
export function stringify(input: unknown, replacer?: undefined | null, space?: string | number): string | undefined;
}
/**
* Synchronously resolve a `moduleId` as though it were imported from `parent`
*
@@ -999,7 +750,7 @@ declare module "bun" {
*/
function write(
destination: BunFile | S3File | PathLike,
input: Blob | NodeJS.TypedArray | ArrayBufferLike | string | BlobPart[] | Archive,
input: Blob | NodeJS.TypedArray | ArrayBufferLike | string | BlobPart[],
options?: {
/**
* If writing to a PathLike, set the permissions of the file.
@@ -1903,17 +1654,6 @@ declare module "bun" {
* @default "warn"
*/
logLevel?: "verbose" | "debug" | "info" | "warn" | "error";
/**
* Enable REPL mode transforms:
* - Wraps top-level inputs that appear to be object literals (inputs starting with '{' without trailing ';') in parentheses
* - Hoists all declarations as var for REPL persistence across vm.runInContext calls
* - Wraps last expression in { __proto__: null, value: expr } for result capture
* - Wraps code in sync/async IIFE to avoid parentheses around object literals
*
* @default false
*/
replMode?: boolean;
}
/**
@@ -2020,7 +1760,7 @@ declare module "bun" {
type Architecture = "x64" | "arm64";
type Libc = "glibc" | "musl";
type SIMD = "baseline" | "modern";
type CompileTarget =
type Target =
| `bun-darwin-${Architecture}`
| `bun-darwin-x64-${SIMD}`
| `bun-linux-${Architecture}`
@@ -2362,7 +2102,7 @@ declare module "bun" {
}
interface CompileBuildOptions {
target?: Bun.Build.CompileTarget;
target?: Bun.Build.Target;
execArgv?: string[];
executablePath?: string;
outfile?: string;
@@ -2444,7 +2184,7 @@ declare module "bun" {
* });
* ```
*/
compile: boolean | Bun.Build.CompileTarget | CompileBuildOptions;
compile: boolean | Bun.Build.Target | CompileBuildOptions;
/**
* Splitting is not currently supported with `.compile`
@@ -7235,44 +6975,15 @@ declare module "bun" {
/**
* Compression format for archive output.
* Currently only `"gzip"` is supported.
* - `"gzip"` - Compress with gzip
* - `true` - Same as `"gzip"`
* - `false` - Explicitly disable compression (no compression)
* - `undefined` - No compression (default behavior when omitted)
*
* Both `false` and `undefined` result in no compression; `false` can be used
* to explicitly indicate "no compression" in code where the intent should be clear.
*/
type ArchiveCompression = "gzip";
/**
* Options for creating an Archive instance.
*
* By default, archives are not compressed. Use `{ compress: "gzip" }` to enable compression.
*
* @example
* ```ts
* // No compression (default)
* new Bun.Archive(data);
*
* // Enable gzip with default level (6)
* new Bun.Archive(data, { compress: "gzip" });
*
* // Specify compression level
* new Bun.Archive(data, { compress: "gzip", level: 9 });
* ```
*/
interface ArchiveOptions {
/**
* Compression algorithm to use.
* Currently only "gzip" is supported.
* If not specified, no compression is applied.
*/
compress?: ArchiveCompression;
/**
* Compression level (1-12). Only applies when `compress` is set.
* - 1: Fastest compression, lowest ratio
* - 6: Default balance of speed and ratio
* - 12: Best compression ratio, slowest
*
* @default 6
*/
level?: number;
}
type ArchiveCompression = "gzip" | boolean;
/**
* Options for extracting archive contents.
@@ -7320,7 +7031,7 @@ declare module "bun" {
* @example
* **Create an archive from an object:**
* ```ts
* const archive = new Bun.Archive({
* const archive = Bun.Archive.from({
* "hello.txt": "Hello, World!",
* "data.json": JSON.stringify({ foo: "bar" }),
* "binary.bin": new Uint8Array([1, 2, 3, 4]),
@@ -7328,20 +7039,9 @@ declare module "bun" {
* ```
*
* @example
* **Create a gzipped archive:**
* ```ts
* const archive = new Bun.Archive({
* "hello.txt": "Hello, World!",
* }, { compress: "gzip" });
*
* // Or with a specific compression level (1-12)
* const archive = new Bun.Archive(data, { compress: "gzip", level: 9 });
* ```
*
* @example
* **Extract an archive to disk:**
* ```ts
* const archive = new Bun.Archive(tarballBytes);
* const archive = Bun.Archive.from(tarballBytes);
* const entryCount = await archive.extract("./output");
* console.log(`Extracted ${entryCount} entries`);
* ```
@@ -7349,7 +7049,7 @@ declare module "bun" {
* @example
* **Get archive contents as a Map of File objects:**
* ```ts
* const archive = new Bun.Archive(tarballBytes);
* const archive = Bun.Archive.from(tarballBytes);
* const entries = await archive.files();
* for (const [path, file] of entries) {
* console.log(path, await file.text());
@@ -7362,50 +7062,36 @@ declare module "bun" {
* await Bun.Archive.write("bundle.tar.gz", {
* "src/index.ts": sourceCode,
* "package.json": packageJson,
* }, { compress: "gzip" });
* }, "gzip");
* ```
*/
export class Archive {
/**
* Create an `Archive` instance from input data.
*
* By default, archives are not compressed. Use `{ compress: "gzip" }` to enable compression.
*
* @param data - The input data for the archive:
* - **Object**: Creates a new tarball with the object's keys as file paths and values as file contents
* - **Blob/TypedArray/ArrayBuffer**: Wraps existing archive data (tar or tar.gz)
* @param options - Optional archive options including compression settings.
* Defaults to no compression if omitted.
*
* @returns A new `Archive` instance
*
* @example
* **From an object (creates uncompressed tarball):**
* **From an object (creates new tarball):**
* ```ts
* const archive = new Bun.Archive({
* const archive = Bun.Archive.from({
* "hello.txt": "Hello, World!",
* "nested/file.txt": "Nested content",
* });
* ```
*
* @example
* **With gzip compression:**
* ```ts
* const archive = new Bun.Archive(data, { compress: "gzip" });
* ```
*
* @example
* **With explicit gzip compression level:**
* ```ts
* const archive = new Bun.Archive(data, { compress: "gzip", level: 12 });
* ```
*
* @example
* **From existing archive data:**
* ```ts
* const response = await fetch("https://example.com/package.tar.gz");
* const archive = new Bun.Archive(await response.blob());
* const archive = Bun.Archive.from(await response.blob());
* ```
*/
constructor(data: ArchiveInput, options?: ArchiveOptions);
static from(data: ArchiveInput): Archive;
/**
* Create and write an archive directly to disk in one operation.
@@ -7414,8 +7100,8 @@ declare module "bun" {
* as it streams the data directly to disk.
*
* @param path - The file path to write the archive to
* @param data - The input data for the archive (same as `new Archive()`)
* @param options - Optional archive options including compression settings
* @param data - The input data for the archive (same as `Archive.from()`)
* @param compress - Optional compression: `"gzip"`, `true` for gzip, or `false`/`undefined` for none
*
* @returns A promise that resolves when the write is complete
*
@@ -7431,10 +7117,10 @@ declare module "bun" {
* @example
* **Write gzipped tarball:**
* ```ts
* await Bun.Archive.write("output.tar.gz", files, { compress: "gzip" });
* await Bun.Archive.write("output.tar.gz", files, "gzip");
* ```
*/
static write(path: string, data: ArchiveInput | Archive, options?: ArchiveOptions): Promise<void>;
static write(path: string, data: ArchiveInput | Archive, compress?: ArchiveCompression): Promise<void>;
/**
* Extract the archive contents to a directory on disk.
@@ -7450,7 +7136,7 @@ declare module "bun" {
* @example
* **Extract all entries:**
* ```ts
* const archive = new Bun.Archive(tarballBytes);
* const archive = Bun.Archive.from(tarballBytes);
* const count = await archive.extract("./extracted");
* console.log(`Extracted ${count} entries`);
* ```
@@ -7480,48 +7166,42 @@ declare module "bun" {
/**
* Get the archive contents as a `Blob`.
*
* Uses the compression settings specified when the Archive was created.
*
* @param compress - Optional compression: `"gzip"`, `true` for gzip, or `false`/`undefined` for none
* @returns A promise that resolves with the archive data as a Blob
*
* @example
* **Get tarball as Blob:**
* **Get uncompressed tarball:**
* ```ts
* const archive = new Bun.Archive(data);
* const blob = await archive.blob();
* ```
*
* @example
* **Get gzipped tarball as Blob:**
* **Get gzipped tarball:**
* ```ts
* const archive = new Bun.Archive(data, { compress: "gzip" });
* const gzippedBlob = await archive.blob();
* const gzippedBlob = await archive.blob("gzip");
* ```
*/
blob(): Promise<Blob>;
blob(compress?: ArchiveCompression): Promise<Blob>;
/**
* Get the archive contents as a `Uint8Array`.
*
* Uses the compression settings specified when the Archive was created.
*
* @param compress - Optional compression: `"gzip"`, `true` for gzip, or `false`/`undefined` for none
* @returns A promise that resolves with the archive data as a Uint8Array
*
* @example
* **Get tarball bytes:**
* **Get uncompressed tarball bytes:**
* ```ts
* const archive = new Bun.Archive(data);
* const bytes = await archive.bytes();
* ```
*
* @example
* **Get gzipped tarball bytes:**
* ```ts
* const archive = new Bun.Archive(data, { compress: "gzip" });
* const gzippedBytes = await archive.bytes();
* const gzippedBytes = await archive.bytes("gzip");
* ```
*/
bytes(): Promise<Uint8Array<ArrayBuffer>>;
bytes(compress?: ArchiveCompression): Promise<Uint8Array<ArrayBuffer>>;
/**
* Get the archive contents as a `Map` of `File` objects.

View File

@@ -23,11 +23,6 @@ declare module "*.jsonc" {
export = contents;
}
declare module "*.json5" {
var contents: any;
export = contents;
}
declare module "*/bun.lock" {
var contents: import("bun").BunLockFile;
export = contents;

View File

@@ -609,17 +609,7 @@ declare module "bun" {
* });
*/
write(
data:
| string
| ArrayBufferView
| ArrayBuffer
| SharedArrayBuffer
| Request
| Response
| BunFile
| S3File
| Blob
| Archive,
data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer | Request | Response | BunFile | S3File | Blob,
options?: S3Options,
): Promise<number>;
@@ -930,8 +920,7 @@ declare module "bun" {
| BunFile
| S3File
| Blob
| File
| Archive,
| File,
options?: S3Options,
): Promise<number>;
@@ -981,8 +970,7 @@ declare module "bun" {
| BunFile
| S3File
| Blob
| File
| Archive,
| File,
options?: S3Options,
): Promise<number>;

View File

@@ -315,15 +315,6 @@ int us_internal_ssl_socket_is_closed(struct us_internal_ssl_socket_t *s) {
return us_socket_is_closed(0, &s->s);
}
int us_internal_ssl_socket_is_handshake_finished(struct us_internal_ssl_socket_t *s) {
if (!s || !s->ssl) return 0;
return SSL_is_init_finished(s->ssl);
}
int us_internal_ssl_socket_handshake_callback_has_fired(struct us_internal_ssl_socket_t *s) {
if (!s) return 0;
return s->handshake_state == HANDSHAKE_COMPLETED;
}
void us_internal_trigger_handshake_callback_econnreset(struct us_internal_ssl_socket_t *s) {
struct us_internal_ssl_socket_context_t *context =

View File

@@ -439,8 +439,6 @@ void *us_internal_ssl_socket_ext(us_internal_ssl_socket_r s);
void *us_internal_connecting_ssl_socket_ext(struct us_connecting_socket_t *c);
int us_internal_ssl_socket_is_shut_down(us_internal_ssl_socket_r s);
int us_internal_ssl_socket_is_closed(us_internal_ssl_socket_r s);
int us_internal_ssl_socket_is_handshake_finished(us_internal_ssl_socket_r s);
int us_internal_ssl_socket_handshake_callback_has_fired(us_internal_ssl_socket_r s);
void us_internal_ssl_socket_shutdown(us_internal_ssl_socket_r s);
struct us_internal_ssl_socket_t *us_internal_ssl_socket_context_adopt_socket(

View File

@@ -457,12 +457,6 @@ int us_socket_is_shut_down(int ssl, us_socket_r s) nonnull_fn_decl;
/* Returns whether this socket has been closed. Only valid if memory has not yet been released. */
int us_socket_is_closed(int ssl, us_socket_r s) nonnull_fn_decl;
/* Returns 1 if the TLS handshake has completed, 0 otherwise. For non-SSL sockets, always returns 1. */
int us_socket_is_ssl_handshake_finished(int ssl, us_socket_r s) nonnull_fn_decl;
/* Returns 1 if the TLS handshake callback has been invoked, 0 otherwise. For non-SSL sockets, always returns 1. */
int us_socket_ssl_handshake_callback_has_fired(int ssl, us_socket_r s) nonnull_fn_decl;
/* Immediately closes the socket */
struct us_socket_t *us_socket_close(int ssl, us_socket_r s, int code, void *reason) __attribute__((nonnull(2)));

View File

@@ -128,26 +128,6 @@ int us_socket_is_closed(int ssl, struct us_socket_t *s) {
return s->flags.is_closed;
}
int us_socket_is_ssl_handshake_finished(int ssl, struct us_socket_t *s) {
#ifndef LIBUS_NO_SSL
if(ssl) {
return us_internal_ssl_socket_is_handshake_finished((struct us_internal_ssl_socket_t *) s);
}
#endif
// Non-SSL sockets are always "handshake finished"
return 1;
}
int us_socket_ssl_handshake_callback_has_fired(int ssl, struct us_socket_t *s) {
#ifndef LIBUS_NO_SSL
if(ssl) {
return us_internal_ssl_socket_handshake_callback_has_fired((struct us_internal_ssl_socket_t *) s);
}
#endif
// Non-SSL sockets are always "callback fired"
return 1;
}
int us_connecting_socket_is_closed(int ssl, struct us_connecting_socket_t *c) {
return c->closed;
}

View File

@@ -18,7 +18,7 @@
#pragma once
#ifndef UWS_HTTP_MAX_HEADERS_COUNT
#define UWS_HTTP_MAX_HEADERS_COUNT 200
#define UWS_HTTP_MAX_HEADERS_COUNT 100
#endif
// todo: HttpParser is in need of a few clean-ups and refactorings

View File

@@ -39,7 +39,6 @@ add_compile_definitions(
CONFIG_TCC_PREDEFS
ONE_SOURCE=0
TCC_LIBTCC1="\\0"
CONFIG_TCC_BACKTRACE=0
)
if(APPLE)

View File

@@ -23,10 +23,7 @@ const OS_NAME = platform().toLowerCase();
const ARCH_NAME_RAW = arch();
const IS_MAC = OS_NAME === "darwin";
const IS_LINUX = OS_NAME === "linux";
const IS_WINDOWS = OS_NAME === "win32";
// On Windows, use PROCESSOR_ARCHITECTURE env var to get native arch (Bun may run under x64 emulation)
const NATIVE_ARCH = IS_WINDOWS ? (process.env.PROCESSOR_ARCHITECTURE || ARCH_NAME_RAW).toUpperCase() : ARCH_NAME_RAW;
const IS_ARM64 = NATIVE_ARCH === "ARM64" || NATIVE_ARCH === "AARCH64" || ARCH_NAME_RAW === "arm64";
const IS_ARM64 = ARCH_NAME_RAW === "arm64" || ARCH_NAME_RAW === "aarch64";
// Paths
const ROOT_DIR = resolve(import.meta.dir, "..");
@@ -36,54 +33,22 @@ const WEBKIT_RELEASE_DIR = join(WEBKIT_BUILD_DIR, "Release");
const WEBKIT_DEBUG_DIR = join(WEBKIT_BUILD_DIR, "Debug");
const WEBKIT_RELEASE_DIR_LTO = join(WEBKIT_BUILD_DIR, "ReleaseLTO");
// Windows ICU paths - use vcpkg static build
// Auto-detect triplet: prefer arm64 if it exists, otherwise x64
const VCPKG_ARM64_PATH = join(WEBKIT_DIR, "vcpkg_installed", "arm64-windows-static");
const VCPKG_X64_PATH = join(WEBKIT_DIR, "vcpkg_installed", "x64-windows-static");
const VCPKG_ROOT = existsSync(VCPKG_ARM64_PATH) ? VCPKG_ARM64_PATH : VCPKG_X64_PATH;
const ICU_INCLUDE_DIR = join(VCPKG_ROOT, "include");
// Get ICU library paths based on build config (debug uses 'd' suffix libraries)
function getICULibraryPaths(config: BuildConfig) {
const isDebug = config === "debug";
// vcpkg static ICU libraries: release in lib/, debug in debug/lib/ with 'd' suffix
const libDir = isDebug ? join(VCPKG_ROOT, "debug", "lib") : join(VCPKG_ROOT, "lib");
const suffix = isDebug ? "d" : "";
return {
ICU_LIBRARY: libDir,
ICU_DATA_LIBRARY: join(libDir, `sicudt${suffix}.lib`),
ICU_I18N_LIBRARY: join(libDir, `sicuin${suffix}.lib`),
ICU_UC_LIBRARY: join(libDir, `sicuuc${suffix}.lib`),
};
}
// Homebrew prefix detection
const HOMEBREW_PREFIX = IS_ARM64 ? "/opt/homebrew/" : "/usr/local/";
// Compiler detection
function findExecutable(names: string[]): string | null {
for (const name of names) {
const path = Bun.which(name);
if (path) return path;
const result = spawnSync("which", [name], { encoding: "utf8" });
if (result.status === 0) {
return result.stdout.trim();
}
}
return null;
}
// Detect ccache
const CCACHE = findExecutable(["ccache"]);
const HAS_CCACHE = CCACHE !== null;
// Configure compilers with ccache if available
// On Windows, use clang-cl for MSVC compatibility
const CC_BASE = IS_WINDOWS
? findExecutable(["clang-cl.exe", "clang-cl"]) || "clang-cl"
: findExecutable(["clang-19", "clang"]) || "clang";
const CXX_BASE = IS_WINDOWS
? findExecutable(["clang-cl.exe", "clang-cl"]) || "clang-cl"
: findExecutable(["clang++-19", "clang++"]) || "clang++";
const CC = HAS_CCACHE ? CCACHE : CC_BASE;
const CXX = HAS_CCACHE ? CCACHE : CXX_BASE;
const CC = findExecutable(["clang-19", "clang"]) || "clang";
const CXX = findExecutable(["clang++-19", "clang++"]) || "clang++";
// Build directory based on config
const getBuildDir = (config: BuildConfig) => {
@@ -98,7 +63,7 @@ const getBuildDir = (config: BuildConfig) => {
};
// Common CMake flags
const getCommonFlags = (config: BuildConfig) => {
const getCommonFlags = () => {
const flags = [
"-DPORT=JSCOnly",
"-DENABLE_STATIC_JSC=ON",
@@ -109,27 +74,16 @@ const getCommonFlags = (config: BuildConfig) => {
"-DENABLE_FTL_JIT=ON",
"-G",
"Ninja",
`-DCMAKE_C_COMPILER=${CC}`,
`-DCMAKE_CXX_COMPILER=${CXX}`,
];
// Configure compiler with ccache if available
if (HAS_CCACHE) {
flags.push(
`-DCMAKE_C_COMPILER_LAUNCHER=${CCACHE}`,
`-DCMAKE_CXX_COMPILER_LAUNCHER=${CCACHE}`,
`-DCMAKE_C_COMPILER=${CC_BASE}`,
`-DCMAKE_CXX_COMPILER=${CXX_BASE}`,
);
} else {
flags.push(`-DCMAKE_C_COMPILER=${CC}`, `-DCMAKE_CXX_COMPILER=${CXX}`);
}
if (IS_MAC) {
flags.push(
"-DENABLE_SINGLE_THREADED_VM_ENTRY_SCOPE=ON",
"-DBUN_FAST_TLS=ON",
"-DPTHREAD_JIT_PERMISSIONS_API=1",
"-DUSE_PTHREAD_JIT_PERMISSIONS_API=ON",
"-DENABLE_REMOTE_INSPECTOR=ON",
);
} else if (IS_LINUX) {
flags.push(
@@ -137,27 +91,6 @@ const getCommonFlags = (config: BuildConfig) => {
"-DUSE_VISIBILITY_ATTRIBUTE=1",
"-DENABLE_REMOTE_INSPECTOR=ON",
);
} else if (IS_WINDOWS) {
// Find lld-link for Windows builds
const lldLink = findExecutable(["lld-link.exe", "lld-link"]) || "lld-link";
// Get ICU library paths for this build config (debug uses 'd' suffix libraries)
const icuPaths = getICULibraryPaths(config);
flags.push(
"-DENABLE_REMOTE_INSPECTOR=ON",
"-DUSE_VISIBILITY_ATTRIBUTE=1",
"-DUSE_SYSTEM_MALLOC=ON",
`-DCMAKE_LINKER=${lldLink}`,
`-DICU_ROOT=${VCPKG_ROOT}`,
`-DICU_LIBRARY=${icuPaths.ICU_LIBRARY}`,
`-DICU_INCLUDE_DIR=${ICU_INCLUDE_DIR}`,
// Explicitly set ICU library paths to use vcpkg static libs (debug has 'd' suffix)
`-DICU_DATA_LIBRARY_RELEASE=${icuPaths.ICU_DATA_LIBRARY}`,
`-DICU_I18N_LIBRARY_RELEASE=${icuPaths.ICU_I18N_LIBRARY}`,
`-DICU_UC_LIBRARY_RELEASE=${icuPaths.ICU_UC_LIBRARY}`,
"-DCMAKE_C_FLAGS=/DU_STATIC_IMPLEMENTATION",
"-DCMAKE_CXX_FLAGS=/DU_STATIC_IMPLEMENTATION /clang:-fno-c++-static-destructors",
);
}
return flags;
@@ -165,7 +98,7 @@ const getCommonFlags = (config: BuildConfig) => {
// Build-specific CMake flags
const getBuildFlags = (config: BuildConfig) => {
const flags = [...getCommonFlags(config)];
const flags = [...getCommonFlags()];
switch (config) {
case "debug":
@@ -173,40 +106,24 @@ const getBuildFlags = (config: BuildConfig) => {
"-DCMAKE_BUILD_TYPE=Debug",
"-DENABLE_BUN_SKIP_FAILING_ASSERTIONS=ON",
"-DCMAKE_EXPORT_COMPILE_COMMANDS=ON",
"-DENABLE_REMOTE_INSPECTOR=ON",
"-DUSE_VISIBILITY_ATTRIBUTE=1",
);
if (IS_MAC || IS_LINUX) {
// Enable address sanitizer by default on Mac/Linux debug builds
if (IS_MAC) {
// Enable address sanitizer by default on Mac debug builds
flags.push("-DENABLE_SANITIZERS=address");
// To disable asan, comment the line above and uncomment:
// flags.push("-DENABLE_MALLOC_HEAP_BREAKDOWN=ON");
}
if (IS_WINDOWS) {
flags.push("-DCMAKE_MSVC_RUNTIME_LIBRARY=MultiThreadedDebug");
}
break;
case "lto":
flags.push("-DCMAKE_BUILD_TYPE=Release");
if (IS_WINDOWS) {
// On Windows, append LTO flags to existing Windows-specific flags
flags.push(
"-DCMAKE_C_FLAGS=/DU_STATIC_IMPLEMENTATION -flto=full",
"-DCMAKE_CXX_FLAGS=/DU_STATIC_IMPLEMENTATION /clang:-fno-c++-static-destructors -flto=full",
"-DCMAKE_MSVC_RUNTIME_LIBRARY=MultiThreaded",
);
} else {
flags.push("-DCMAKE_C_FLAGS=-flto=full", "-DCMAKE_CXX_FLAGS=-flto=full");
}
flags.push("-DCMAKE_BUILD_TYPE=Release", "-DCMAKE_C_FLAGS=-flto=full", "-DCMAKE_CXX_FLAGS=-flto=full");
break;
default: // release
flags.push("-DCMAKE_BUILD_TYPE=RelWithDebInfo");
if (IS_WINDOWS) {
flags.push("-DCMAKE_MSVC_RUNTIME_LIBRARY=MultiThreaded");
}
break;
}
@@ -217,6 +134,17 @@ const getBuildFlags = (config: BuildConfig) => {
const getBuildEnv = () => {
const env = { ...process.env };
const cflags = ["-ffat-lto-objects"];
const cxxflags = ["-ffat-lto-objects"];
if (IS_LINUX && buildConfig !== "lto") {
cflags.push("-Wl,--whole-archive");
cxxflags.push("-Wl,--whole-archive", "-DUSE_BUN_JSC_ADDITIONS=ON", "-DUSE_BUN_EVENT_LOOP=ON");
}
env.CFLAGS = (env.CFLAGS || "") + " " + cflags.join(" ");
env.CXXFLAGS = (env.CXXFLAGS || "") + " " + cxxflags.join(" ");
if (IS_MAC) {
env.ICU_INCLUDE_DIRS = `${HOMEBREW_PREFIX}opt/icu4c/include`;
}
@@ -251,9 +179,6 @@ function buildJSC() {
console.log(`Building JSC with configuration: ${buildConfig}`);
console.log(`Build directory: ${buildDir}`);
if (HAS_CCACHE) {
console.log(`Using ccache for faster builds: ${CCACHE}`);
}
// Create build directories
if (!existsSync(buildDir)) {

View File

@@ -14,15 +14,6 @@ import {
startGroup,
} from "./utils.mjs";
// Detect Windows ARM64 - bun may run under x64 emulation (WoW64), so check multiple indicators
const isWindowsARM64 =
isWindows &&
(process.env.PROCESSOR_ARCHITECTURE === "ARM64" ||
process.env.VSCMD_ARG_HOST_ARCH === "arm64" ||
process.env.MSYSTEM_CARCH === "aarch64" ||
(process.env.PROCESSOR_IDENTIFIER || "").includes("ARMv8") ||
process.arch === "arm64");
if (globalThis.Bun) {
await import("./glob-sources.mjs");
}
@@ -92,23 +83,6 @@ async function build(args) {
generateOptions["--toolchain"] = toolchainPath;
}
// Windows ARM64: automatically set required options
if (isWindowsARM64) {
// Use clang-cl instead of MSVC cl.exe for proper ARM64 flag support
if (!generateOptions["-DCMAKE_C_COMPILER"]) {
generateOptions["-DCMAKE_C_COMPILER"] = "clang-cl";
}
if (!generateOptions["-DCMAKE_CXX_COMPILER"]) {
generateOptions["-DCMAKE_CXX_COMPILER"] = "clang-cl";
}
// Skip codegen by default since x64 bun crashes under WoW64 emulation
// Can be overridden with -DSKIP_CODEGEN=OFF once ARM64 bun is available
if (!generateOptions["-DSKIP_CODEGEN"]) {
generateOptions["-DSKIP_CODEGEN"] = "ON";
}
console.log("Windows ARM64 detected: using clang-cl and SKIP_CODEGEN=ON");
}
const generateArgs = Object.entries(generateOptions).flatMap(([flag, value]) =>
flag.startsWith("-D") ? [`${flag}=${value}`] : [flag, value],
);

View File

@@ -49,42 +49,9 @@ const colors = {
// Parse command line arguments
const args = process.argv.slice(2);
// Show help
if (args.includes("--help") || args.includes("-h")) {
console.log(`Usage: bun run scripts/buildkite-failures.ts [options] [build-id|branch|pr-url|buildkite-url]
Shows detailed error information from BuildKite build failures.
Full logs are saved to /tmp/bun-build-{number}-{platform}-{step}.log
Arguments:
build-id BuildKite build number (e.g., 35051)
branch Git branch name (e.g., main, claude/fix-bug)
pr-url GitHub PR URL (e.g., https://github.com/oven-sh/bun/pull/26173)
buildkite-url BuildKite build URL
#number GitHub PR number (e.g., #26173)
(none) Uses current git branch
Options:
--flaky, -f Include flaky test annotations
--warnings, -w Include warning annotations
--wait Poll continuously until build completes or fails
--help, -h Show this help message
Examples:
bun run scripts/buildkite-failures.ts # Current branch
bun run scripts/buildkite-failures.ts main # Main branch
bun run scripts/buildkite-failures.ts 35051 # Build #35051
bun run scripts/buildkite-failures.ts #26173 # PR #26173
bun run scripts/buildkite-failures.ts --wait # Wait for current branch build to complete
`);
process.exit(0);
}
const showWarnings = args.includes("--warnings") || args.includes("-w");
const showFlaky = args.includes("--flaky") || args.includes("-f");
const waitMode = args.includes("--wait");
const inputArg = args.find(arg => !arg.startsWith("-") && !arg.startsWith("--"));
const inputArg = args.find(arg => !arg.startsWith("-"));
// Determine what type of input we have
let buildNumber = null;
@@ -147,138 +114,38 @@ if (!buildNumber) {
buildNumber = match[1];
}
// Helper to format time ago
function formatTimeAgo(dateStr: string | null): string {
if (!dateStr) return "not started";
const date = new Date(dateStr);
const now = new Date();
const diffMs = now.getTime() - date.getTime();
const diffSecs = Math.floor(diffMs / 1000);
const diffMins = Math.floor(diffSecs / 60);
const diffHours = Math.floor(diffMins / 60);
const diffDays = Math.floor(diffHours / 24);
// Fetch build JSON
const buildResponse = await fetch(`https://buildkite.com/bun/bun/builds/${buildNumber}.json`);
const build = await buildResponse.json();
if (diffDays > 0) return `${diffDays} day${diffDays !== 1 ? "s" : ""} ago`;
if (diffHours > 0) return `${diffHours} hour${diffHours !== 1 ? "s" : ""} ago`;
if (diffMins > 0) return `${diffMins} minute${diffMins !== 1 ? "s" : ""} ago`;
return `${diffSecs} second${diffSecs !== 1 ? "s" : ""} ago`;
// Calculate time ago
const buildTime = new Date(build.started_at);
const now = new Date();
const diffMs = now.getTime() - buildTime.getTime();
const diffSecs = Math.floor(diffMs / 1000);
const diffMins = Math.floor(diffSecs / 60);
const diffHours = Math.floor(diffMins / 60);
const diffDays = Math.floor(diffHours / 24);
let timeAgo;
if (diffDays > 0) {
timeAgo = `${diffDays} day${diffDays !== 1 ? "s" : ""} ago`;
} else if (diffHours > 0) {
timeAgo = `${diffHours} hour${diffHours !== 1 ? "s" : ""} ago`;
} else if (diffMins > 0) {
timeAgo = `${diffMins} minute${diffMins !== 1 ? "s" : ""} ago`;
} else {
timeAgo = `${diffSecs} second${diffSecs !== 1 ? "s" : ""} ago`;
}
// Helper to clear line for updates
const clearLine = isTTY ? "\x1b[2K\r" : "";
// Poll for build status
let build: any;
let pollCount = 0;
const pollInterval = 10000; // 10 seconds
while (true) {
// Fetch build JSON
const buildResponse = await fetch(`https://buildkite.com/bun/bun/builds/${buildNumber}.json`);
build = await buildResponse.json();
// Check for failed jobs first (even if build is still running)
const failedJobsEarly =
build.jobs?.filter(
(job: any) => job.exit_status && job.exit_status > 0 && !job.soft_failed && job.type === "script",
) || [];
// In wait mode with failures, stop polling and show failures
if (waitMode && failedJobsEarly.length > 0) {
if (pollCount > 0) {
process.stdout.write(clearLine);
}
break;
}
// Calculate time ago (use created_at as fallback for scheduled/pending builds)
const timeAgo = formatTimeAgo(build.started_at || build.created_at);
// Check if build passed
if (build.state === "passed") {
if (pollCount > 0) {
process.stdout.write(clearLine);
}
console.log(`${timeAgo} - build #${buildNumber} https://buildkite.com/bun/bun/builds/${buildNumber}\n`);
console.log(`${colors.green}✅ Passed!${colors.reset}`);
process.exit(0);
}
// Check if build was canceled
if (build.state === "canceled" || build.state === "canceling") {
if (pollCount > 0) {
process.stdout.write(clearLine);
}
console.log(`${timeAgo} - build #${buildNumber} https://buildkite.com/bun/bun/builds/${buildNumber}\n`);
console.log(`${colors.dim}🚫 Build was canceled${colors.reset}`);
process.exit(0);
}
// Check if build is pending/running/scheduled
if (
build.state === "scheduled" ||
build.state === "running" ||
build.state === "creating" ||
build.state === "started"
) {
const runningJobs = build.jobs?.filter((job: any) => job.state === "running") || [];
const pendingJobs = build.jobs?.filter((job: any) => job.state === "scheduled" || job.state === "waiting") || [];
const passedJobs = build.jobs?.filter((job: any) => job.state === "passed") || [];
const totalJobs = build.jobs?.filter((job: any) => job.type === "script")?.length || 0;
if (waitMode) {
// In wait mode, show a single updating line
let statusMsg = "";
if (build.state === "scheduled" || build.state === "creating") {
statusMsg = `⏳ Waiting... (scheduled ${formatTimeAgo(build.created_at)})`;
} else {
statusMsg = `🔄 Running... ${passedJobs.length}/${totalJobs} passed, ${runningJobs.length} running`;
}
process.stdout.write(`${clearLine}${colors.dim}${statusMsg}${colors.reset}`);
pollCount++;
await Bun.sleep(pollInterval);
continue;
} else {
// Not in wait mode, show full status and exit
console.log(`${timeAgo} - build #${buildNumber} https://buildkite.com/bun/bun/builds/${buildNumber}\n`);
if (build.state === "scheduled" || build.state === "creating") {
console.log(`${colors.dim}⏳ Build is scheduled/pending${colors.reset}`);
if (build.created_at) {
console.log(`${colors.dim} Created: ${formatTimeAgo(build.created_at)}${colors.reset}`);
}
} else {
console.log(`${colors.dim}🔄 Build is running${colors.reset}`);
if (build.started_at) {
console.log(`${colors.dim} Started: ${formatTimeAgo(build.started_at)}${colors.reset}`);
}
console.log(
`${colors.dim} Progress: ${passedJobs.length}/${totalJobs} jobs passed, ${runningJobs.length} running, ${pendingJobs.length} pending${colors.reset}`,
);
if (runningJobs.length > 0) {
console.log(`\n${colors.dim}Running jobs:${colors.reset}`);
for (const job of runningJobs.slice(0, 5)) {
const name = job.name || job.label || "Unknown";
console.log(` ${colors.dim}${name}${colors.reset}`);
}
if (runningJobs.length > 5) {
console.log(` ${colors.dim}... and ${runningJobs.length - 5} more${colors.reset}`);
}
}
}
process.exit(0);
}
}
// Build is in a terminal state (failed, etc.) - break out of loop
break;
}
// Print header for failed build
const timeAgo = formatTimeAgo(build.started_at || build.created_at);
console.log(`${timeAgo} - build #${buildNumber} https://buildkite.com/bun/bun/builds/${buildNumber}\n`);
// Check if build passed
if (build.state === "passed") {
console.log(`${colors.green}✅ Passed!${colors.reset}`);
process.exit(0);
}
// Get failed jobs
const failedJobs =
build.jobs?.filter(job => job.exit_status && job.exit_status > 0 && !job.soft_failed && job.type === "script") || [];
@@ -867,212 +734,7 @@ if (registerRequestIndex !== -1) {
console.log(` https://buildkite.com/bun/bun/builds/${buildNumber}#annotations`);
}
} else {
// No annotations found - show detailed job failure information
if (failedJobs.length > 0) {
console.log(`\n${colors.red}${colors.bold}${failedJobs.length} job failures${colors.reset}\n`);
// Show annotation counts if available
const annotationCounts = build.annotation_counts_by_style;
if (annotationCounts) {
const errors = annotationCounts.error || 0;
const warnings = annotationCounts.warning || 0;
if (errors > 0 || warnings > 0) {
const parts = [];
if (errors > 0) parts.push(`${errors} error${errors !== 1 ? "s" : ""}`);
if (warnings > 0) parts.push(`${warnings} warning${warnings !== 1 ? "s" : ""}`);
console.log(
`${colors.dim}Annotations: ${parts.join(", ")} - view at https://buildkite.com/bun/bun/builds/${buildNumber}#annotations${colors.reset}\n`,
);
}
}
// Group jobs by type
const buildJobs = failedJobs.filter(job => (job.name || job.label || "").includes("build-"));
const testJobs = failedJobs.filter(job => (job.name || job.label || "").includes("test"));
const otherJobs = failedJobs.filter(
job => !(job.name || job.label || "").includes("build-") && !(job.name || job.label || "").includes("test"),
);
// Display build failures
if (buildJobs.length > 0) {
console.log(
`${colors.bgRed}${colors.white}${colors.bold} Build Failures (${buildJobs.length}) ${colors.reset}\n`,
);
for (const job of buildJobs) {
const name = (job.name || job.label || "Unknown").replace(/^:([^:]+):/, (_, emoji) => {
const platform = emoji.toLowerCase();
return platformMap[platform] || `:${emoji}:`;
});
const duration =
job.started_at && job.finished_at
? `${((new Date(job.finished_at).getTime() - new Date(job.started_at).getTime()) / 1000).toFixed(0)}s`
: "N/A";
console.log(` ${colors.red}${colors.reset} ${name}`);
console.log(` ${colors.dim}Duration: ${duration} | Exit: ${job.exit_status}${colors.reset}`);
console.log(` ${colors.dim}https://buildkite.com${job.path}${colors.reset}`);
console.log();
}
}
// Display test failures
if (testJobs.length > 0) {
console.log(`${colors.bgBlue}${colors.white}${colors.bold} Test Failures (${testJobs.length}) ${colors.reset}\n`);
for (const job of testJobs) {
const name = (job.name || job.label || "Unknown").replace(/^:([^:]+):/, (_, emoji) => {
const platform = emoji.toLowerCase();
return platformMap[platform] || `:${emoji}:`;
});
const duration =
job.started_at && job.finished_at
? `${((new Date(job.finished_at).getTime() - new Date(job.started_at).getTime()) / 1000).toFixed(0)}s`
: "N/A";
console.log(` ${colors.red}${colors.reset} ${name}`);
console.log(` ${colors.dim}Duration: ${duration} | Exit: ${job.exit_status}${colors.reset}`);
console.log(` ${colors.dim}https://buildkite.com${job.path}${colors.reset}`);
console.log();
}
}
// Display other failures
if (otherJobs.length > 0) {
console.log(
`${colors.bgBlue}${colors.white}${colors.bold} Other Failures (${otherJobs.length}) ${colors.reset}\n`,
);
for (const job of otherJobs) {
const name = (job.name || job.label || "Unknown").replace(/^:([^:]+):/, (_, emoji) => {
const platform = emoji.toLowerCase();
return platformMap[platform] || `:${emoji}:`;
});
const duration =
job.started_at && job.finished_at
? `${((new Date(job.finished_at).getTime() - new Date(job.started_at).getTime()) / 1000).toFixed(0)}s`
: "N/A";
console.log(` ${colors.red}${colors.reset} ${name}`);
console.log(` ${colors.dim}Duration: ${duration} | Exit: ${job.exit_status}${colors.reset}`);
console.log(` ${colors.dim}https://buildkite.com${job.path}${colors.reset}`);
console.log();
}
}
// Fetch and display logs for all failed jobs
// Use the public BuildKite log endpoint
console.log(`${colors.dim}Fetching logs for ${failedJobs.length} failed jobs...${colors.reset}\n`);
for (const job of failedJobs) {
const name = (job.name || job.label || "Unknown").replace(/^:([^:]+):/, (_, emoji) => {
const platform = emoji.toLowerCase();
return platformMap[platform] || `:${emoji}:`;
});
// Create a sanitized filename from the job name
// e.g., ":darwin: aarch64 - build-cpp" -> "darwin-aarch64-build-cpp"
const sanitizedName = (job.name || job.label || "unknown")
.replace(/^:([^:]+):\s*/, "$1-") // :darwin: -> darwin-
.replace(/\s+-\s+/g, "-") // " - " -> "-"
.replace(/[^a-zA-Z0-9-]/g, "-") // Replace other chars with -
.replace(/-+/g, "-") // Collapse multiple -
.replace(/^-|-$/g, "") // Remove leading/trailing -
.toLowerCase();
const logFilePath = `/tmp/bun-build-${buildNumber}-${sanitizedName}.log`;
try {
const logResponse = await fetch(
`https://buildkite.com/organizations/bun/pipelines/bun/builds/${buildNumber}/jobs/${job.id}/log`,
);
if (logResponse.ok) {
const logData = await logResponse.json();
let output = logData.output || "";
// Convert HTML to readable text (without ANSI codes for file output)
const plainOutput = output
// Remove timestamp tags
.replace(/<time[^>]*>[^<]*<\/time>/g, "")
// Remove all span tags
.replace(/<span[^>]*>([^<]*)<\/span>/g, "$1")
// Remove remaining HTML tags
.replace(/<[^>]+>/g, "")
// Decode HTML entities
.replace(/&amp;/g, "&")
.replace(/&lt;/g, "<")
.replace(/&gt;/g, ">")
.replace(/&quot;/g, '"')
.replace(/&#39;/g, "'")
.replace(/&#47;/g, "/")
.replace(/&nbsp;/g, " ");
// Write the full log to a file
await Bun.write(logFilePath, plainOutput);
// Extract unique error messages for display
const lines = plainOutput.split("\n");
const uniqueErrors = new Set<string>();
for (let i = 0; i < lines.length; i++) {
const line = lines[i];
// Look for actual error messages
const isError =
(line.includes("error:") && !line.includes('error: script "') && !line.includes("error: exit")) ||
line.includes("fatal error:") ||
line.includes("panic:") ||
line.includes("undefined reference");
if (isError) {
// Extract just the error message part (remove path prefixes and timestamps)
const errorMsg = line
.replace(/^.*?\d{4}-\d{2}-\d{2}T[\d:.]+Z/, "") // Remove timestamps
.replace(/^.*?\/[^\s]*:\d+:\d+:\s*/, "") // Remove file paths
.trim();
if (errorMsg && !uniqueErrors.has(errorMsg)) {
uniqueErrors.add(errorMsg);
}
}
}
// Display job info with log file path
console.log(`${colors.bgBlue}${colors.white}${colors.bold} ${name} ${colors.reset}`);
console.log(` ${colors.dim}Log: ${logFilePath}${colors.reset}`);
if (uniqueErrors.size > 0) {
console.log(` ${colors.red}Errors (${uniqueErrors.size}):${colors.reset}`);
let count = 0;
for (const err of uniqueErrors) {
if (count >= 5) {
console.log(` ${colors.dim}... and ${uniqueErrors.size - 5} more${colors.reset}`);
break;
}
console.log(` ${colors.red}${colors.reset} ${err.slice(0, 120)}${err.length > 120 ? "..." : ""}`);
count++;
}
} else {
// Show last few lines as a preview
const lastLines = lines.slice(-5).filter(l => l.trim());
if (lastLines.length > 0) {
console.log(` ${colors.dim}Last output:${colors.reset}`);
for (const line of lastLines) {
console.log(` ${colors.dim}${line.slice(0, 100)}${line.length > 100 ? "..." : ""}${colors.reset}`);
}
}
}
if (logData.truncated) {
console.log(` ${colors.dim}(Log was truncated by BuildKite)${colors.reset}`);
}
} else {
console.log(`${colors.bgBlue}${colors.white}${colors.bold} ${name} ${colors.reset}`);
console.log(` ${colors.dim}Failed to fetch log: ${logResponse.status}${colors.reset}`);
}
} catch (e) {
console.log(`${colors.bgBlue}${colors.white}${colors.bold} ${name} ${colors.reset}`);
console.log(` ${colors.dim}Error fetching log: ${e.message}${colors.reset}`);
}
console.log();
}
} else {
console.log("View detailed results at:");
console.log(` https://buildkite.com/bun/bun/builds/${buildNumber}#annotations`);
}
console.log(`\n${colors.red}${colors.bold}${failedJobs.length} job failures${colors.reset}\n`);
console.log("View detailed results at:");
console.log(` https://buildkite.com/bun/bun/builds/${buildNumber}#annotations`);
}

View File

@@ -1585,9 +1585,6 @@ function isNodeTest(path) {
if (isCI && isMacOS && isX64) {
return false;
}
if (!isJavaScript(path)) {
return false;
}
const unixPath = path.replaceAll(sep, "/");
return (
unixPath.includes("js/node/test/parallel/") ||

View File

@@ -1,82 +0,0 @@
#!/bin/bash
# Updates the vendored uucode library and regenerates grapheme tables.
#
# Usage:
#   ./scripts/update-uucode.sh                    # update from the zig global cache
#   ./scripts/update-uucode.sh /path/to/uucode    # update from local directory
#   ./scripts/update-uucode.sh https://url.tar.gz # update from URL
#
# After running, verify with:
#   bun bd test test/js/bun/util/stringWidth.test.ts
set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
BUN_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
UUCODE_DIR="$BUN_ROOT/src/deps/uucode"
ZIG="$BUN_ROOT/vendor/zig/zig"

# The table regeneration step needs the vendored zig toolchain.
if [ ! -x "$ZIG" ]; then
echo "error: zig not found at $ZIG"
echo " run scripts/bootstrap.sh first"
exit 1
fi

# Replace the vendored copy wholesale with the contents of $1.
update_from_dir() {
local src="$1"
echo "Updating uucode from: $src"
rm -rf "$UUCODE_DIR"
mkdir -p "$UUCODE_DIR"
cp -r "$src"/* "$UUCODE_DIR/"
}

# Download a .tar.gz from $1 into a temp dir, then vendor it.
update_from_url() {
local url="$1"
local tmp
tmp=$(mktemp -d)
# Single-quote the trap so "$tmp" is expanded (quoted) when the trap
# fires, not unquoted at definition time.
trap 'rm -rf "$tmp"' EXIT
echo "Downloading uucode from: $url"
# --strip-components=1 drops the top-level directory inside the tarball.
curl -fsSL "$url" | tar -xz -C "$tmp" --strip-components=1
update_from_dir "$tmp"
}

# Handle source argument
if [ $# -ge 1 ]; then
SOURCE="$1"
if [ -d "$SOURCE" ]; then
update_from_dir "$SOURCE"
elif [[ "$SOURCE" == http* ]]; then
update_from_url "$SOURCE"
else
echo "error: argument must be a directory or URL"
exit 1
fi
else
# Default: use the zig global cache if available.
# sort -V | tail -1 picks the highest cached uucode version.
CACHED=$(find "$HOME/.cache/zig/p" -maxdepth 1 -name "uucode-*" -type d 2>/dev/null | sort -V | tail -1)
if [ -n "$CACHED" ]; then
update_from_dir "$CACHED"
else
echo "error: no uucode source specified and none found in zig cache"
echo ""
echo "usage: $0 <path-to-uucode-dir-or-url>"
exit 1
fi
fi

echo ""
echo "Regenerating grapheme tables..."
cd "$BUN_ROOT"
"$ZIG" build generate-grapheme-tables

echo ""
echo "Done. Updated files:"
echo " src/deps/uucode/ (vendored library)"
echo " src/string/immutable/grapheme_tables.zig (regenerated)"
echo ""
echo "Next steps:"
echo " 1. bun bd test test/js/bun/util/stringWidth.test.ts"
echo " 2. git add src/deps/uucode src/string/immutable/grapheme_tables.zig"
echo " 3. git commit -m 'Update uucode to <version>'"

View File

@@ -3,10 +3,6 @@
$ErrorActionPreference = "Stop"
# Detect system architecture
$script:IsARM64 = [System.Runtime.InteropServices.RuntimeInformation]::OSArchitecture -eq [System.Runtime.InteropServices.Architecture]::Arm64
$script:VsArch = if ($script:IsARM64) { "arm64" } else { "amd64" }
if($env:VSINSTALLDIR -eq $null) {
Write-Host "Loading Visual Studio environment, this may take a second..."
@@ -27,14 +23,14 @@ if($env:VSINSTALLDIR -eq $null) {
Push-Location $vsDir
try {
$vsShell = (Join-Path -Path $vsDir -ChildPath "Common7\Tools\Launch-VsDevShell.ps1")
. $vsShell -Arch $script:VsArch -HostArch $script:VsArch
. $vsShell -Arch amd64 -HostArch amd64
} finally {
Pop-Location
}
}
if($env:VSCMD_ARG_TGT_ARCH -eq "x86") {
throw "Visual Studio environment is targeting 32 bit x86, but only 64-bit architectures (x64/arm64) are supported."
throw "Visual Studio environment is targeting 32 bit, but only 64 bit is supported."
}
if ($args.Count -gt 0) {

View File

@@ -429,49 +429,16 @@ pub const StandaloneModuleGraph = struct {
const bytecode: StringPointer = brk: {
if (output_file.bytecode_index != std.math.maxInt(u32)) {
// Bytecode alignment for JSC bytecode cache deserialization.
// Not aligning correctly causes a runtime assertion error or segfault.
//
// PLATFORM-SPECIFIC ALIGNMENT:
// - PE (Windows) and Mach-O (macOS): The module graph data is embedded in
// a dedicated section with an 8-byte size header. At runtime, the section
// is memory-mapped at a page-aligned address (hence 128-byte aligned).
// The data buffer starts 8 bytes after the section start.
// For bytecode at offset O to be 128-byte aligned:
// (section_va + 8 + O) % 128 == 0
// => O % 128 == 120
//
// - ELF (Linux): The module graph data is appended to the executable and
// read into a heap-allocated buffer at runtime. The allocator provides
// natural alignment, and there's no 8-byte section header offset.
// However, using target_mod=120 is still safe because:
// - If the buffer is 128-aligned: bytecode at offset 120 is at (128n + 120),
// which when loaded at a 128-aligned address gives proper alignment.
// - The extra 120 bytes of padding is acceptable overhead.
//
// This alignment strategy (target_mod=120) works for all platforms because
// it's the worst-case offset needed for the 8-byte header scenario.
// Use up to 256 byte alignment for bytecode
// Not aligning it correctly will cause a runtime assertion error, or a segfault.
const bytecode = output_files[output_file.bytecode_index].value.buffer.bytes;
const current_offset = string_builder.len;
// Calculate padding so that (current_offset + padding) % 128 == 120
// This accounts for the 8-byte section header on PE/Mach-O platforms.
const target_mod: usize = 128 - @sizeOf(u64); // 120 = accounts for 8-byte header
const current_mod = current_offset % 128;
const padding = if (current_mod <= target_mod)
target_mod - current_mod
else
128 - current_mod + target_mod;
// Zero the padding bytes to ensure deterministic output
const writable = string_builder.writable();
@memset(writable[0..padding], 0);
string_builder.len += padding;
const aligned_offset = string_builder.len;
const writable_after_padding = string_builder.writable();
@memcpy(writable_after_padding[0..bytecode.len], bytecode[0..bytecode.len]);
const unaligned_space = writable_after_padding[bytecode.len..];
const aligned = std.mem.alignInSlice(string_builder.writable(), 128).?;
@memcpy(aligned[0..bytecode.len], bytecode[0..bytecode.len]);
const unaligned_space = aligned[bytecode.len..];
const offset = @intFromPtr(aligned.ptr) - @intFromPtr(string_builder.ptr.?);
const len = bytecode.len + @min(unaligned_space.len, 128);
string_builder.len += len;
break :brk StringPointer{ .offset = @truncate(aligned_offset), .length = @truncate(len) };
break :brk StringPointer{ .offset = @truncate(offset), .length = @truncate(len) };
} else {
break :brk .{};
}
@@ -1192,9 +1159,7 @@ pub const StandaloneModuleGraph = struct {
return .success;
}
/// Loads the standalone module graph from the executable, allocates it on the heap,
/// sets it globally, and returns the pointer.
pub fn fromExecutable(allocator: std.mem.Allocator) !?*StandaloneModuleGraph {
pub fn fromExecutable(allocator: std.mem.Allocator) !?StandaloneModuleGraph {
if (comptime Environment.isMac) {
const macho_bytes = Macho.getData() orelse return null;
if (macho_bytes.len < @sizeOf(Offsets) + trailer.len) {
@@ -1208,7 +1173,7 @@ pub const StandaloneModuleGraph = struct {
return null;
}
const offsets = std.mem.bytesAsValue(Offsets, macho_bytes_slice).*;
return try fromBytesAlloc(allocator, @constCast(macho_bytes), offsets);
return try StandaloneModuleGraph.fromBytes(allocator, @constCast(macho_bytes), offsets);
}
if (comptime Environment.isWindows) {
@@ -1224,7 +1189,7 @@ pub const StandaloneModuleGraph = struct {
return null;
}
const offsets = std.mem.bytesAsValue(Offsets, pe_bytes_slice).*;
return try fromBytesAlloc(allocator, @constCast(pe_bytes), offsets);
return try StandaloneModuleGraph.fromBytes(allocator, @constCast(pe_bytes), offsets);
}
// Do not invoke libuv here.
@@ -1319,15 +1284,7 @@ pub const StandaloneModuleGraph = struct {
}
}
return try fromBytesAlloc(allocator, to_read, offsets);
}
/// Allocates a StandaloneModuleGraph on the heap, populates it from bytes, sets it globally, and returns the pointer.
fn fromBytesAlloc(allocator: std.mem.Allocator, raw_bytes: []u8, offsets: Offsets) !*StandaloneModuleGraph {
const graph_ptr = try allocator.create(StandaloneModuleGraph);
graph_ptr.* = try StandaloneModuleGraph.fromBytes(allocator, raw_bytes, offsets);
graph_ptr.set();
return graph_ptr;
return try StandaloneModuleGraph.fromBytes(allocator, to_read, offsets);
}
/// heuristic: `bun build --compile` won't be supported if the name is "bun", "bunx", or "node".

View File

@@ -166,7 +166,12 @@ pub const WatchEvent = struct {
pub fn merge(this: *WatchEvent, other: WatchEvent) void {
this.name_len += other.name_len;
this.op = Op.merge(this.op, other.op);
this.op = Op{
.delete = this.op.delete or other.op.delete,
.metadata = this.op.metadata or other.op.metadata,
.rename = this.op.rename or other.op.rename,
.write = this.op.write or other.op.write,
};
}
pub const Op = packed struct(u8) {
@@ -175,8 +180,7 @@ pub const WatchEvent = struct {
rename: bool = false,
write: bool = false,
move_to: bool = false,
create: bool = false,
_padding: u2 = 0,
_padding: u3 = 0,
pub fn merge(before: Op, after: Op) Op {
return .{
@@ -185,7 +189,6 @@ pub const WatchEvent = struct {
.metadata = before.metadata or after.metadata,
.rename = before.rename or after.rename,
.move_to = before.move_to or after.move_to,
.create = before.create or after.create,
};
}

View File

@@ -11,7 +11,6 @@ pub const AllocationScopeIn = allocation_scope.AllocationScopeIn;
pub const NullableAllocator = @import("./allocators/NullableAllocator.zig");
pub const MaxHeapAllocator = @import("./allocators/MaxHeapAllocator.zig");
pub const LinuxMemFdAllocator = @import("./allocators/LinuxMemFdAllocator.zig");
pub const BufferFallbackAllocator = @import("./allocators/BufferFallbackAllocator.zig");
pub const MaybeOwned = @import("./allocators/maybe_owned.zig").MaybeOwned;
pub fn isSliceInBufferT(comptime T: type, slice: []const T, buffer: []const T) bool {

View File

@@ -1,85 +0,0 @@
/// An allocator that attempts to allocate from a provided buffer first,
/// falling back to another allocator when the buffer is exhausted.
/// Unlike `std.heap.StackFallbackAllocator`, this does not own the buffer.
const BufferFallbackAllocator = @This();
#fallback_allocator: Allocator,
#fixed_buffer_allocator: FixedBufferAllocator,
pub fn init(buffer: []u8, fallback_allocator: Allocator) BufferFallbackAllocator {
return .{
.#fallback_allocator = fallback_allocator,
.#fixed_buffer_allocator = FixedBufferAllocator.init(buffer),
};
}
pub fn allocator(self: *BufferFallbackAllocator) Allocator {
return .{
.ptr = self,
.vtable = &.{
.alloc = alloc,
.resize = resize,
.remap = remap,
.free = free,
},
};
}
fn alloc(ctx: *anyopaque, len: usize, alignment: std.mem.Alignment, ra: usize) ?[*]u8 {
const self: *BufferFallbackAllocator = @ptrCast(@alignCast(ctx));
return FixedBufferAllocator.alloc(
&self.#fixed_buffer_allocator,
len,
alignment,
ra,
) orelse self.#fallback_allocator.rawAlloc(len, alignment, ra);
}
fn resize(ctx: *anyopaque, buf: []u8, alignment: std.mem.Alignment, new_len: usize, ra: usize) bool {
const self: *BufferFallbackAllocator = @ptrCast(@alignCast(ctx));
if (self.#fixed_buffer_allocator.ownsPtr(buf.ptr)) {
return FixedBufferAllocator.resize(
&self.#fixed_buffer_allocator,
buf,
alignment,
new_len,
ra,
);
}
return self.#fallback_allocator.rawResize(buf, alignment, new_len, ra);
}
fn remap(ctx: *anyopaque, memory: []u8, alignment: std.mem.Alignment, new_len: usize, ra: usize) ?[*]u8 {
const self: *BufferFallbackAllocator = @ptrCast(@alignCast(ctx));
if (self.#fixed_buffer_allocator.ownsPtr(memory.ptr)) {
return FixedBufferAllocator.remap(
&self.#fixed_buffer_allocator,
memory,
alignment,
new_len,
ra,
);
}
return self.#fallback_allocator.rawRemap(memory, alignment, new_len, ra);
}
fn free(ctx: *anyopaque, buf: []u8, alignment: std.mem.Alignment, ra: usize) void {
const self: *BufferFallbackAllocator = @ptrCast(@alignCast(ctx));
if (self.#fixed_buffer_allocator.ownsPtr(buf.ptr)) {
return FixedBufferAllocator.free(
&self.#fixed_buffer_allocator,
buf,
alignment,
ra,
);
}
return self.#fallback_allocator.rawFree(buf, alignment, ra);
}
pub fn reset(self: *BufferFallbackAllocator) void {
self.#fixed_buffer_allocator.reset();
}
const std = @import("std");
const Allocator = std.mem.Allocator;
const FixedBufferAllocator = std.heap.FixedBufferAllocator;

View File

@@ -2,10 +2,7 @@
const Self = @This();
const safety_checks = bun.Environment.isDebug or bun.Environment.enable_asan;
#heap: *mimalloc.Heap,
thread_id: if (safety_checks) std.Thread.Id else void,
#heap: if (safety_checks) Owned(*DebugHeap) else *mimalloc.Heap,
/// Uses the default thread-local heap. This type is zero-sized.
///
@@ -23,18 +20,18 @@ pub const Default = struct {
///
/// This type is a `GenericAllocator`; see `src/allocators.zig`.
pub const Borrowed = struct {
#heap: *mimalloc.Heap,
#heap: BorrowedHeap,
pub fn allocator(self: Borrowed) std.mem.Allocator {
return .{ .ptr = self.#heap, .vtable = c_allocator_vtable };
return .{ .ptr = self.#heap, .vtable = &c_allocator_vtable };
}
pub fn getDefault() Borrowed {
return .{ .#heap = mimalloc.mi_heap_main() };
return .{ .#heap = getThreadHeap() };
}
pub fn gc(self: Borrowed) void {
mimalloc.mi_heap_collect(self.#heap, false);
mimalloc.mi_heap_collect(self.getMimallocHeap(), false);
}
pub fn helpCatchMemoryIssues(self: Borrowed) void {
@@ -44,17 +41,30 @@ pub const Borrowed = struct {
}
}
pub fn ownsPtr(self: Borrowed, ptr: *const anyopaque) bool {
return mimalloc.mi_heap_check_owned(self.getMimallocHeap(), ptr);
}
fn fromOpaque(ptr: *anyopaque) Borrowed {
return .{ .#heap = @ptrCast(@alignCast(ptr)) };
}
fn getMimallocHeap(self: Borrowed) *mimalloc.Heap {
return if (comptime safety_checks) self.#heap.inner else self.#heap;
}
fn assertThreadLock(self: Borrowed) void {
if (comptime safety_checks) self.#heap.thread_lock.assertLocked();
}
fn alignedAlloc(self: Borrowed, len: usize, alignment: Alignment) ?[*]u8 {
log("Malloc: {d}\n", .{len});
const heap = self.getMimallocHeap();
const ptr: ?*anyopaque = if (mimalloc.mustUseAlignedAlloc(alignment))
mimalloc.mi_heap_malloc_aligned(self.#heap, len, alignment.toByteUnits())
mimalloc.mi_heap_malloc_aligned(heap, len, alignment.toByteUnits())
else
mimalloc.mi_heap_malloc(self.#heap, len);
mimalloc.mi_heap_malloc(heap, len);
if (comptime bun.Environment.isDebug) {
const usable = mimalloc.mi_malloc_usable_size(ptr);
@@ -79,17 +89,42 @@ pub const Borrowed = struct {
}
};
const BorrowedHeap = if (safety_checks) *DebugHeap else *mimalloc.Heap;
const DebugHeap = struct {
inner: *mimalloc.Heap,
thread_lock: bun.safety.ThreadLock,
pub const deinit = void;
};
threadlocal var thread_heap: if (safety_checks) ?DebugHeap else void = if (safety_checks) null;
fn getThreadHeap() BorrowedHeap {
if (comptime !safety_checks) return mimalloc.mi_heap_get_default();
if (thread_heap == null) {
thread_heap = .{
.inner = mimalloc.mi_heap_get_default(),
.thread_lock = .initLocked(),
};
}
return &thread_heap.?;
}
const log = bun.Output.scoped(.mimalloc, .hidden);
pub fn allocator(self: Self) std.mem.Allocator {
self.assertThreadOwnership();
return self.borrow().allocator();
}
pub fn borrow(self: Self) Borrowed {
return .{ .#heap = self.#heap };
return .{ .#heap = if (comptime safety_checks) self.#heap.get() else self.#heap };
}
/// Internally, mimalloc calls mi_heap_get_default()
/// to get the default heap.
/// It uses pthread_getspecific to do that.
/// We can save those extra calls if we just do it once in here
pub fn getThreadLocalDefault() std.mem.Allocator {
if (bun.Environment.enable_asan) return bun.default_allocator;
return Borrowed.getDefault().allocator();
@@ -122,15 +157,22 @@ pub fn dumpStats(_: Self) void {
}
pub fn deinit(self: *Self) void {
mimalloc.mi_heap_destroy(self.#heap);
const mimalloc_heap = self.borrow().getMimallocHeap();
if (comptime safety_checks) {
self.#heap.deinit();
}
mimalloc.mi_heap_destroy(mimalloc_heap);
self.* = undefined;
}
pub fn init() Self {
return .{
.#heap = mimalloc.mi_heap_new() orelse bun.outOfMemory(),
.thread_id = if (safety_checks) std.Thread.getCurrentId() else {},
};
const mimalloc_heap = mimalloc.mi_heap_new() orelse bun.outOfMemory();
if (comptime !safety_checks) return .{ .#heap = mimalloc_heap };
const heap: Owned(*DebugHeap) = .new(.{
.inner = mimalloc_heap,
.thread_lock = .initLocked(),
});
return .{ .#heap = heap };
}
pub fn gc(self: Self) void {
@@ -141,16 +183,8 @@ pub fn helpCatchMemoryIssues(self: Self) void {
self.borrow().helpCatchMemoryIssues();
}
fn assertThreadOwnership(self: Self) void {
if (comptime safety_checks) {
const current_thread = std.Thread.getCurrentId();
if (current_thread != self.thread_id) {
std.debug.panic(
"MimallocArena used from wrong thread: arena belongs to thread {d}, but current thread is {d}",
.{ self.thread_id, current_thread },
);
}
}
pub fn ownsPtr(self: Self, ptr: *const anyopaque) bool {
return self.borrow().ownsPtr(ptr);
}
fn alignedAllocSize(ptr: [*]u8) usize {
@@ -159,10 +193,13 @@ fn alignedAllocSize(ptr: [*]u8) usize {
fn vtable_alloc(ptr: *anyopaque, len: usize, alignment: Alignment, _: usize) ?[*]u8 {
const self: Borrowed = .fromOpaque(ptr);
self.assertThreadLock();
return self.alignedAlloc(len, alignment);
}
fn vtable_resize(_: *anyopaque, buf: []u8, _: Alignment, new_len: usize, _: usize) bool {
fn vtable_resize(ptr: *anyopaque, buf: []u8, _: Alignment, new_len: usize, _: usize) bool {
const self: Borrowed = .fromOpaque(ptr);
self.assertThreadLock();
return mimalloc.mi_expand(buf.ptr, new_len) != null;
}
@@ -186,17 +223,39 @@ fn vtable_free(
}
}
/// Attempt to expand or shrink memory, allowing relocation.
///
/// `memory.len` must equal the length requested from the most recent
/// successful call to `alloc`, `resize`, or `remap`. `alignment` must
/// equal the same value that was passed as the `alignment` parameter to
/// the original `alloc` call.
///
/// A non-`null` return value indicates the resize was successful. The
/// allocation may have same address, or may have been relocated. In either
/// case, the allocation now has size of `new_len`. A `null` return value
/// indicates that the resize would be equivalent to allocating new memory,
/// copying the bytes from the old memory, and then freeing the old memory.
/// In such case, it is more efficient for the caller to perform the copy.
///
/// `new_len` must be greater than zero.
///
/// `ret_addr` is optionally provided as the first return address of the
/// allocation call stack. If the value is `0` it means no return address
/// has been provided.
fn vtable_remap(ptr: *anyopaque, buf: []u8, alignment: Alignment, new_len: usize, _: usize) ?[*]u8 {
const self: Borrowed = .fromOpaque(ptr);
const value = mimalloc.mi_heap_realloc_aligned(self.#heap, buf.ptr, new_len, alignment.toByteUnits());
self.assertThreadLock();
const heap = self.getMimallocHeap();
const aligned_size = alignment.toByteUnits();
const value = mimalloc.mi_heap_realloc_aligned(heap, buf.ptr, new_len, aligned_size);
return @ptrCast(value);
}
pub fn isInstance(alloc: std.mem.Allocator) bool {
return alloc.vtable == c_allocator_vtable;
return alloc.vtable == &c_allocator_vtable;
}
const c_allocator_vtable = &std.mem.Allocator.VTable{
const c_allocator_vtable = std.mem.Allocator.VTable{
.alloc = vtable_alloc,
.resize = vtable_resize,
.remap = vtable_remap,
@@ -209,3 +268,5 @@ const Alignment = std.mem.Alignment;
const bun = @import("bun");
const assert = bun.assert;
const mimalloc = bun.mimalloc;
const Owned = bun.ptr.Owned;
const safety_checks = bun.Environment.ci_assert;

View File

@@ -60,29 +60,17 @@ pub const Heap = opaque {
return mi_heap_realloc(self, p, newsize);
}
pub fn isOwned(self: *Heap, p: ?*const anyopaque) bool {
return mi_heap_contains(self, p);
pub fn isOwned(self: *Heap, p: ?*anyopaque) bool {
return mi_heap_check_owned(self, p);
}
};
pub extern fn mi_heap_new() ?*Heap;
pub extern fn mi_heap_delete(heap: *Heap) void;
pub extern fn mi_heap_destroy(heap: *Heap) void;
pub extern fn mi_heap_set_default(heap: *Heap) *Heap;
pub extern fn mi_heap_get_default() *Heap;
pub extern fn mi_heap_get_backing() *Heap;
pub extern fn mi_heap_collect(heap: *Heap, force: bool) void;
pub extern fn mi_heap_main() *Heap;
// Thread-local heap (theap) API - new in mimalloc v3
pub const THeap = opaque {};
pub extern fn mi_theap_get_default() *THeap;
pub extern fn mi_theap_set_default(theap: *THeap) *THeap;
pub extern fn mi_theap_collect(theap: *THeap, force: bool) void;
pub extern fn mi_theap_malloc(theap: *THeap, size: usize) ?*anyopaque;
pub extern fn mi_theap_zalloc(theap: *THeap, size: usize) ?*anyopaque;
pub extern fn mi_theap_calloc(theap: *THeap, count: usize, size: usize) ?*anyopaque;
pub extern fn mi_theap_malloc_small(theap: *THeap, size: usize) ?*anyopaque;
pub extern fn mi_theap_malloc_aligned(theap: *THeap, size: usize, alignment: usize) ?*anyopaque;
pub extern fn mi_theap_realloc(theap: *THeap, p: ?*anyopaque, newsize: usize) ?*anyopaque;
pub extern fn mi_theap_destroy(theap: *THeap) void;
pub extern fn mi_heap_theap(heap: *Heap) *THeap;
pub extern fn mi_heap_malloc(heap: *Heap, size: usize) ?*anyopaque;
pub extern fn mi_heap_zalloc(heap: *Heap, size: usize) ?*anyopaque;
pub extern fn mi_heap_calloc(heap: *Heap, count: usize, size: usize) ?*anyopaque;
@@ -114,7 +102,8 @@ pub extern fn mi_heap_rezalloc_aligned(heap: *Heap, p: ?*anyopaque, newsize: usi
pub extern fn mi_heap_rezalloc_aligned_at(heap: *Heap, p: ?*anyopaque, newsize: usize, alignment: usize, offset: usize) ?*anyopaque;
pub extern fn mi_heap_recalloc_aligned(heap: *Heap, p: ?*anyopaque, newcount: usize, size: usize, alignment: usize) ?*anyopaque;
pub extern fn mi_heap_recalloc_aligned_at(heap: *Heap, p: ?*anyopaque, newcount: usize, size: usize, alignment: usize, offset: usize) ?*anyopaque;
pub extern fn mi_heap_contains(heap: *const Heap, p: ?*const anyopaque) bool;
pub extern fn mi_heap_contains_block(heap: *Heap, p: *const anyopaque) bool;
pub extern fn mi_heap_check_owned(heap: *Heap, p: *const anyopaque) bool;
pub extern fn mi_check_owned(p: ?*const anyopaque) bool;
pub const struct_mi_heap_area_s = extern struct {
blocks: ?*anyopaque,

View File

@@ -343,7 +343,6 @@ pub const api = struct {
sqlite_embedded = 17,
html = 18,
yaml = 19,
json5 = 20,
_,
pub fn jsonStringify(self: @This(), writer: anytype) !void {

View File

@@ -320,8 +320,9 @@ pub const Runner = struct {
.Null => return Expr.init(E.Null, E.Null{}, this.caller.loc),
.Private => {
this.is_top_level = false;
if (this.visited.get(value)) |cached| {
return cached;
const _entry = this.visited.getOrPut(this.allocator, value) catch unreachable;
if (_entry.found_existing) {
return _entry.value_ptr.*;
}
var blob_: ?*const jsc.WebCore.Blob = null;
@@ -363,28 +364,20 @@ pub const Runner = struct {
const _entry = this.visited.getOrPut(this.allocator, value) catch unreachable;
if (_entry.found_existing) {
// Check if we're in the middle of processing this value (circular reference)
if (_entry.value_ptr.isMissing()) {
this.log.addErrorFmt(
this.source,
this.caller.loc,
this.allocator,
"macro returned an object with a circular reference, which cannot be converted to JavaScript",
.{},
) catch unreachable;
return error.MacroFailed;
}
return _entry.value_ptr.*;
}
// Use sentinel to detect circular references during processing
_entry.value_ptr.* = Expr.empty;
var iter = try jsc.JSArrayIterator.init(value, this.global);
// Process all array items
var array = this.allocator.alloc(Expr, iter.len) catch unreachable;
errdefer this.allocator.free(array);
const expr = Expr.init(
E.Array,
E.Array{ .items = ExprNodeList.empty, .was_originally_macro = true },
this.caller.loc,
);
_entry.value_ptr.* = expr;
var i: usize = 0;
while (try iter.next()) |item| {
array[i] = try this.run(item);
@@ -393,15 +386,8 @@ pub const Runner = struct {
i += 1;
}
// Create and store the expression only after all items are processed
var items = ExprNodeList.fromOwnedSlice(array);
items.len = @truncate(i);
const expr = Expr.init(
E.Array,
E.Array{ .items = items, .was_originally_macro = true },
this.caller.loc,
);
_entry.value_ptr.* = expr;
expr.data.e_array.items = ExprNodeList.fromOwnedSlice(array);
expr.data.e_array.items.len = @truncate(i);
return expr;
},
// TODO: optimize this
@@ -409,22 +395,16 @@ pub const Runner = struct {
this.is_top_level = false;
const _entry = this.visited.getOrPut(this.allocator, value) catch unreachable;
if (_entry.found_existing) {
// Check if we're in the middle of processing this value (circular reference)
if (_entry.value_ptr.isMissing()) {
this.log.addErrorFmt(
this.source,
this.caller.loc,
this.allocator,
"macro returned an object with a circular reference, which cannot be converted to JavaScript",
.{},
) catch unreachable;
return error.MacroFailed;
}
return _entry.value_ptr.*;
}
// Use sentinel to detect circular references during processing
_entry.value_ptr.* = Expr.empty;
// Reserve a placeholder to break cycles.
const expr = Expr.init(
E.Object,
E.Object{ .properties = G.Property.List{}, .was_originally_macro = true },
this.caller.loc,
);
_entry.value_ptr.* = expr;
// SAFETY: tag ensures `value` is an object.
const obj = value.getObject() orelse unreachable;
@@ -453,13 +433,7 @@ pub const Runner = struct {
}) catch |err| bun.handleOom(err);
}
// Create and store the expression only after all properties are processed
const expr = Expr.init(
E.Object,
E.Object{ .properties = properties, .was_originally_macro = true },
this.caller.loc,
);
_entry.value_ptr.* = expr;
expr.data.e_object.properties = properties;
return expr;
},
@@ -496,8 +470,9 @@ pub const Runner = struct {
return Expr.init(E.String, E.String.init(out_slice), this.caller.loc);
},
.Promise => {
if (this.visited.get(value)) |cached| {
return cached;
const _entry = this.visited.getOrPut(this.allocator, value) catch unreachable;
if (_entry.found_existing) {
return _entry.value_ptr.*;
}
const promise = value.asAnyPromise() orelse @panic("Unexpected promise type");
@@ -519,7 +494,7 @@ pub const Runner = struct {
this.is_top_level = false;
const result = try this.run(promise_result);
this.visited.put(this.allocator, value, result) catch unreachable;
_entry.value_ptr.* = result;
return result;
},
else => {},

View File

@@ -2204,7 +2204,7 @@ pub fn NewParser_(
var is_sloppy_mode_block_level_fn_stmt = false;
const original_member_ref = value.ref;
if (p.willUseRenamer() and symbol.kind == .hoisted_function) {
if (p.willUseRenamer() and symbol.kind == .hoisted_function and scope.kind != .label) {
// Block-level function declarations behave like "let" in strict mode
if (scope.strict_mode != .sloppy_mode) {
continue;
@@ -6467,11 +6467,6 @@ pub fn NewParser_(
parts.items[0].stmts = top_level_stmts;
}
// REPL mode transforms
if (p.options.repl_mode) {
try repl_transforms.ReplTransforms(P).apply(p, parts, allocator);
}
var top_level_symbols_to_parts = js_ast.Ast.TopLevelSymbolToParts{};
var top_level = &top_level_symbols_to_parts;
@@ -6532,9 +6527,7 @@ pub fn NewParser_(
break :brk p.hmr_api_ref;
}
// When code splitting is enabled, always create wrapper_ref to match esbuild behavior.
// Otherwise, use needsWrapperRef() to optimize away unnecessary wrappers.
if (p.options.bundle and (p.options.code_splitting or p.needsWrapperRef(parts.items))) {
if (p.options.bundle and p.needsWrapperRef(parts.items)) {
break :brk p.newSymbol(
.other,
std.fmt.allocPrint(
@@ -6767,8 +6760,6 @@ var falseValueExpr = Expr.Data{ .e_boolean = E.Boolean{ .value = false } };
const string = []const u8;
const repl_transforms = @import("./repl_transforms.zig");
const Define = @import("../defines.zig").Define;
const DefineData = @import("../defines.zig").DefineData;

View File

@@ -19,7 +19,6 @@ pub const Parser = struct {
tree_shaking: bool = false,
bundle: bool = false,
code_splitting: bool = false,
package_version: string = "",
macro_context: *MacroContextType() = undefined,
@@ -39,13 +38,6 @@ pub const Parser = struct {
/// able to customize what import sources are used.
framework: ?*bun.bake.Framework = null,
/// REPL mode: transforms code for interactive evaluation
/// - Wraps lone object literals `{...}` in parentheses
/// - Hoists variable declarations for REPL persistence
/// - Wraps last expression in { value: expr } for result capture
/// - Wraps code with await in async IIFE
repl_mode: bool = false,
pub fn hashForRuntimeTranspiler(this: *const Options, hasher: *std.hash.Wyhash, did_use_jsx: bool) void {
bun.assert(!this.bundle);
@@ -791,39 +783,9 @@ pub const Parser = struct {
// else
// module.exports = require('./foo.dev.js')
//
// Find the part containing the actual module.exports = require() statement,
// skipping over parts that only contain comments, directives, and empty statements.
// This handles files like:
//
// /*!
// * express
// * MIT Licensed
// */
// 'use strict';
// module.exports = require('./lib/express');
//
// When tree-shaking is enabled, each statement becomes its own part, so we need
// to look across all parts to find the single meaningful statement.
const StmtAndPart = struct { stmt: Stmt, part_idx: usize };
const stmt_and_part: ?StmtAndPart = brk: {
var found: ?StmtAndPart = null;
for (parts.items, 0..) |part, part_idx| {
for (part.stmts) |s| {
switch (s.data) {
.s_comment, .s_directive, .s_empty => continue,
else => {
// If we already found a non-trivial statement, there's more than one
if (found != null) break :brk null;
found = .{ .stmt = s, .part_idx = part_idx };
},
}
}
}
break :brk found;
};
if (stmt_and_part) |found| {
const stmt = found.stmt;
var part = &parts.items[found.part_idx];
if (parts.items.len == 1 and parts.items[0].stmts.len == 1) {
var part = &parts.items[0];
const stmt: Stmt = part.stmts[0];
if (p.symbols.items[p.module_ref.innerIndex()].use_count_estimate == 1) {
if (stmt.data == .s_expr) {
const value: Expr = stmt.data.s_expr.value;
@@ -838,13 +800,13 @@ pub const Parser = struct {
left.data.e_dot.target.data == .e_identifier and
left.data.e_dot.target.data.e_identifier.ref.eql(p.module_ref))
{
const redirect_import_record_index: ?u32 = inner_brk: {
const redirect_import_record_index: ?u32 = brk: {
// general case:
//
// module.exports = require("foo");
//
if (right.data == .e_require_string) {
break :inner_brk right.data.e_require_string.import_record_index;
break :brk right.data.e_require_string.import_record_index;
}
// special case: a module for us to unwrap
@@ -863,10 +825,10 @@ pub const Parser = struct {
{
// We know it's 0 because there is only one import in the whole file
// so that one import must be the one we're looking for
break :inner_brk 0;
break :brk 0;
}
break :inner_brk null;
break :brk null;
};
if (redirect_import_record_index) |id| {
part.symbol_uses = .{};

View File

@@ -1,365 +0,0 @@
/// REPL Transform module - transforms code for interactive REPL evaluation
///
/// This module provides transformations for REPL mode:
/// - Wraps the last expression in { value: expr } for result capture
/// - Wraps code with await in async IIFE with variable hoisting
/// - Hoists declarations for variable persistence across REPL lines
/// Comptime-generic REPL transform over a parser instance of type `P`.
///
/// Rewrites a parsed program for interactive evaluation:
/// - hoists `var`/`let`/`const`, function, and class declarations to
///   top-level `var` statements so they persist across REPL lines,
/// - moves the original initializers/bodies into an IIFE (async when the
///   source contains top-level `await`),
/// - wraps the final expression statement in `return { __proto__: null, value: ... }`
///   so the host can capture the evaluation result.
///
/// The returned struct only groups the helper functions; it carries no state.
pub fn ReplTransforms(comptime P: type) type {
    return struct {
        const Self = @This();

        /// Apply REPL-mode transforms to the AST.
        ///
        /// Entry point called by the parser after visiting. Flattens all
        /// parts' statements into one slice, then delegates to
        /// `transformWithHoisting`. No-ops when the program is empty or
        /// contains a top-level `return` (which indicates a CommonJS-style
        /// module body rather than REPL input).
        pub fn apply(p: *P, parts: *ListManaged(js_ast.Part), allocator: Allocator) !void {
            // Skip transform if there's a top-level return (indicates module pattern)
            if (p.has_top_level_return) {
                return;
            }
            // Count statements across all parts so we can allocate once.
            var total_stmts_count: usize = 0;
            for (parts.items) |part| {
                total_stmts_count += part.stmts.len;
            }
            if (total_stmts_count == 0) {
                return;
            }
            // Non-empty keyword location text implies the source used top-level await.
            const has_top_level_await = p.top_level_await_keyword.len > 0;
            // Collect all statements into a single array (part boundaries are
            // irrelevant for REPL output; the transform emits a single part).
            var all_stmts = bun.handleOom(allocator.alloc(Stmt, total_stmts_count));
            var stmt_idx: usize = 0;
            for (parts.items) |part| {
                for (part.stmts) |stmt| {
                    all_stmts[stmt_idx] = stmt;
                    stmt_idx += 1;
                }
            }
            // The IIFE is async exactly when top-level await is present.
            try transformWithHoisting(p, parts, all_stmts, allocator, has_top_level_await);
        }

        /// Transform code with hoisting and IIFE wrapper.
        ///
        /// Produces: `var a, b, ...;` hoisted declarations followed by
        /// `(() => { ... })()` (or `(async () => { ... })()` when
        /// `is_async` is true) containing the rewritten statements.
        ///
        /// `is_async`: true for async IIFE (when top-level await present),
        /// false for sync IIFE.
        ///
        /// NOTE(review): allocations come from `allocator` and the
        /// intermediate lists are never freed — presumably this is an
        /// arena owned by the parse; confirm before reusing elsewhere.
        fn transformWithHoisting(
            p: *P,
            parts: *ListManaged(js_ast.Part),
            all_stmts: []Stmt,
            allocator: Allocator,
            is_async: bool,
        ) !void {
            if (all_stmts.len == 0) return;
            // `hoisted_stmts` ends up outside the IIFE; `inner_stmts` inside it.
            var hoisted_stmts = ListManaged(Stmt).init(allocator);
            var inner_stmts = ListManaged(Stmt).init(allocator);
            try hoisted_stmts.ensureTotalCapacity(all_stmts.len);
            try inner_stmts.ensureTotalCapacity(all_stmts.len);
            // Process each statement - hoist all declarations for REPL persistence.
            for (all_stmts) |stmt| {
                switch (stmt.data) {
                    .s_local => |local| {
                        // Hoist all declarations as var so they become context properties.
                        // In sloppy mode, var at top level becomes a property of the
                        // global/context object. This is essential for REPL variable
                        // persistence across vm.runInContext calls.
                        const kind: S.Local.Kind = .k_var;
                        // Extract individual identifiers from binding patterns for
                        // hoisting (destructuring patterns contribute one decl per name).
                        var hoisted_decl_list = ListManaged(G.Decl).init(allocator);
                        for (local.decls.slice()) |decl| {
                            try extractIdentifiersFromBinding(p, decl.binding, &hoisted_decl_list);
                        }
                        if (hoisted_decl_list.items.len > 0) {
                            try hoisted_stmts.append(p.s(S.Local{
                                .kind = kind,
                                .decls = Decl.List.fromOwnedSlice(hoisted_decl_list.items),
                            }, stmt.loc));
                        }
                        // Create assignment expressions for the inner statements;
                        // declarations without initializers contribute nothing here.
                        for (local.decls.slice()) |decl| {
                            if (decl.value) |value| {
                                // Create assignment expression: binding = value
                                const assign_expr = createBindingAssignment(p, decl.binding, value, allocator);
                                try inner_stmts.append(p.s(S.SExpr{ .value = assign_expr }, stmt.loc));
                            }
                        }
                    },
                    .s_function => |func| {
                        // For function declarations:
                        //   Hoist as: var funcName;
                        //   Inner: this.funcName = funcName; function funcName() {}
                        // Anonymous function declarations (no name) are passed through.
                        if (func.func.name) |name_loc| {
                            try hoisted_stmts.append(p.s(S.Local{
                                .kind = .k_var,
                                .decls = Decl.List.fromOwnedSlice(bun.handleOom(allocator.dupe(G.Decl, &.{
                                    G.Decl{
                                        .binding = p.b(B.Identifier{ .ref = name_loc.ref.? }, name_loc.loc),
                                        .value = null,
                                    },
                                }))),
                            }, stmt.loc));
                            // Add this.funcName = funcName assignment so the function
                            // becomes reachable on the REPL context object.
                            const this_expr = p.newExpr(E.This{}, stmt.loc);
                            const this_dot = p.newExpr(E.Dot{
                                .target = this_expr,
                                .name = p.symbols.items[name_loc.ref.?.innerIndex()].original_name,
                                .name_loc = name_loc.loc,
                            }, stmt.loc);
                            const func_id = p.newExpr(E.Identifier{ .ref = name_loc.ref.? }, name_loc.loc);
                            const assign = p.newExpr(E.Binary{
                                .op = .bin_assign,
                                .left = this_dot,
                                .right = func_id,
                            }, stmt.loc);
                            try inner_stmts.append(p.s(S.SExpr{ .value = assign }, stmt.loc));
                        }
                        // Add the function declaration itself (hoisting inside the
                        // IIFE makes it available before the assignment runs).
                        try inner_stmts.append(stmt);
                    },
                    .s_class => |class| {
                        // For class declarations:
                        //   Hoist as: var ClassName; (use var so it persists to vm context)
                        //   Inner: ClassName = class ClassName {}
                        if (class.class.class_name) |name_loc| {
                            try hoisted_stmts.append(p.s(S.Local{
                                .kind = .k_var,
                                .decls = Decl.List.fromOwnedSlice(bun.handleOom(allocator.dupe(G.Decl, &.{
                                    G.Decl{
                                        .binding = p.b(B.Identifier{ .ref = name_loc.ref.? }, name_loc.loc),
                                        .value = null,
                                    },
                                }))),
                            }, stmt.loc));
                            // Convert class declaration to assignment:
                            //   ClassName = class ClassName {}
                            const class_expr = p.newExpr(class.class, stmt.loc);
                            const class_id = p.newExpr(E.Identifier{ .ref = name_loc.ref.? }, name_loc.loc);
                            const assign = p.newExpr(E.Binary{
                                .op = .bin_assign,
                                .left = class_id,
                                .right = class_expr,
                            }, stmt.loc);
                            try inner_stmts.append(p.s(S.SExpr{ .value = assign }, stmt.loc));
                        } else {
                            // Anonymous class declaration: leave as-is.
                            try inner_stmts.append(stmt);
                        }
                    },
                    .s_directive => |directive| {
                        // In REPL mode, treat directives (string literals) as
                        // expressions so `"use strict"` at the prompt evaluates to
                        // the string instead of acting as a directive prologue.
                        const str_expr = p.newExpr(E.String{ .data = directive.value }, stmt.loc);
                        try inner_stmts.append(p.s(S.SExpr{ .value = str_expr }, stmt.loc));
                    },
                    else => {
                        // All other statements run unchanged inside the IIFE.
                        try inner_stmts.append(stmt);
                    },
                }
            }
            // Wrap the last expression in return { value: expr } so the host can
            // read the evaluation result from the IIFE's return value.
            wrapLastExpressionWithReturn(p, &inner_stmts, allocator);
            // Create the IIFE: (() => { ...inner_stmts... })()
            // or (async () => { ... })()
            const arrow = p.newExpr(E.Arrow{
                .args = &.{},
                .body = .{ .loc = logger.Loc.Empty, .stmts = inner_stmts.items },
                .is_async = is_async,
            }, logger.Loc.Empty);
            const iife = p.newExpr(E.Call{
                .target = arrow,
                .args = ExprNodeList{},
            }, logger.Loc.Empty);
            // Final output: hoisted declarations + IIFE call
            const final_stmts_count = hoisted_stmts.items.len + 1;
            var final_stmts = bun.handleOom(allocator.alloc(Stmt, final_stmts_count));
            for (hoisted_stmts.items, 0..) |stmt, j| {
                final_stmts[j] = stmt;
            }
            final_stmts[hoisted_stmts.items.len] = p.s(S.SExpr{ .value = iife }, logger.Loc.Empty);
            // Collapse everything into the first part and drop the rest.
            if (parts.items.len > 0) {
                parts.items[0].stmts = final_stmts;
                parts.items.len = 1;
            }
        }

        /// Wrap the last expression in return { value: expr }.
        ///
        /// Scans backwards over trailing empty/comment statements; if the
        /// last meaningful statement is an expression statement, replaces it
        /// with `return { __proto__: null, value: expr }`. Any other
        /// statement kind (e.g. a declaration or loop) stops the scan and
        /// leaves the list unchanged.
        fn wrapLastExpressionWithReturn(p: *P, inner_stmts: *ListManaged(Stmt), allocator: Allocator) void {
            if (inner_stmts.items.len > 0) {
                var last_idx: usize = inner_stmts.items.len;
                while (last_idx > 0) {
                    last_idx -= 1;
                    const last_stmt = inner_stmts.items[last_idx];
                    switch (last_stmt.data) {
                        // `continue` here resumes the enclosing while loop,
                        // skipping trailing no-op statements.
                        .s_empty, .s_comment => continue,
                        .s_expr => |expr_data| {
                            // Wrap in return { value: expr }
                            const wrapped = wrapExprInValueObject(p, expr_data.value, allocator);
                            inner_stmts.items[last_idx] = p.s(S.Return{ .value = wrapped }, last_stmt.loc);
                            break;
                        },
                        else => break,
                    }
                }
            }
        }

        /// Extract individual identifiers from a binding pattern for hoisting.
        ///
        /// Recursively walks array/object destructuring patterns and appends
        /// one value-less `G.Decl` per bound identifier to `decls`.
        /// `b_missing` (array holes) contributes nothing.
        fn extractIdentifiersFromBinding(p: *P, binding: Binding, decls: *ListManaged(G.Decl)) !void {
            switch (binding.data) {
                .b_identifier => |ident| {
                    try decls.append(G.Decl{
                        .binding = p.b(B.Identifier{ .ref = ident.ref }, binding.loc),
                        .value = null,
                    });
                },
                .b_array => |arr| {
                    for (arr.items) |item| {
                        try extractIdentifiersFromBinding(p, item.binding, decls);
                    }
                },
                .b_object => |obj| {
                    for (obj.properties) |prop| {
                        try extractIdentifiersFromBinding(p, prop.value, decls);
                    }
                },
                .b_missing => {},
            }
        }

        /// Create { __proto__: null, value: expr } wrapper object.
        /// Uses null prototype to create a clean data object so inherited
        /// properties cannot shadow or interfere with the result payload.
        fn wrapExprInValueObject(p: *P, expr: Expr, allocator: Allocator) Expr {
            var properties = bun.handleOom(allocator.alloc(G.Property, 2));
            // __proto__: null - creates null-prototype object
            properties[0] = G.Property{
                .key = p.newExpr(E.String{ .data = "__proto__" }, expr.loc),
                .value = p.newExpr(E.Null{}, expr.loc),
            };
            // value: expr - the actual result value
            properties[1] = G.Property{
                .key = p.newExpr(E.String{ .data = "value" }, expr.loc),
                .value = expr,
            };
            return p.newExpr(E.Object{
                .properties = G.Property.List.fromOwnedSlice(properties),
            }, expr.loc);
        }

        /// Create assignment expression from binding pattern.
        ///
        /// Turns `binding = value` into an AST expression; destructuring
        /// patterns are converted to assignment targets via
        /// `convertBindingToExpr`. A `b_missing` binding yields an
        /// `E.Missing` expression (mirrors `convertBindingToExpr`).
        fn createBindingAssignment(p: *P, binding: Binding, value: Expr, allocator: Allocator) Expr {
            switch (binding.data) {
                .b_identifier => |ident| {
                    return p.newExpr(E.Binary{
                        .op = .bin_assign,
                        .left = p.newExpr(E.Identifier{ .ref = ident.ref }, binding.loc),
                        .right = value,
                    }, binding.loc);
                },
                .b_array => {
                    // For array destructuring, create: [a, b] = value
                    return p.newExpr(E.Binary{
                        .op = .bin_assign,
                        .left = convertBindingToExpr(p, binding, allocator),
                        .right = value,
                    }, binding.loc);
                },
                .b_object => {
                    // For object destructuring, create: {a, b} = value
                    return p.newExpr(E.Binary{
                        .op = .bin_assign,
                        .left = convertBindingToExpr(p, binding, allocator),
                        .right = value,
                    }, binding.loc);
                },
                .b_missing => {
                    // Return Missing expression to match convertBindingToExpr
                    return p.newExpr(E.Missing{}, binding.loc);
                },
            }
        }

        /// Convert a binding pattern to an expression (for assignment targets).
        /// Handles spread/rest patterns in arrays and objects to match
        /// Binding.toExpr behavior; default values become `target = default`
        /// assignments inside the pattern.
        fn convertBindingToExpr(p: *P, binding: Binding, allocator: Allocator) Expr {
            switch (binding.data) {
                .b_identifier => |ident| {
                    return p.newExpr(E.Identifier{ .ref = ident.ref }, binding.loc);
                },
                .b_array => |arr| {
                    var items = bun.handleOom(allocator.alloc(Expr, arr.items.len));
                    for (arr.items, 0..) |item, i| {
                        const expr = convertBindingToExpr(p, item.binding, allocator);
                        // Check for spread pattern: if has_spread and this is the
                        // last element (rest elements are always last in a pattern).
                        if (arr.has_spread and i == arr.items.len - 1) {
                            items[i] = p.newExpr(E.Spread{ .value = expr }, expr.loc);
                        } else if (item.default_value) |default_val| {
                            items[i] = p.newExpr(E.Binary{
                                .op = .bin_assign,
                                .left = expr,
                                .right = default_val,
                            }, item.binding.loc);
                        } else {
                            items[i] = expr;
                        }
                    }
                    return p.newExpr(E.Array{
                        .items = ExprNodeList.fromOwnedSlice(items),
                        .is_single_line = arr.is_single_line,
                    }, binding.loc);
                },
                .b_object => |obj| {
                    var properties = bun.handleOom(allocator.alloc(G.Property, obj.properties.len));
                    for (obj.properties, 0..) |prop, i| {
                        properties[i] = G.Property{
                            .flags = prop.flags,
                            .key = prop.key,
                            // Set kind to .spread if the property has spread flag
                            .kind = if (prop.flags.contains(.is_spread)) .spread else .normal,
                            .value = convertBindingToExpr(p, prop.value, allocator),
                            .initializer = prop.default_value,
                        };
                    }
                    return p.newExpr(E.Object{
                        .properties = G.Property.List.fromOwnedSlice(properties),
                        .is_single_line = obj.is_single_line,
                    }, binding.loc);
                },
                .b_missing => {
                    return p.newExpr(E.Missing{}, binding.loc);
                },
            }
        }
    };
}
const std = @import("std");
const Allocator = std.mem.Allocator;
const ListManaged = std.array_list.Managed;
const bun = @import("bun");
const logger = bun.logger;
const js_ast = bun.ast;
const B = js_ast.B;
const Binding = js_ast.Binding;
const E = js_ast.E;
const Expr = js_ast.Expr;
const ExprNodeList = js_ast.ExprNodeList;
const S = js_ast.S;
const Stmt = js_ast.Stmt;
const G = js_ast.G;
const Decl = G.Decl;

View File

@@ -810,7 +810,12 @@ pub fn Visit(
// This is only done for function declarations that are not generators
// or async functions, since this is a backwards-compatibility hack from
// Annex B of the JavaScript standard.
//
// However, function declarations inside labeled statements should NOT
// be treated as block-level functions. Per ECMAScript Annex B, they
// should hoist like regular function declarations in sloppy mode.
!p.current_scope.kindStopsHoisting() and
p.current_scope.kind != .label and
p.symbols.items[data.func.name.?.ref.?.innerIndex()].kind == .hoisted_function)
{
break :list_getter &before;

View File

@@ -688,7 +688,19 @@ pub fn VisitStmt(
else => {},
}
data.stmt = p.visitSingleStmt(data.stmt, StmtsKind.none);
// For function declarations inside labels in sloppy mode, we need special handling.
// Per ECMAScript Annex B, they should hoist like regular function declarations,
// not like block-scoped functions. We can't use visitSingleStmt because it would
// wrap the function in a block via stmtsToSingleStmt.
if (data.stmt.data == .s_function and p.current_scope.strict_mode == .sloppy_mode) {
var inner_stmts = ListManaged(Stmt).initCapacity(p.allocator, 1) catch unreachable;
inner_stmts.append(data.stmt) catch unreachable;
p.visitStmts(&inner_stmts, StmtsKind.none) catch unreachable;
// The function should remain as a single statement without block wrapping
data.stmt = if (inner_stmts.items.len == 1) inner_stmts.items[0] else p.stmtsToSingleStmt(data.stmt.loc, inner_stmts.items);
} else {
data.stmt = p.visitSingleStmt(data.stmt, StmtsKind.none);
}
p.popScope();
try stmts.append(stmt.*);

View File

@@ -1355,7 +1355,7 @@ fn computeArgumentsForFrameworkRequest(
const relative_path_buf = bun.path_buffer_pool.get();
defer bun.path_buffer_pool.put(relative_path_buf);
var route_name = bun.String.cloneUTF8(dev.relativePath(relative_path_buf, keys[fromOpaqueFileId(.server, route.file_page.unwrap().?).get()]));
try arr.putIndex(global, 0, try route_name.transferToJS(global));
try arr.putIndex(global, 0, route_name.transferToJS(global));
}
n = 1;
while (true) {
@@ -1366,7 +1366,7 @@ fn computeArgumentsForFrameworkRequest(
relative_path_buf,
keys[fromOpaqueFileId(.server, layout).get()],
));
try arr.putIndex(global, @intCast(n), try layout_name.transferToJS(global));
try arr.putIndex(global, @intCast(n), layout_name.transferToJS(global));
n += 1;
}
route = dev.router.routePtr(route.parent.unwrap() orelse break);
@@ -1383,7 +1383,7 @@ fn computeArgumentsForFrameworkRequest(
std.mem.asBytes(&generation),
}) catch |err| bun.handleOom(err);
defer str.deref();
const js = try str.toJS(dev.vm.global);
const js = str.toJS(dev.vm.global);
framework_bundle.cached_client_bundle_url = .create(js, dev.vm.global);
break :str js;
},
@@ -2091,7 +2091,7 @@ fn generateCssJSArray(dev: *DevServer, route_bundle: *RouteBundle) bun.JSError!j
}) catch unreachable;
const str = bun.String.cloneUTF8(path);
defer str.deref();
try arr.putIndex(dev.vm.global, @intCast(i), try str.toJS(dev.vm.global));
try arr.putIndex(dev.vm.global, @intCast(i), str.toJS(dev.vm.global));
}
return arr;
}
@@ -2136,7 +2136,7 @@ fn makeArrayForServerComponentsPatch(dev: *DevServer, global: *jsc.JSGlobalObjec
defer bun.path_buffer_pool.put(relative_path_buf);
const str = bun.String.cloneUTF8(dev.relativePath(relative_path_buf, names[item.get()]));
defer str.deref();
try arr.putIndex(global, @intCast(i), try str.toJS(global));
try arr.putIndex(global, @intCast(i), str.toJS(global));
}
return arr;
}
@@ -2845,7 +2845,7 @@ pub fn finalizeBundle(
if (dev.bundling_failures.count() == 0) {
if (current_bundle.had_reload_event) {
const clear_terminal = !debug.isVisible() and !dev.vm.transpiler.env.hasSetNoClearTerminalOnReload(false);
const clear_terminal = !debug.isVisible();
if (clear_terminal) {
Output.disableBuffering();
Output.resetTerminalAll();

View File

@@ -48,7 +48,6 @@ pub fn trackResolutionFailure(store: *DirectoryWatchStore, import_source: []cons
.jsonc,
.toml,
.yaml,
.json5,
.wasm,
.napi,
.base64,

View File

@@ -839,7 +839,7 @@ pub const MatchedParams = struct {
const value_str = bun.String.cloneUTF8(param.value);
defer value_str.deref();
_ = obj.putBunStringOneOrArray(global, &key_str, value_str.toJS(global) catch unreachable) catch unreachable;
_ = obj.putBunStringOneOrArray(global, &key_str, value_str.toJS(global)) catch unreachable;
}
return obj;
}
@@ -1247,7 +1247,7 @@ pub const JSFrameworkRouter = struct {
for (params_out.params.slice()) |param| {
const value = bun.String.cloneUTF8(param.value);
defer value.deref();
obj.put(global, param.key, try value.toJS(global));
obj.put(global, param.key, value.toJS(global));
}
break :params obj;
} else .null,
@@ -1271,8 +1271,8 @@ pub const JSFrameworkRouter = struct {
const route = jsfr.router.routePtr(route_index);
return (try jsc.JSObject.create(.{
.part = try partToJS(global, route.part, allocator),
.page = try jsfr.fileIdToJS(global, route.file_page),
.layout = try jsfr.fileIdToJS(global, route.file_layout),
.page = jsfr.fileIdToJS(global, route.file_page),
.layout = jsfr.fileIdToJS(global, route.file_layout),
// .notFound = jsfr.fileIdToJS(global, route.file_not_found),
.children = brk: {
var len: usize = 0;
@@ -1295,8 +1295,8 @@ pub const JSFrameworkRouter = struct {
const route = jsfr.router.routePtr(route_index);
return (try jsc.JSObject.create(.{
.part = try partToJS(global, route.part, allocator),
.page = try jsfr.fileIdToJS(global, route.file_page),
.layout = try jsfr.fileIdToJS(global, route.file_layout),
.page = jsfr.fileIdToJS(global, route.file_page),
.layout = jsfr.fileIdToJS(global, route.file_layout),
// .notFound = jsfr.fileIdToJS(global, route.file_not_found),
.parent = if (route.parent.unwrap()) |parent|
try routeToJsonInverse(jsfr, global, parent, allocator)
@@ -1341,8 +1341,8 @@ pub const JSFrameworkRouter = struct {
var out = bun.String.init(rendered.items);
const obj = JSValue.createEmptyObject(global, 2);
obj.put(global, "kind", try bun.String.static(@tagName(parsed.kind)).toJS(global));
obj.put(global, "pattern", try out.transferToJS(global));
obj.put(global, "kind", bun.String.static(@tagName(parsed.kind)).toJS(global));
obj.put(global, "pattern", out.transferToJS(global));
return obj;
}
@@ -1352,7 +1352,7 @@ pub const JSFrameworkRouter = struct {
var it = pattern.iterate();
while (it.next()) |part| try part.toStringForInternalUse(rendered.writer());
var str = bun.String.cloneUTF8(rendered.items);
return try str.transferToJS(global);
return str.transferToJS(global);
}
fn partToJS(global: *JSGlobalObject, part: Part, temp_allocator: Allocator) !JSValue {
@@ -1360,7 +1360,7 @@ pub const JSFrameworkRouter = struct {
defer rendered.deinit();
try part.toStringForInternalUse(rendered.writer());
var str = bun.String.cloneUTF8(rendered.items);
return try str.transferToJS(global);
return str.transferToJS(global);
}
pub fn getFileIdForRouter(jsfr: *JSFrameworkRouter, abs_path: []const u8, _: Route.Index, _: Route.FileKind) !OpaqueFileId {
@@ -1377,7 +1377,7 @@ pub const JSFrameworkRouter = struct {
});
}
pub fn fileIdToJS(jsfr: *JSFrameworkRouter, global: *JSGlobalObject, id: OpaqueFileId.Optional) bun.JSError!JSValue {
pub fn fileIdToJS(jsfr: *JSFrameworkRouter, global: *JSGlobalObject, id: OpaqueFileId.Optional) JSValue {
return jsfr.files.items[(id.unwrap() orelse return .null).get()].toJS(global);
}
};

View File

@@ -182,7 +182,7 @@ pub fn buildWithVm(ctx: bun.cli.Command.Context, cwd: []const u8, vm: *VirtualMa
.pending => unreachable,
.fulfilled => |resolved| config: {
bun.assert(resolved.isUndefined());
const default = BakeGetDefaultExportFromModule(vm.global, try config_entry_point_string.toJS(vm.global));
const default = BakeGetDefaultExportFromModule(vm.global, config_entry_point_string.toJS(vm.global));
if (!default.isObject()) {
return global.throwInvalidArguments(
@@ -485,7 +485,7 @@ pub fn buildWithVm(ctx: bun.cli.Command.Context, cwd: []const u8, vm: *VirtualMa
for (router.types, 0..) |router_type, i| {
if (router_type.client_file.unwrap()) |client_file| {
const str = try (try bun.String.createFormat("{s}{s}", .{
const str = (try bun.String.createFormat("{s}{s}", .{
public_path,
pt.outputFile(client_file).dest_path,
})).toJS(global);
@@ -542,7 +542,7 @@ pub fn buildWithVm(ctx: bun.cli.Command.Context, cwd: []const u8, vm: *VirtualMa
bun.assert(output_file.dest_path[0] != '.');
// CSS chunks must be in contiguous order!!
bun.assert(output_file.loader.isCSS());
str.* = try (try bun.String.createFormat("{s}{s}", .{ public_path, output_file.dest_path })).toJS(global);
str.* = (try bun.String.createFormat("{s}{s}", .{ public_path, output_file.dest_path })).toJS(global);
}
// Route URL patterns with parameter placeholders.
@@ -659,10 +659,10 @@ pub fn buildWithVm(ctx: bun.cli.Command.Context, cwd: []const u8, vm: *VirtualMa
// Init the items
var pattern_string = bun.String.cloneUTF8(pattern.slice());
defer pattern_string.deref();
try route_patterns.putIndex(global, @intCast(nav_index), try pattern_string.toJS(global));
try route_patterns.putIndex(global, @intCast(nav_index), pattern_string.toJS(global));
var src_path = bun.String.cloneUTF8(bun.path.relative(cwd, pt.inputFile(main_file_route_index).absPath()));
try route_source_files.putIndex(global, @intCast(nav_index), try src_path.transferToJS(global));
try route_source_files.putIndex(global, @intCast(nav_index), src_path.transferToJS(global));
try route_nested_files.putIndex(global, @intCast(nav_index), file_list);
try route_type_and_flags.putIndex(global, @intCast(nav_index), JSValue.jsNumberFromInt32(@bitCast(TypeAndFlags{
@@ -993,7 +993,7 @@ pub const PerThread = struct {
return try loadModule(
pt.vm,
pt.vm.global,
try pt.module_keys[id.get()].toJS(pt.vm.global),
pt.module_keys[id.get()].toJS(pt.vm.global),
);
}
@@ -1010,7 +1010,7 @@ pub const PerThread = struct {
try pt.all_server_files.putIndex(
pt.vm.global,
@intCast(id.get()),
try pt.module_keys[id.get()].toJS(pt.vm.global),
pt.module_keys[id.get()].toJS(pt.vm.global),
);
}

View File

@@ -13,18 +13,21 @@ pub const Run = struct {
var run: Run = undefined;
pub fn bootStandalone(ctx: Command.Context, entry_path: string, graph_ptr: *bun.StandaloneModuleGraph) !void {
pub fn bootStandalone(ctx: Command.Context, entry_path: string, graph: bun.StandaloneModuleGraph) !void {
jsc.markBinding(@src());
bun.jsc.initialize(false);
bun.analytics.Features.standalone_executable += 1;
const graph_ptr = try bun.default_allocator.create(bun.StandaloneModuleGraph);
graph_ptr.* = graph;
graph_ptr.set();
js_ast.Expr.Data.Store.create();
js_ast.Stmt.Data.Store.create();
const arena = Arena.init();
// Load bunfig.toml unless disabled by compile flags
// Note: config loading with execArgv is handled earlier in cli.zig via loadConfig
if (!ctx.debug.loaded_bunfig and !graph_ptr.flags.disable_autoload_bunfig) {
if (!ctx.debug.loaded_bunfig and !graph.flags.disable_autoload_bunfig) {
try bun.cli.Arguments.loadConfigPath(ctx.allocator, true, "bunfig.toml", ctx, .RunCommand);
}
@@ -84,7 +87,7 @@ pub const Run = struct {
// If .env loading is disabled, only load process env vars
// Otherwise, load all .env files
if (graph_ptr.flags.disable_default_env_files) {
if (graph.flags.disable_default_env_files) {
b.options.env.behavior = .disable;
} else {
b.options.env.behavior = .load_all_without_inlining;
@@ -92,8 +95,8 @@ pub const Run = struct {
// Control loading of tsconfig.json and package.json at runtime
// By default, these are disabled for standalone executables
b.resolver.opts.load_tsconfig_json = !graph_ptr.flags.disable_autoload_tsconfig;
b.resolver.opts.load_package_json = !graph_ptr.flags.disable_autoload_package_json;
b.resolver.opts.load_tsconfig_json = !graph.flags.disable_autoload_tsconfig;
b.resolver.opts.load_package_json = !graph.flags.disable_autoload_package_json;
b.configureDefines() catch {
failWithBuildError(vm);
@@ -283,25 +286,11 @@ pub const Run = struct {
vm.cpu_profiler_config = CPUProfiler.CPUProfilerConfig{
.name = cpu_prof_opts.name,
.dir = cpu_prof_opts.dir,
.md_format = cpu_prof_opts.md_format,
.json_format = cpu_prof_opts.json_format,
};
CPUProfiler.startCPUProfiler(vm.jsc_vm);
bun.analytics.Features.cpu_profile += 1;
}
// Set up heap profiler config if enabled (actual profiling happens on exit)
if (this.ctx.runtime_options.heap_prof.enabled) {
const heap_prof_opts = this.ctx.runtime_options.heap_prof;
vm.heap_profiler_config = HeapProfiler.HeapProfilerConfig{
.name = heap_prof_opts.name,
.dir = heap_prof_opts.dir,
.text_format = heap_prof_opts.text_format,
};
bun.analytics.Features.heap_snapshot += 1;
}
this.addConditionalGlobals();
do_redis_preconnect: {
// This must happen within the API lock, which is why it's not in the "doPreconnect" function
@@ -563,7 +552,6 @@ const VirtualMachine = jsc.VirtualMachine;
const string = []const u8;
const CPUProfiler = @import("./bun.js/bindings/BunCPUProfiler.zig");
const HeapProfiler = @import("./bun.js/bindings/BunHeapProfiler.zig");
const options = @import("./options.zig");
const std = @import("std");
const Command = @import("./cli.zig").Command;

View File

@@ -417,7 +417,7 @@ pub const AsyncModule = struct {
jsc.markBinding(@src());
var specifier = specifier_;
var referrer = referrer_;
var scope: jsc.TopExceptionScope = undefined;
var scope: jsc.CatchScope = undefined;
scope.init(globalThis, @src());
defer {
specifier.deref();

View File

@@ -94,7 +94,7 @@ pub const BuildMessage = struct {
_: *jsc.CallFrame,
) bun.JSError!jsc.JSValue {
var object = jsc.JSValue.createEmptyObject(globalThis, 4);
object.put(globalThis, ZigString.static("name"), try bun.String.static("BuildMessage").toJS(globalThis));
object.put(globalThis, ZigString.static("name"), bun.String.static("BuildMessage").toJS(globalThis));
object.put(globalThis, ZigString.static("position"), this.getPosition(globalThis));
object.put(globalThis, ZigString.static("message"), this.getMessage(globalThis));
object.put(globalThis, ZigString.static("level"), this.getLevel(globalThis));

View File

@@ -316,7 +316,6 @@ pub const TestReporterAgent = struct {
pub const Handle = opaque {
extern "c" fn Bun__TestReporterAgentReportTestFound(agent: *Handle, callFrame: *jsc.CallFrame, testId: c_int, name: *bun.String, item_type: TestType, parentId: c_int) void;
extern "c" fn Bun__TestReporterAgentReportTestFoundWithLocation(agent: *Handle, testId: c_int, name: *bun.String, item_type: TestType, parentId: c_int, sourceURL: *bun.String, line: c_int) void;
extern "c" fn Bun__TestReporterAgentReportTestStart(agent: *Handle, testId: c_int) void;
extern "c" fn Bun__TestReporterAgentReportTestEnd(agent: *Handle, testId: c_int, bunTestStatus: TestStatus, elapsed: f64) void;
@@ -324,10 +323,6 @@ pub const TestReporterAgent = struct {
Bun__TestReporterAgentReportTestFound(this, callFrame, testId, name, item_type, parentId);
}
pub fn reportTestFoundWithLocation(this: *Handle, testId: i32, name: *bun.String, item_type: TestType, parentId: i32, sourceURL: *bun.String, line: i32) void {
Bun__TestReporterAgentReportTestFoundWithLocation(this, testId, name, item_type, parentId, sourceURL, line);
}
pub fn reportTestStart(this: *Handle, testId: c_int) void {
Bun__TestReporterAgentReportTestStart(this, testId);
}
@@ -340,88 +335,8 @@ pub const TestReporterAgent = struct {
if (VirtualMachine.get().debugger) |*debugger| {
debug("enable", .{});
debugger.test_reporter_agent.handle = agent;
// Retroactively report any tests that were already discovered before the debugger connected
retroactivelyReportDiscoveredTests(agent);
}
}
/// When TestReporter.enable is called after test collection has started/finished,
/// we need to retroactively assign test IDs and report discovered tests.
fn retroactivelyReportDiscoveredTests(agent: *Handle) void {
const Jest = jsc.Jest.Jest;
const runner = Jest.runner orelse return;
const active_file = runner.bun_test_root.active_file.get() orelse return;
// Only report if we're in collection or execution phase (tests have been discovered)
switch (active_file.phase) {
.collection, .execution => {},
.done => return,
}
// Get the file path for source location info
const file_path = runner.files.get(active_file.file_id).source.path.text;
var source_url = bun.String.init(file_path);
// Track the maximum ID we assign
var max_id: i32 = 0;
// Recursively report all discovered tests starting from root scope
const root_scope = active_file.collection.root_scope;
retroactivelyReportScope(agent, root_scope, -1, &max_id, &source_url);
debug("retroactively reported {} tests", .{max_id});
}
fn retroactivelyReportScope(agent: *Handle, scope: *bun_test.DescribeScope, parent_id: i32, max_id: *i32, source_url: *bun.String) void {
for (scope.entries.items) |*entry| {
switch (entry.*) {
.describe => |describe| {
// Only report and assign ID if not already assigned
if (describe.base.test_id_for_debugger == 0) {
max_id.* += 1;
const test_id = max_id.*;
// Assign the ID so start/end events will fire during execution
describe.base.test_id_for_debugger = test_id;
var name = bun.String.init(describe.base.name orelse "(unnamed)");
agent.reportTestFoundWithLocation(
test_id,
&name,
.describe,
parent_id,
source_url,
@intCast(describe.base.line_no),
);
// Recursively report children with this describe as parent
retroactivelyReportScope(agent, describe, test_id, max_id, source_url);
} else {
// Already has ID, just recurse with existing ID as parent
retroactivelyReportScope(agent, describe, describe.base.test_id_for_debugger, max_id, source_url);
}
},
.test_callback => |test_entry| {
// Only report and assign ID if not already assigned
if (test_entry.base.test_id_for_debugger == 0) {
max_id.* += 1;
const test_id = max_id.*;
// Assign the ID so start/end events will fire during execution
test_entry.base.test_id_for_debugger = test_id;
var name = bun.String.init(test_entry.base.name orelse "(unnamed)");
agent.reportTestFoundWithLocation(
test_id,
&name,
.@"test",
parent_id,
source_url,
@intCast(test_entry.base.line_no),
);
}
},
}
}
}
const bun_test = jsc.Jest.bun_test;
pub export fn Bun__TestReporterAgentDisable(_: *Handle) void {
if (VirtualMachine.get().debugger) |*debugger| {
debug("disable", .{});

View File

@@ -65,7 +65,6 @@ pub const HardcodedModule = enum {
@"node:trace_events",
@"node:repl",
@"node:inspector",
@"node:inspector/promises",
@"node:http2",
@"node:diagnostics_channel",
@"node:dgram",
@@ -122,7 +121,6 @@ pub const HardcodedModule = enum {
.{ "node:http2", .@"node:http2" },
.{ "node:https", .@"node:https" },
.{ "node:inspector", .@"node:inspector" },
.{ "node:inspector/promises", .@"node:inspector/promises" },
.{ "node:module", .@"node:module" },
.{ "node:net", .@"node:net" },
.{ "node:readline", .@"node:readline" },
@@ -232,7 +230,6 @@ pub const HardcodedModule = enum {
nodeEntry("node:http2"),
nodeEntry("node:https"),
nodeEntry("node:inspector"),
nodeEntry("node:inspector/promises"),
nodeEntry("node:module"),
nodeEntry("node:net"),
nodeEntry("node:os"),
@@ -288,7 +285,6 @@ pub const HardcodedModule = enum {
nodeEntry("http2"),
nodeEntry("https"),
nodeEntry("inspector"),
nodeEntry("inspector/promises"),
nodeEntry("module"),
nodeEntry("net"),
nodeEntry("os"),
@@ -370,6 +366,10 @@ pub const HardcodedModule = enum {
.{ "bun:internal-for-testing", .{ .path = "bun:internal-for-testing" } },
.{ "ffi", .{ .path = "bun:ffi" } },
// inspector/promises is not implemented, it is an alias of inspector
.{ "node:inspector/promises", .{ .path = "node:inspector", .node_builtin = true } },
.{ "inspector/promises", .{ .path = "node:inspector", .node_builtin = true } },
// Thirdparty packages we override
.{ "@vercel/fetch", .{ .path = "@vercel/fetch" } },
.{ "isomorphic-fetch", .{ .path = "isomorphic-fetch" } },
@@ -394,7 +394,12 @@ pub const HardcodedModule = enum {
.{ "vitest", .{ .path = "bun:test" } },
};
const node_aliases = bun.ComptimeStringMap(Alias, common_alias_kvs);
const node_extra_alias_kvs = [_]struct { string, Alias }{
nodeEntry("node:inspector/promises"),
nodeEntry("inspector/promises"),
};
const node_aliases = bun.ComptimeStringMap(Alias, common_alias_kvs ++ node_extra_alias_kvs);
pub const bun_aliases = bun.ComptimeStringMap(Alias, common_alias_kvs ++ bun_extra_alias_kvs);
const bun_test_aliases = bun.ComptimeStringMap(Alias, common_alias_kvs ++ bun_extra_alias_kvs ++ bun_test_extra_alias_kvs);

View File

@@ -30,20 +30,45 @@ pub fn resetArena(this: *ModuleLoader, jsc_vm: *VirtualMachine) void {
}
}
pub fn resolveEmbeddedFile(vm: *VirtualMachine, path_buf: *bun.PathBuffer, input_path: []const u8, extname: []const u8) ?[]const u8 {
fn resolveEmbeddedNodeFileViaMemfd(file: *bun.StandaloneModuleGraph.File, path_buffer: *bun.PathBuffer, fd: *i32) ![]const u8 {
var label_buf: [128]u8 = undefined;
const count = struct {
pub var counter = std.atomic.Value(u32).init(0);
pub fn get() u32 {
return counter.fetchAdd(1, .seq_cst);
}
}.get();
const label = std.fmt.bufPrintZ(&label_buf, "node-addon-{d}", .{count}) catch "";
const memfd = try bun.sys.memfd_create(label, .executable).unwrap();
errdefer memfd.close();
fd.* = @intCast(memfd.cast());
errdefer fd.* = -1;
try bun.sys.ftruncate(memfd, @intCast(file.contents.len)).unwrap();
try bun.sys.File.writeAll(.{ .handle = memfd }, file.contents).unwrap();
return try std.fmt.bufPrint(path_buffer, "/proc/self/fd/{d}", .{memfd.cast()});
}
pub fn resolveEmbeddedFile(vm: *VirtualMachine, path_buf: *bun.PathBuffer, linux_memfd: *i32, input_path: []const u8, extname: []const u8) ?[]const u8 {
if (input_path.len == 0) return null;
var graph = vm.standalone_module_graph orelse return null;
const file = graph.find(input_path) orelse return null;
if (comptime Environment.isLinux) {
// TODO: use /proc/fd/12346 instead! Avoid the copy!
// Best-effort: use memfd to avoid hitting the disk
if (resolveEmbeddedNodeFileViaMemfd(file, path_buf, linux_memfd)) |path| {
return path;
} else |_| {
// fall back to temp file
}
}
// atomically write to a tmpfile and then move it to the final destination
const tmpname_buf = bun.path_buffer_pool.get();
defer bun.path_buffer_pool.put(tmpname_buf);
const tmpfilename = bun.fs.FileSystem.tmpname(extname, tmpname_buf, bun.hash(file.name)) catch return null;
const tmpdir: bun.FD = .fromStdDir(bun.fs.FileSystem.instance.tmpdir() catch return null);
// First we open the tmpfile, to avoid any other work in the event of failure.
@@ -100,7 +125,7 @@ pub fn transpileSourceCode(
const disable_transpilying = comptime flags.disableTranspiling();
if (comptime disable_transpilying) {
if (!(loader.isJavaScriptLike() or loader == .toml or loader == .yaml or loader == .json5 or loader == .text or loader == .json or loader == .jsonc)) {
if (!(loader.isJavaScriptLike() or loader == .toml or loader == .yaml or loader == .text or loader == .json or loader == .jsonc)) {
// Don't print "export default <file path>"
return ResolvedSource{
.allocator = null,
@@ -112,7 +137,7 @@ pub fn transpileSourceCode(
}
switch (loader) {
.js, .jsx, .ts, .tsx, .json, .jsonc, .toml, .yaml, .json5, .text => {
.js, .jsx, .ts, .tsx, .json, .jsonc, .toml, .yaml, .text => {
// Ensure that if there was an ASTMemoryAllocator in use, it's not used anymore.
var ast_scope = js_ast.ASTMemoryAllocator.Scope{};
ast_scope.enter();
@@ -361,7 +386,7 @@ pub fn transpileSourceCode(
};
}
if (loader == .json or loader == .jsonc or loader == .toml or loader == .yaml or loader == .json5) {
if (loader == .json or loader == .jsonc or loader == .toml or loader == .yaml) {
if (parse_result.empty) {
return ResolvedSource{
.allocator = null,
@@ -1301,14 +1326,14 @@ pub const FetchFlags = enum {
};
/// Support embedded .node files
export fn Bun__resolveEmbeddedNodeFile(vm: *VirtualMachine, in_out_str: *bun.String) bool {
export fn Bun__resolveEmbeddedNodeFile(vm: *VirtualMachine, in_out_str: *bun.String, linux_memfd_fd_to_close: *i32) bool {
if (vm.standalone_module_graph == null) return false;
const input_path = in_out_str.toUTF8(bun.default_allocator);
defer input_path.deinit();
const path_buf = bun.path_buffer_pool.get();
defer bun.path_buffer_pool.put(path_buf);
const result = ModuleLoader.resolveEmbeddedFile(vm, path_buf, input_path.slice(), "node") orelse return false;
const path_buffer = bun.path_buffer_pool.get();
defer bun.path_buffer_pool.put(path_buffer);
const result = ModuleLoader.resolveEmbeddedFile(vm, path_buffer, linux_memfd_fd_to_close, input_path.slice(), "node") orelse return false;
in_out_str.* = bun.String.cloneUTF8(result);
return true;
}

Some files were not shown because too many files have changed in this diff Show More