Compare commits


68 Commits

Author SHA1 Message Date
Ciro Spaciari MacBook
a706902996 Merge remote-tracking branch 'origin/main' into ciro/fetch-upgrade-node-http 2026-01-08 15:50:37 -08:00
Ciro Spaciari MacBook
2836cd49aa refactor(http): move ClientRequest closures to module scope
Refactor the ClientRequest constructor to eliminate anonymous function
closures that capture the entire constructor scope. This improves memory
characteristics by allowing earlier garbage collection.

Changes:
- Add symbols to internal/http.ts for mutable state: kWriteCount,
  kResolveNextChunk, kFetching, kOnEnd, kHandleResponse
- Move emit helpers to module scope: maybeEmitSocketClientRequest,
  maybeEmitPrefinishClientRequest, maybeEmitFinishClientRequest,
  maybeEmitCloseClientRequest
- Move event handlers to module scope: socketCloseListenerClientRequest,
  onAbortClientRequest
- Move write functions to module scope: pushChunkClientRequest,
  writeInternalClientRequest, writeClientRequest, endClientRequest
- Move utility functions to module scope: flushHeadersClientRequest,
  destroyClientRequest, abortClientRequest, ensureTlsClientRequest,
  signalAbortHandler, clearTimeoutClientRequest
- Move send/fetch functions to module scope: sendClientRequest,
  bodyIteratorClientRequest, goClientRequest, startFetchClientRequest,
  iterateCandidatesClientRequest, getURLClientRequest,
  failDNSLookupClientRequest

The streaming body generator still uses a minimal closure that only
captures 'self', with all mutable state stored in symbol properties.
2026-01-08 15:40:54 -08:00
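To make the commit's memory argument concrete, here is a minimal TypeScript sketch of the pattern the message describes — symbol-keyed mutable state plus module-scope handlers — using hypothetical names, not Bun's actual internals:

```ts
// Sketch only: per-instance mutable state lives behind symbols, and the
// handler is hoisted to module scope so it closes over nothing and instead
// receives the instance explicitly. Names here are illustrative.
const kWriteCount = Symbol("kWriteCount");
const kFetching = Symbol("kFetching");

interface RequestLike {
  [kWriteCount]: number;
  [kFetching]: boolean;
}

// Module-scope handler: it captures no constructor locals, so a finished
// constructor frame can be garbage-collected immediately.
function onWrite(self: RequestLike, chunk: string): void {
  self[kWriteCount] += 1;
  console.log(`write #${self[kWriteCount]}: ${chunk.length} bytes`);
}

function makeRequest(): RequestLike {
  // Before such a refactor this would typically be an arrow function
  // closing over the whole constructor scope; now only `self` is threaded
  // through to the module-scope helpers.
  return { [kWriteCount]: 0, [kFetching]: false };
}

const req = makeRequest();
onWrite(req, "hello");
```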
Ciro Spaciari
9ffb9763f5 Merge branch 'main' into ciro/fetch-upgrade-node-http 2025-12-01 11:49:48 -08:00
Ciro Spaciari
f02ebff5d8 Merge branch 'main' into ciro/fetch-upgrade-node-http 2025-11-19 11:35:39 -08:00
Ciro Spaciari
f358f77512 revert so fetch is keeped alive 2025-11-14 15:44:10 -08:00
Ciro Spaciari
28819a7e49 bind only 1 2025-11-14 15:34:08 -08:00
Ciro Spaciari
2c719f7d09 address review 2025-11-14 15:14:12 -08:00
Ciro Spaciari
fbee1c8e74 Merge branch 'main' into ciro/fetch-upgrade-node-http 2025-11-14 14:57:58 -08:00
Ciro Spaciari
8fc82cf22a address review 2025-11-14 14:57:29 -08:00
Ciro Spaciari
06922bcfae Merge branch 'main' into ciro/fetch-upgrade-node-http 2025-11-14 13:07:38 -08:00
Ciro Spaciari
4f0866d7b8 Merge branch 'main' into ciro/fetch-upgrade-node-http 2025-11-13 15:12:28 -08:00
Ciro Spaciari
e475055148 fix duplicate declare 2025-11-13 14:47:36 -08:00
autofix-ci[bot]
58330503f2 [autofix.ci] apply automated fixes 2025-11-13 22:44:53 +00:00
Ciro Spaciari
a2bfb02826 Merge branch 'main' into ciro/fetch-upgrade-node-http 2025-11-13 14:43:13 -08:00
Ciro Spaciari
f052b2d944 Merge branch 'main' into ciro/fetch-upgrade-node-http 2025-09-05 11:54:56 -07:00
Ciro Spaciari
6333dd44cb fixes #20547 2025-09-04 17:21:07 -07:00
Ciro Spaciari
5de400ee0c Merge branch 'ciro/fetch-upgrade' into ciro/fetch-upgrade-node-http 2025-09-04 17:16:16 -07:00
Ciro Spaciari
16b632e249 missed that 2025-09-04 17:15:40 -07:00
Ciro Spaciari
cedbe28d01 avoid chunked here 2025-09-04 17:15:40 -07:00
Ciro Spaciari
7f36e5c0ac ok 2025-09-04 17:15:40 -07:00
Ciro Spaciari
aaae722c0d we need to close at some point 2025-09-04 17:15:40 -07:00
Ciro Spaciari
b7ce235e93 dont break stuff 2025-09-04 17:15:40 -07:00
Ciro Spaciari
e10c995139 opsie 2025-09-04 17:15:40 -07:00
Ciro Spaciari
b05964def4 more generic 2025-09-04 17:15:40 -07:00
autofix-ci[bot]
4b61f99adf [autofix.ci] apply automated fixes 2025-09-04 17:15:40 -07:00
Ciro Spaciari
6ce1e14e5c ok 2025-09-04 17:15:40 -07:00
Ciro Spaciari
27ec84905d Merge branch 'ciro/fetch-upgrade' into ciro/fetch-upgrade-node-http 2025-09-04 17:10:49 -07:00
Ciro Spaciari
b0dac57298 missed that 2025-09-04 17:10:40 -07:00
Ciro Spaciari
c33fc879dc more 2025-09-04 17:10:18 -07:00
Ciro Spaciari
36a2fb6093 more 2025-09-04 17:10:04 -07:00
Ciro Spaciari
2560c08922 Merge branch 'ciro/fetch-upgrade' into ciro/fetch-upgrade-node-http 2025-09-04 16:37:10 -07:00
Ciro Spaciari
4f0d126d75 avoid chunked here 2025-09-04 16:36:54 -07:00
Ciro Spaciari
3a5c3ac657 ok 2025-09-04 15:53:31 -07:00
Ciro Spaciari
e782f9ebbd Merge branch 'ciro/fetch-upgrade' into ciro/fetch-upgrade-node-http 2025-09-04 15:45:14 -07:00
Ciro Spaciari
3a10be5191 ok 2025-09-04 15:45:06 -07:00
autofix-ci[bot]
b49e20c193 [autofix.ci] apply automated fixes 2025-09-04 22:40:13 +00:00
Ciro Spaciari
19491b9db3 minimal test 2025-09-04 15:38:49 -07:00
Ciro Spaciari
79bbada1ed linter 2025-09-04 15:27:35 -07:00
Ciro Spaciari
ccf021fab9 Merge branch 'ciro/fetch-upgrade' into ciro/fetch-upgrade-node-http 2025-09-04 15:22:02 -07:00
Ciro Spaciari
8df4827833 we need to close at some point 2025-09-04 15:21:53 -07:00
Ciro Spaciari
0609fa5122 dont break stuff 2025-09-04 15:21:53 -07:00
Ciro Spaciari
ed21db9414 opsie 2025-09-04 15:21:53 -07:00
Ciro Spaciari
8727416808 more generic 2025-09-04 15:21:53 -07:00
autofix-ci[bot]
c7f6623878 [autofix.ci] apply automated fixes 2025-09-04 15:21:53 -07:00
Ciro Spaciari
1503715c0e ok 2025-09-04 15:21:53 -07:00
Ciro Spaciari
4cbe315002 Merge branch 'ciro/fetch-upgrade' into ciro/fetch-upgrade-node-http 2025-09-04 15:21:38 -07:00
Ciro Spaciari
fbdde3a89c we need to close at some point 2025-09-04 15:21:11 -07:00
autofix-ci[bot]
ed037cece0 [autofix.ci] apply automated fixes 2025-09-04 22:14:29 +00:00
Ciro Spaciari
e2767b970d dont break stuff 2025-09-04 15:07:34 -07:00
Ciro Spaciari
68c5a293eb opsie 2025-09-04 15:07:34 -07:00
Ciro Spaciari
6b5e41a0b3 more generic 2025-09-04 15:07:34 -07:00
autofix-ci[bot]
50bc82a97e [autofix.ci] apply automated fixes 2025-09-04 15:07:34 -07:00
Ciro Spaciari
345666b194 ok 2025-09-04 15:07:34 -07:00
Ciro Spaciari
01785cf3b7 experiment 2025-09-04 15:06:20 -07:00
autofix-ci[bot]
56f1d991cc [autofix.ci] apply automated fixes (attempt 3/3) 2025-09-04 20:05:33 +00:00
autofix-ci[bot]
0853c555b2 [autofix.ci] apply automated fixes (attempt 2/3) 2025-09-04 20:03:03 +00:00
autofix-ci[bot]
ae42b8f045 [autofix.ci] apply automated fixes 2025-09-04 20:01:32 +00:00
Ciro Spaciari
e5abcf45ef dont break stuff 2025-09-04 12:59:30 -07:00
autofix-ci[bot]
0e8f4ba114 [autofix.ci] apply automated fixes (attempt 3/3) 2025-09-04 19:31:15 +00:00
autofix-ci[bot]
efda53ea6f [autofix.ci] apply automated fixes (attempt 2/3) 2025-09-04 19:29:16 +00:00
autofix-ci[bot]
d561b17b46 [autofix.ci] apply automated fixes 2025-09-04 19:27:41 +00:00
Ciro Spaciari
1dc1d4795e opsie 2025-09-04 12:25:02 -07:00
autofix-ci[bot]
1cef7f29e9 [autofix.ci] apply automated fixes (attempt 3/3) 2025-09-04 17:44:24 +00:00
autofix-ci[bot]
c38d674cbf [autofix.ci] apply automated fixes (attempt 2/3) 2025-09-04 17:42:40 +00:00
autofix-ci[bot]
e75b9aa01b [autofix.ci] apply automated fixes 2025-09-04 17:40:37 +00:00
Ciro Spaciari
8ad5f0bb57 more generic 2025-09-04 10:37:51 -07:00
autofix-ci[bot]
f51aed3f40 [autofix.ci] apply automated fixes 2025-09-04 10:37:51 -07:00
Ciro Spaciari
b1990a484f ok 2025-09-04 10:37:51 -07:00
510 changed files with 6717 additions and 145054 deletions

View File

@@ -114,8 +114,6 @@ const buildPlatforms = [
{ os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.22" },
{ os: "windows", arch: "x64", release: "2019" },
{ os: "windows", arch: "x64", baseline: true, release: "2019" },
// TODO: Enable when Windows ARM64 CI runners are ready
// { os: "windows", arch: "aarch64", release: "2019" },
];
/**
@@ -138,8 +136,6 @@ const testPlatforms = [
{ os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.22", tier: "latest" },
{ os: "windows", arch: "x64", release: "2019", tier: "oldest" },
{ os: "windows", arch: "x64", release: "2019", baseline: true, tier: "oldest" },
// TODO: Enable when Windows ARM64 CI runners are ready
// { os: "windows", arch: "aarch64", release: "2019", tier: "oldest" },
];
/**

View File

@@ -36,20 +36,16 @@ function Log-Debug {
}
}
# Detect system architecture
$script:IsARM64 = [System.Runtime.InteropServices.RuntimeInformation]::OSArchitecture -eq [System.Runtime.InteropServices.Architecture]::Arm64
$script:VsArch = if ($script:IsARM64) { "arm64" } else { "amd64" }
# Load Visual Studio environment if not already loaded
function Ensure-VSEnvironment {
if ($null -eq $env:VSINSTALLDIR) {
Log-Info "Loading Visual Studio environment for $script:VsArch..."
Log-Info "Loading Visual Studio environment..."
$vswhere = "C:\Program Files (x86)\Microsoft Visual Studio\Installer\vswhere.exe"
if (!(Test-Path $vswhere)) {
throw "Command not found: vswhere (did you install Visual Studio?)"
}
$vsDir = & $vswhere -prerelease -latest -property installationPath
if ($null -eq $vsDir) {
$vsDir = Get-ChildItem -Path "C:\Program Files\Microsoft Visual Studio\2022" -Directory -ErrorAction SilentlyContinue
@@ -58,20 +54,20 @@ function Ensure-VSEnvironment {
}
$vsDir = $vsDir.FullName
}
Push-Location $vsDir
try {
$vsShell = Join-Path -Path $vsDir -ChildPath "Common7\Tools\Launch-VsDevShell.ps1"
. $vsShell -Arch $script:VsArch -HostArch $script:VsArch
. $vsShell -Arch amd64 -HostArch amd64
} finally {
Pop-Location
}
Log-Success "Visual Studio environment loaded"
}
if ($env:VSCMD_ARG_TGT_ARCH -eq "x86") {
throw "Visual Studio environment is targeting 32 bit x86, but only 64-bit architectures (x64/arm64) are supported."
throw "Visual Studio environment is targeting 32 bit, but only 64 bit is supported."
}
}
@@ -190,10 +186,8 @@ function Install-KeyLocker {
}
# Download MSI installer
# Note: KeyLocker tools currently only available for x64, but works on ARM64 via emulation
$msiArch = "x64"
$msiUrl = "https://bun-ci-assets.bun.sh/Keylockertools-windows-${msiArch}.msi"
$msiPath = Join-Path $env:TEMP "Keylockertools-windows-${msiArch}.msi"
$msiUrl = "https://bun-ci-assets.bun.sh/Keylockertools-windows-x64.msi"
$msiPath = Join-Path $env:TEMP "Keylockertools-windows-x64.msi"
Log-Info "Downloading MSI from: $msiUrl"
Log-Info "Downloading to: $msiPath"

View File

@@ -219,9 +219,6 @@ function create_release() {
bun-windows-x64-profile.zip
bun-windows-x64-baseline.zip
bun-windows-x64-baseline-profile.zip
# TODO: Enable when Windows ARM64 CI runners are ready
# bun-windows-aarch64.zip
# bun-windows-aarch64-profile.zip
)
function upload_artifact() {

View File

@@ -6,7 +6,8 @@ To do that:
- git fetch upstream
- git merge upstream main
- Fix the merge conflicts
- bun build.ts debug
- cd ../../ (back to bun)
- make jsc-build (this will take about 7 minutes)
- While it compiles, in another task review the JSC commits between the last version of Webkit and the new version. Write up a summary of the webkit changes in a file called "webkit-changes.md"
- bun run build:local (build a build of Bun with the new Webkit, make sure it compiles)
- After making sure it compiles, run some code to make sure things work. something like ./build/debug-local/bun-debug --print '42' should be all you need
@@ -20,7 +21,3 @@ To do that:
- commit + push (without adding the webkit-changes.md file)
- create PR titled "Upgrade Webkit to the <commit-sha>", paste your webkit-changes.md into the PR description
- delete the webkit-changes.md file
Things to check for a successful upgrade:
- Did JSType in vendor/WebKit/Source/JavaScriptCore have any recent changes? Does the enum values align with whats present in src/bun.js/bindings/JSType.zig?
- Were there any changes to the webcore code generator? If there are C++ compilation errors, check for differences in some of the generated code in like vendor/WebKit/source/WebCore/bindings/scripts/test/JS/
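The smoke test mentioned in the checklist above can be scripted. A minimal Bun sketch, assuming the debug build lands at `./build/debug-local/bun-debug` as the instructions state (everything else here is illustrative):

```ts
// Smoke-test sketch for the "run some code to make sure things work" step.
import { $ } from "bun";

const out = await $`./build/debug-local/bun-debug --print '42'`.text();
if (out.trim() !== "42") {
  throw new Error(`debug build smoke test failed, got: ${out}`);
}
console.log("debug build prints 42 — looks healthy");
```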

View File

@@ -88,7 +88,7 @@ jobs:
commit-message: "deps: update c-ares to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update c-ares to ${{ steps.check-version.outputs.tag }}"
delete-branch: true
branch: deps/update-cares
branch: deps/update-cares-${{ github.run_number }}
body: |
## What does this PR do?

View File

@@ -91,7 +91,7 @@ jobs:
commit-message: "deps: update hdrhistogram to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update hdrhistogram to ${{ steps.check-version.outputs.tag }}"
delete-branch: true
branch: deps/update-hdrhistogram
branch: deps/update-hdrhistogram-${{ github.run_number }}
body: |
## What does this PR do?

View File

@@ -107,7 +107,7 @@ jobs:
commit-message: "deps: update highway to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update highway to ${{ steps.check-version.outputs.tag }}"
delete-branch: true
branch: deps/update-highway
branch: deps/update-highway-${{ github.run_number }}
body: |
## What does this PR do?

View File

@@ -88,7 +88,7 @@ jobs:
commit-message: "deps: update libarchive to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update libarchive to ${{ steps.check-version.outputs.tag }}"
delete-branch: true
branch: deps/update-libarchive
branch: deps/update-libarchive-${{ github.run_number }}
body: |
## What does this PR do?

View File

@@ -88,7 +88,7 @@ jobs:
commit-message: "deps: update libdeflate to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update libdeflate to ${{ steps.check-version.outputs.tag }}"
delete-branch: true
branch: deps/update-libdeflate
branch: deps/update-libdeflate-${{ github.run_number }}
body: |
## What does this PR do?

View File

@@ -100,7 +100,7 @@ jobs:
commit-message: "deps: update lolhtml to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update lolhtml to ${{ steps.check-version.outputs.tag }}"
delete-branch: true
branch: deps/update-lolhtml
branch: deps/update-lolhtml-${{ github.run_number }}
body: |
## What does this PR do?

View File

@@ -105,7 +105,7 @@ jobs:
commit-message: "deps: update lshpack to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update lshpack to ${{ steps.check-version.outputs.tag }}"
delete-branch: true
branch: deps/update-lshpack
branch: deps/update-lshpack-${{ github.run_number }}
body: |
## What does this PR do?

View File

@@ -74,7 +74,7 @@ jobs:
```
${{ env.changed_files }}
```
branch: certs/update-root-certs
branch: certs/update-root-certs-${{ github.run_number }}
base: main
delete-branch: true
labels:

View File

@@ -83,7 +83,7 @@ jobs:
commit-message: "deps: update sqlite to ${{ steps.check-version.outputs.latest }}"
title: "deps: update sqlite to ${{ steps.check-version.outputs.latest }}"
delete-branch: true
branch: deps/update-sqlite
branch: deps/update-sqlite-${{ steps.check-version.outputs.latest }}
body: |
## What does this PR do?

View File

@@ -68,7 +68,7 @@ jobs:
commit-message: "deps: update ${{ matrix.package }} to ${{ steps.check-version.outputs.latest }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update ${{ matrix.package }} to ${{ steps.check-version.outputs.latest }}"
delete-branch: true
branch: deps/update-${{ matrix.package }}
branch: deps/update-${{ matrix.package }}-${{ github.run_number }}
body: |
## What does this PR do?

View File

@@ -88,7 +88,7 @@ jobs:
commit-message: "deps: update zstd to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update zstd to ${{ steps.check-version.outputs.tag }}"
delete-branch: true
branch: deps/update-zstd
branch: deps/update-zstd-${{ github.run_number }}
body: |
## What does this PR do?

.gitignore vendored
View File

@@ -1,5 +1,4 @@
.claude/settings.local.json
.direnv
.DS_Store
.env
.envrc

View File

@@ -10,8 +10,6 @@ This is the Bun repository - an all-in-one JavaScript runtime & toolkit designed
- **Run tests with your debug build**: `bun bd test <test-file>`
- **CRITICAL**: Never use `bun test` directly - it won't include your changes
- **Run any command with debug build**: `bun bd <command>`
- **Run with JavaScript exception scope verification**: `BUN_JSC_validateExceptionChecks=1 BUN_JSC_dumpSimulatedThrows=1 bun bd <command>`
Tip: Bun is already installed and in $PATH. The `bd` subcommand is a package.json script.
@@ -211,24 +209,3 @@ Built-in JavaScript modules use special syntax and are organized as:
12. **Branch names must start with `claude/`** - This is a requirement for the CI to work.
**ONLY** push up changes after running `bun bd test <file>` and ensuring your tests pass.
## Debugging CI Failures
Use `scripts/buildkite-failures.ts` to fetch and analyze CI build failures:
```bash
# View failures for current branch
bun run scripts/buildkite-failures.ts
# View failures for a specific build number
bun run scripts/buildkite-failures.ts 35051
# View failures for a GitHub PR
bun run scripts/buildkite-failures.ts #26173
bun run scripts/buildkite-failures.ts https://github.com/oven-sh/bun/pull/26173
# Wait for build to complete (polls every 10s until pass/fail)
bun run scripts/buildkite-failures.ts --wait
```
The script fetches logs from BuildKite's public API and saves complete logs to `/tmp/bun-build-{number}-{platform}-{step}.log`. It displays a summary of errors and the file path for each failed job. Use `--wait` to poll continuously until the build completes or fails.

LATEST
View File

@@ -1 +1 @@
1.3.6
1.3.5

View File

@@ -36,7 +36,6 @@ Bun statically links these libraries:
| [`libbase64`](https://github.com/aklomp/base64/blob/master/LICENSE) | BSD 2-Clause |
| [`libuv`](https://github.com/libuv/libuv) (on Windows) | MIT |
| [`libdeflate`](https://github.com/ebiggers/libdeflate) | MIT |
| [`uucode`](https://github.com/jacobsandlund/uucode) | MIT |
| A fork of [`uWebsockets`](https://github.com/jarred-sumner/uwebsockets) | Apache 2.0 licensed |
| Parts of [Tigerbeetle's IO code](https://github.com/tigerbeetle/tigerbeetle/blob/532c8b70b9142c17e07737ab6d3da68d7500cbca/src/io/windows.zig#L1) | Apache 2.0 licensed |

View File

@@ -23,9 +23,7 @@
"react-dom": "^18.3.1",
"string-width": "7.1.0",
"strip-ansi": "^7.1.0",
"tar": "^7.4.3",
"tinycolor2": "^1.6.0",
"wrap-ansi": "^9.0.0",
"zx": "^7.2.3",
},
"devDependencies": {
@@ -110,8 +108,6 @@
"@fastify/proxy-addr": ["@fastify/proxy-addr@5.0.0", "", { "dependencies": { "@fastify/forwarded": "^3.0.0", "ipaddr.js": "^2.1.0" } }, "sha512-37qVVA1qZ5sgH7KpHkkC4z9SK6StIsIcOmpjvMPXNb3vx2GQxhZocogVYbr2PbbeLCQxYIPDok307xEvRZOzGA=="],
"@isaacs/fs-minipass": ["@isaacs/fs-minipass@4.0.1", "", { "dependencies": { "minipass": "^7.0.4" } }, "sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w=="],
"@jridgewell/gen-mapping": ["@jridgewell/gen-mapping@0.1.1", "", { "dependencies": { "@jridgewell/set-array": "^1.0.0", "@jridgewell/sourcemap-codec": "^1.4.10" } }, "sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w=="],
"@jridgewell/resolve-uri": ["@jridgewell/resolve-uri@3.1.0", "", {}, "sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w=="],
@@ -170,7 +166,7 @@
"ansi-regex": ["ansi-regex@6.0.1", "", {}, "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA=="],
"ansi-styles": ["ansi-styles@6.2.3", "https://artifactory.infra.ant.dev:443/artifactory/api/npm/npm-all/ansi-styles/-/ansi-styles-6.2.3.tgz", {}, "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg=="],
"ansi-styles": ["ansi-styles@3.2.1", "", { "dependencies": { "color-convert": "^1.9.0" } }, "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA=="],
"atomic-sleep": ["atomic-sleep@1.0.0", "", {}, "sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ=="],
@@ -186,8 +182,6 @@
"chalk": ["chalk@5.3.0", "", {}, "sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w=="],
"chownr": ["chownr@3.0.0", "", {}, "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g=="],
"color": ["color@4.2.3", "", { "dependencies": { "color-convert": "^2.0.1", "color-string": "^1.9.0" } }, "sha512-1rXeuUUiGGrykh+CeBdu5Ie7OJwinCgQY0bc7GCRxy5xVHy+moaqkpL/jqQq0MtQOeYcrqEz4abc5f0KtU7W4A=="],
"color-convert": ["color-convert@2.0.1", "", { "dependencies": { "color-name": "~1.1.4" } }, "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ=="],
@@ -368,10 +362,6 @@
"minimist": ["minimist@1.2.8", "", {}, "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA=="],
"minipass": ["minipass@7.1.2", "", {}, "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw=="],
"minizlib": ["minizlib@3.1.0", "", { "dependencies": { "minipass": "^7.1.2" } }, "sha512-KZxYo1BUkWD2TVFLr0MQoM8vUUigWD3LlD83a/75BqC+4qE0Hb1Vo5v1FgcfaNXvfXzr+5EhQ6ing/CaBijTlw=="],
"mitata": ["mitata@1.0.25", "", {}, "sha512-0v5qZtVW5vwj9FDvYfraR31BMDcRLkhSFWPTLaxx/Z3/EvScfVtAAWtMI2ArIbBcwh7P86dXh0lQWKiXQPlwYA=="],
"ms": ["ms@2.1.2", "", {}, "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="],
@@ -468,8 +458,6 @@
"supports-color": ["supports-color@5.5.0", "", { "dependencies": { "has-flag": "^3.0.0" } }, "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow=="],
"tar": ["tar@7.5.2", "", { "dependencies": { "@isaacs/fs-minipass": "^4.0.0", "chownr": "^3.0.0", "minipass": "^7.1.2", "minizlib": "^3.1.0", "yallist": "^5.0.0" } }, "sha512-7NyxrTE4Anh8km8iEy7o0QYPs+0JKBTj5ZaqHg6B39erLg0qYXN3BijtShwbsNSvQ+LN75+KV+C4QR/f6Gwnpg=="],
"thread-stream": ["thread-stream@3.1.0", "", { "dependencies": { "real-require": "^0.2.0" } }, "sha512-OqyPZ9u96VohAyMfJykzmivOrY2wfMSf3C5TtFJVgN+Hm6aj+voFhlK+kZEIv2FBh1X6Xp3DlnCOfEQ3B2J86A=="],
"through": ["through@2.3.8", "", {}, "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg=="],
@@ -494,9 +482,7 @@
"which": ["which@3.0.1", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "bin/which.js" } }, "sha512-XA1b62dzQzLfaEOSQFTCOd5KFf/1VSzZo7/7TUjnya6u0vGGKzU96UQBZTAThCb2j4/xjBAyii1OhRLJEivHvg=="],
"wrap-ansi": ["wrap-ansi@9.0.2", "https://artifactory.infra.ant.dev:443/artifactory/api/npm/npm-all/wrap-ansi/-/wrap-ansi-9.0.2.tgz", { "dependencies": { "ansi-styles": "^6.2.1", "string-width": "^7.0.0", "strip-ansi": "^7.1.0" } }, "sha512-42AtmgqjV+X1VpdOfyTGOYRi0/zsoLqtXQckTmqTeybT+BDIbM/Guxo7x3pE2vtpr1ok6xRqM9OpBe+Jyoqyww=="],
"yallist": ["yallist@5.0.0", "", {}, "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw=="],
"yallist": ["yallist@3.1.1", "", {}, "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g=="],
"yaml": ["yaml@2.3.4", "", {}, "sha512-8aAvwVUSHpfEqTQ4w/KMlf3HcRdt50E5ODIQJBw1fQ5RL34xabzxtUlzTXVqc4rkZsPbvrXKWnABCD7kWSmocA=="],
@@ -506,6 +492,8 @@
"@babel/highlight/chalk": ["chalk@2.4.2", "", { "dependencies": { "ansi-styles": "^3.2.1", "escape-string-regexp": "^1.0.5", "supports-color": "^5.3.0" } }, "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ=="],
"ansi-styles/color-convert": ["color-convert@1.9.3", "", { "dependencies": { "color-name": "1.1.3" } }, "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg=="],
"avvio/fastq": ["fastq@1.19.1", "", { "dependencies": { "reusify": "^1.0.4" } }, "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ=="],
"cross-spawn/which": ["which@2.0.2", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "./bin/node-which" } }, "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA=="],
@@ -514,14 +502,8 @@
"light-my-request/process-warning": ["process-warning@4.0.1", "", {}, "sha512-3c2LzQ3rY9d0hc1emcsHhfT9Jwz0cChib/QN89oME2R451w5fy3f0afAhERFZAwrbDU43wk12d0ORBpDVME50Q=="],
"lru-cache/yallist": ["yallist@3.1.1", "", {}, "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g=="],
"npm-run-path/path-key": ["path-key@4.0.0", "", {}, "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ=="],
"@babel/highlight/chalk/ansi-styles": ["ansi-styles@3.2.1", "", { "dependencies": { "color-convert": "^1.9.0" } }, "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA=="],
"@babel/highlight/chalk/ansi-styles/color-convert": ["color-convert@1.9.3", "", { "dependencies": { "color-name": "1.1.3" } }, "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg=="],
"@babel/highlight/chalk/ansi-styles/color-convert/color-name": ["color-name@1.1.3", "", {}, "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw=="],
"ansi-styles/color-convert/color-name": ["color-name@1.1.3", "", {}, "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw=="],
}
}

View File

@@ -18,9 +18,7 @@
"react": "^18.3.1",
"react-dom": "^18.3.1",
"string-width": "7.1.0",
"wrap-ansi": "^9.0.0",
"strip-ansi": "^7.1.0",
"tar": "^7.4.3",
"tinycolor2": "^1.6.0",
"zx": "^7.2.3"
},

View File

@@ -14,4 +14,3 @@ export function run(opts = {}) {
export const bench = Mitata.bench;
export const group = Mitata.group;
export const summary = Mitata.summary;

View File

@@ -1,477 +0,0 @@
import { mkdirSync, mkdtempSync, rmSync, writeFileSync } from "node:fs";
import { tmpdir } from "node:os";
import { join } from "node:path";
import { Pack, Unpack } from "tar";
import { bench, group, run } from "../runner.mjs";
// Check if Bun.Archive is available
const hasBunArchive = typeof Bun !== "undefined" && typeof Bun.Archive !== "undefined";
// Test data sizes
const smallContent = "Hello, World!";
const mediumContent = Buffer.alloc(10 * 1024, "x").toString(); // 10KB
const largeContent = Buffer.alloc(100 * 1024, "x").toString(); // 100KB
// Create test files for node-tar (it reads from filesystem)
const setupDir = mkdtempSync(join(tmpdir(), "archive-bench-setup-"));
function setupNodeTarFiles(prefix, files) {
const dir = join(setupDir, prefix);
mkdirSync(dir, { recursive: true });
for (const [name, content] of Object.entries(files)) {
const filePath = join(dir, name);
const fileDir = join(filePath, "..");
mkdirSync(fileDir, { recursive: true });
writeFileSync(filePath, content);
}
return dir;
}
// Setup directories for different test cases
const smallFilesDir = setupNodeTarFiles("small", {
"file1.txt": smallContent,
"file2.txt": smallContent,
"file3.txt": smallContent,
});
const mediumFilesDir = setupNodeTarFiles("medium", {
"file1.txt": mediumContent,
"file2.txt": mediumContent,
"file3.txt": mediumContent,
});
const largeFilesDir = setupNodeTarFiles("large", {
"file1.txt": largeContent,
"file2.txt": largeContent,
"file3.txt": largeContent,
});
const manyFilesEntries = {};
for (let i = 0; i < 100; i++) {
manyFilesEntries[`file${i}.txt`] = smallContent;
}
const manyFilesDir = setupNodeTarFiles("many", manyFilesEntries);
// Pre-create archives for extraction benchmarks
let smallTarGzBuffer, mediumTarGzBuffer, largeTarGzBuffer, manyFilesTarGzBuffer;
let smallTarBuffer, mediumTarBuffer, largeTarBuffer, manyFilesTarBuffer;
let smallBunArchiveGz, mediumBunArchiveGz, largeBunArchiveGz, manyFilesBunArchiveGz;
let smallBunArchive, mediumBunArchive, largeBunArchive, manyFilesBunArchive;
// Create tar buffer using node-tar (with optional gzip)
async function createNodeTarBuffer(cwd, files, gzip = false) {
return new Promise(resolve => {
const pack = new Pack({ cwd, gzip });
const bufs = [];
pack.on("data", chunk => bufs.push(chunk));
pack.on("end", () => resolve(Buffer.concat(bufs)));
for (const file of files) {
pack.add(file);
}
pack.end();
});
}
// Extract tar buffer using node-tar
async function extractNodeTarBuffer(buffer, cwd) {
return new Promise((resolve, reject) => {
const unpack = new Unpack({ cwd });
unpack.on("end", resolve);
unpack.on("error", reject);
unpack.end(buffer);
});
}
// Initialize gzipped archives
smallTarGzBuffer = await createNodeTarBuffer(smallFilesDir, ["file1.txt", "file2.txt", "file3.txt"], true);
mediumTarGzBuffer = await createNodeTarBuffer(mediumFilesDir, ["file1.txt", "file2.txt", "file3.txt"], true);
largeTarGzBuffer = await createNodeTarBuffer(largeFilesDir, ["file1.txt", "file2.txt", "file3.txt"], true);
manyFilesTarGzBuffer = await createNodeTarBuffer(manyFilesDir, Object.keys(manyFilesEntries), true);
// Initialize uncompressed archives
smallTarBuffer = await createNodeTarBuffer(smallFilesDir, ["file1.txt", "file2.txt", "file3.txt"], false);
mediumTarBuffer = await createNodeTarBuffer(mediumFilesDir, ["file1.txt", "file2.txt", "file3.txt"], false);
largeTarBuffer = await createNodeTarBuffer(largeFilesDir, ["file1.txt", "file2.txt", "file3.txt"], false);
manyFilesTarBuffer = await createNodeTarBuffer(manyFilesDir, Object.keys(manyFilesEntries), false);
const smallFiles = { "file1.txt": smallContent, "file2.txt": smallContent, "file3.txt": smallContent };
const mediumFiles = { "file1.txt": mediumContent, "file2.txt": mediumContent, "file3.txt": mediumContent };
const largeFiles = { "file1.txt": largeContent, "file2.txt": largeContent, "file3.txt": largeContent };
if (hasBunArchive) {
smallBunArchiveGz = await Bun.Archive.from(smallFiles).bytes("gzip");
mediumBunArchiveGz = await Bun.Archive.from(mediumFiles).bytes("gzip");
largeBunArchiveGz = await Bun.Archive.from(largeFiles).bytes("gzip");
manyFilesBunArchiveGz = await Bun.Archive.from(manyFilesEntries).bytes("gzip");
smallBunArchive = await Bun.Archive.from(smallFiles).bytes();
mediumBunArchive = await Bun.Archive.from(mediumFiles).bytes();
largeBunArchive = await Bun.Archive.from(largeFiles).bytes();
manyFilesBunArchive = await Bun.Archive.from(manyFilesEntries).bytes();
}
// Create reusable extraction directories (overwriting is fine)
const extractDirNodeTar = mkdtempSync(join(tmpdir(), "archive-bench-extract-node-"));
const extractDirBun = mkdtempSync(join(tmpdir(), "archive-bench-extract-bun-"));
const writeDirNodeTar = mkdtempSync(join(tmpdir(), "archive-bench-write-node-"));
const writeDirBun = mkdtempSync(join(tmpdir(), "archive-bench-write-bun-"));
// ============================================================================
// Create .tar (uncompressed) benchmarks
// ============================================================================
group("create .tar (3 small files)", () => {
bench("node-tar", async () => {
await createNodeTarBuffer(smallFilesDir, ["file1.txt", "file2.txt", "file3.txt"], false);
});
if (hasBunArchive) {
bench("Bun.Archive", async () => {
await Bun.Archive.from(smallFiles).bytes();
});
}
});
group("create .tar (3 x 100KB files)", () => {
bench("node-tar", async () => {
await createNodeTarBuffer(largeFilesDir, ["file1.txt", "file2.txt", "file3.txt"], false);
});
if (hasBunArchive) {
bench("Bun.Archive", async () => {
await Bun.Archive.from(largeFiles).bytes();
});
}
});
group("create .tar (100 small files)", () => {
bench("node-tar", async () => {
await createNodeTarBuffer(manyFilesDir, Object.keys(manyFilesEntries), false);
});
if (hasBunArchive) {
bench("Bun.Archive", async () => {
await Bun.Archive.from(manyFilesEntries).bytes();
});
}
});
// ============================================================================
// Create .tar.gz (compressed) benchmarks
// ============================================================================
group("create .tar.gz (3 small files)", () => {
bench("node-tar", async () => {
await createNodeTarBuffer(smallFilesDir, ["file1.txt", "file2.txt", "file3.txt"], true);
});
if (hasBunArchive) {
bench("Bun.Archive", async () => {
await Bun.Archive.from(smallFiles).bytes("gzip");
});
}
});
group("create .tar.gz (3 x 100KB files)", () => {
bench("node-tar", async () => {
await createNodeTarBuffer(largeFilesDir, ["file1.txt", "file2.txt", "file3.txt"], true);
});
if (hasBunArchive) {
bench("Bun.Archive", async () => {
await Bun.Archive.from(largeFiles).bytes("gzip");
});
}
});
group("create .tar.gz (100 small files)", () => {
bench("node-tar", async () => {
await createNodeTarBuffer(manyFilesDir, Object.keys(manyFilesEntries), true);
});
if (hasBunArchive) {
bench("Bun.Archive", async () => {
await Bun.Archive.from(manyFilesEntries).bytes("gzip");
});
}
});
// ============================================================================
// Extract .tar (uncompressed) benchmarks
// ============================================================================
group("extract .tar (3 small files)", () => {
bench("node-tar", async () => {
await extractNodeTarBuffer(smallTarBuffer, extractDirNodeTar);
});
if (hasBunArchive) {
bench("Bun.Archive", async () => {
await Bun.Archive.from(smallBunArchive).extract(extractDirBun);
});
}
});
group("extract .tar (3 x 100KB files)", () => {
bench("node-tar", async () => {
await extractNodeTarBuffer(largeTarBuffer, extractDirNodeTar);
});
if (hasBunArchive) {
bench("Bun.Archive", async () => {
await Bun.Archive.from(largeBunArchive).extract(extractDirBun);
});
}
});
group("extract .tar (100 small files)", () => {
bench("node-tar", async () => {
await extractNodeTarBuffer(manyFilesTarBuffer, extractDirNodeTar);
});
if (hasBunArchive) {
bench("Bun.Archive", async () => {
await Bun.Archive.from(manyFilesBunArchive).extract(extractDirBun);
});
}
});
// ============================================================================
// Extract .tar.gz (compressed) benchmarks
// ============================================================================
group("extract .tar.gz (3 small files)", () => {
bench("node-tar", async () => {
await extractNodeTarBuffer(smallTarGzBuffer, extractDirNodeTar);
});
if (hasBunArchive) {
bench("Bun.Archive", async () => {
await Bun.Archive.from(smallBunArchiveGz).extract(extractDirBun);
});
}
});
group("extract .tar.gz (3 x 100KB files)", () => {
bench("node-tar", async () => {
await extractNodeTarBuffer(largeTarGzBuffer, extractDirNodeTar);
});
if (hasBunArchive) {
bench("Bun.Archive", async () => {
await Bun.Archive.from(largeBunArchiveGz).extract(extractDirBun);
});
}
});
group("extract .tar.gz (100 small files)", () => {
bench("node-tar", async () => {
await extractNodeTarBuffer(manyFilesTarGzBuffer, extractDirNodeTar);
});
if (hasBunArchive) {
bench("Bun.Archive", async () => {
await Bun.Archive.from(manyFilesBunArchiveGz).extract(extractDirBun);
});
}
});
// ============================================================================
// Write .tar to disk benchmarks
// ============================================================================
let writeCounter = 0;
group("write .tar to disk (3 small files)", () => {
bench("node-tar + writeFileSync", async () => {
const buffer = await createNodeTarBuffer(smallFilesDir, ["file1.txt", "file2.txt", "file3.txt"], false);
writeFileSync(join(writeDirNodeTar, `archive-${writeCounter++}.tar`), buffer);
});
if (hasBunArchive) {
bench("Bun.Archive.write", async () => {
await Bun.Archive.write(join(writeDirBun, `archive-${writeCounter++}.tar`), smallFiles);
});
}
});
group("write .tar to disk (3 x 100KB files)", () => {
bench("node-tar + writeFileSync", async () => {
const buffer = await createNodeTarBuffer(largeFilesDir, ["file1.txt", "file2.txt", "file3.txt"], false);
writeFileSync(join(writeDirNodeTar, `archive-${writeCounter++}.tar`), buffer);
});
if (hasBunArchive) {
bench("Bun.Archive.write", async () => {
await Bun.Archive.write(join(writeDirBun, `archive-${writeCounter++}.tar`), largeFiles);
});
}
});
group("write .tar to disk (100 small files)", () => {
bench("node-tar + writeFileSync", async () => {
const buffer = await createNodeTarBuffer(manyFilesDir, Object.keys(manyFilesEntries), false);
writeFileSync(join(writeDirNodeTar, `archive-${writeCounter++}.tar`), buffer);
});
if (hasBunArchive) {
bench("Bun.Archive.write", async () => {
await Bun.Archive.write(join(writeDirBun, `archive-${writeCounter++}.tar`), manyFilesEntries);
});
}
});
// ============================================================================
// Write .tar.gz to disk benchmarks
// ============================================================================
group("write .tar.gz to disk (3 small files)", () => {
bench("node-tar + writeFileSync", async () => {
const buffer = await createNodeTarBuffer(smallFilesDir, ["file1.txt", "file2.txt", "file3.txt"], true);
writeFileSync(join(writeDirNodeTar, `archive-${writeCounter++}.tar.gz`), buffer);
});
if (hasBunArchive) {
bench("Bun.Archive.write", async () => {
await Bun.Archive.write(join(writeDirBun, `archive-${writeCounter++}.tar.gz`), smallFiles, "gzip");
});
}
});
group("write .tar.gz to disk (3 x 100KB files)", () => {
bench("node-tar + writeFileSync", async () => {
const buffer = await createNodeTarBuffer(largeFilesDir, ["file1.txt", "file2.txt", "file3.txt"], true);
writeFileSync(join(writeDirNodeTar, `archive-${writeCounter++}.tar.gz`), buffer);
});
if (hasBunArchive) {
bench("Bun.Archive.write", async () => {
await Bun.Archive.write(join(writeDirBun, `archive-${writeCounter++}.tar.gz`), largeFiles, "gzip");
});
}
});
group("write .tar.gz to disk (100 small files)", () => {
bench("node-tar + writeFileSync", async () => {
const buffer = await createNodeTarBuffer(manyFilesDir, Object.keys(manyFilesEntries), true);
writeFileSync(join(writeDirNodeTar, `archive-${writeCounter++}.tar.gz`), buffer);
});
if (hasBunArchive) {
bench("Bun.Archive.write", async () => {
await Bun.Archive.write(join(writeDirBun, `archive-${writeCounter++}.tar.gz`), manyFilesEntries, "gzip");
});
}
});
// ============================================================================
// Get files array from archive (files() method) benchmarks
// ============================================================================
// Helper to get files array from node-tar (reads all entries into memory)
async function getFilesArrayNodeTar(buffer) {
return new Promise((resolve, reject) => {
const files = new Map();
let pending = 0;
let closed = false;
const maybeResolve = () => {
if (closed && pending === 0) {
resolve(files);
}
};
const unpack = new Unpack({
onReadEntry: entry => {
if (entry.type === "File") {
pending++;
const chunks = [];
entry.on("data", chunk => chunks.push(chunk));
entry.on("end", () => {
const content = Buffer.concat(chunks);
// Create a File-like object similar to Bun.Archive.files()
files.set(entry.path, new Blob([content]));
pending--;
maybeResolve();
});
}
entry.resume(); // Drain the entry
},
});
unpack.on("close", () => {
closed = true;
maybeResolve();
});
unpack.on("error", reject);
unpack.end(buffer);
});
}
group("files() - get all files as Map (3 small files)", () => {
bench("node-tar", async () => {
await getFilesArrayNodeTar(smallTarBuffer);
});
if (hasBunArchive) {
bench("Bun.Archive.files()", async () => {
await Bun.Archive.from(smallBunArchive).files();
});
}
});
group("files() - get all files as Map (3 x 100KB files)", () => {
bench("node-tar", async () => {
await getFilesArrayNodeTar(largeTarBuffer);
});
if (hasBunArchive) {
bench("Bun.Archive.files()", async () => {
await Bun.Archive.from(largeBunArchive).files();
});
}
});
group("files() - get all files as Map (100 small files)", () => {
bench("node-tar", async () => {
await getFilesArrayNodeTar(manyFilesTarBuffer);
});
if (hasBunArchive) {
bench("Bun.Archive.files()", async () => {
await Bun.Archive.from(manyFilesBunArchive).files();
});
}
});
group("files() - get all files as Map from .tar.gz (3 small files)", () => {
bench("node-tar", async () => {
await getFilesArrayNodeTar(smallTarGzBuffer);
});
if (hasBunArchive) {
bench("Bun.Archive.files()", async () => {
await Bun.Archive.from(smallBunArchiveGz).files();
});
}
});
group("files() - get all files as Map from .tar.gz (100 small files)", () => {
bench("node-tar", async () => {
await getFilesArrayNodeTar(manyFilesTarGzBuffer);
});
if (hasBunArchive) {
bench("Bun.Archive.files()", async () => {
await Bun.Archive.from(manyFilesBunArchiveGz).files();
});
}
});
await run();
// Cleanup
rmSync(setupDir, { recursive: true, force: true });
rmSync(extractDirNodeTar, { recursive: true, force: true });
rmSync(extractDirBun, { recursive: true, force: true });
rmSync(writeDirNodeTar, { recursive: true, force: true });
rmSync(writeDirBun, { recursive: true, force: true });

View File

@@ -1,38 +0,0 @@
// @runtime bun,node
import { Buffer } from "node:buffer";
import { bench, group, run } from "../runner.mjs";
// Small arrays (common case)
const int32Array8 = [1, 2, 3, 4, 5, 6, 7, 8];
const doubleArray8 = [1.5, 2.5, 3.5, 4.5, 5.5, 6.5, 7.5, 8.5];
// Medium arrays
const int32Array64 = Array.from({ length: 64 }, (_, i) => i % 256);
const doubleArray64 = Array.from({ length: 64 }, (_, i) => i + 0.5);
// Large arrays
const int32Array1024 = Array.from({ length: 1024 }, (_, i) => i % 256);
// Array-like objects (fallback path)
const arrayLike8 = { 0: 1, 1: 2, 2: 3, 3: 4, 4: 5, 5: 6, 6: 7, 7: 8, length: 8 };
// Empty array
const emptyArray = [];
group("Buffer.from(array) - Int32 arrays", () => {
bench("Buffer.from(int32[8])", () => Buffer.from(int32Array8));
bench("Buffer.from(int32[64])", () => Buffer.from(int32Array64));
bench("Buffer.from(int32[1024])", () => Buffer.from(int32Array1024));
});
group("Buffer.from(array) - Double arrays", () => {
bench("Buffer.from(double[8])", () => Buffer.from(doubleArray8));
bench("Buffer.from(double[64])", () => Buffer.from(doubleArray64));
});
group("Buffer.from(array) - Edge cases", () => {
bench("Buffer.from([])", () => Buffer.from(emptyArray));
bench("Buffer.from(arrayLike[8])", () => Buffer.from(arrayLike8));
});
await run();

View File

@@ -1,103 +0,0 @@
import wrapAnsi from "wrap-ansi";
import { bench, run, summary } from "../runner.mjs";
// Test fixtures
const shortText = "The quick brown fox jumped over the lazy dog.";
const mediumText = "The quick brown fox jumped over the lazy dog and then ran away with the unicorn. ".repeat(10);
const longText = "The quick brown fox jumped over the lazy dog and then ran away with the unicorn. ".repeat(100);
// ANSI colored text
const red = s => `\u001B[31m${s}\u001B[39m`;
const green = s => `\u001B[32m${s}\u001B[39m`;
const blue = s => `\u001B[34m${s}\u001B[39m`;
const coloredShort = `The quick ${red("brown fox")} jumped over the ${green("lazy dog")}.`;
const coloredMedium =
`The quick ${red("brown fox jumped over")} the ${green("lazy dog and then ran away")} with the ${blue("unicorn")}. `.repeat(
10,
);
const coloredLong =
`The quick ${red("brown fox jumped over")} the ${green("lazy dog and then ran away")} with the ${blue("unicorn")}. `.repeat(
100,
);
// Full-width characters (Japanese)
const japaneseText = "日本語のテキストを折り返すテストです。全角文字は幅2としてカウントされます。".repeat(5);
// Emoji text
const emojiText = "Hello 👋 World 🌍! Let's test 🧪 some emoji 😀 wrapping 📦!".repeat(5);
// Hyperlink text
const hyperlinkText = "Check out \u001B]8;;https://bun.sh\u0007Bun\u001B]8;;\u0007, it's fast! ".repeat(10);
// Options
const hardOpts = { hard: true };
const noTrimOpts = { trim: false };
// Basic text benchmarks
summary(() => {
bench("Short text (45 chars) - npm", () => wrapAnsi(shortText, 20));
bench("Short text (45 chars) - Bun", () => Bun.wrapAnsi(shortText, 20));
});
summary(() => {
bench("Medium text (810 chars) - npm", () => wrapAnsi(mediumText, 40));
bench("Medium text (810 chars) - Bun", () => Bun.wrapAnsi(mediumText, 40));
});
summary(() => {
bench("Long text (8100 chars) - npm", () => wrapAnsi(longText, 80));
bench("Long text (8100 chars) - Bun", () => Bun.wrapAnsi(longText, 80));
});
// ANSI colored text benchmarks
summary(() => {
bench("Colored short - npm", () => wrapAnsi(coloredShort, 20));
bench("Colored short - Bun", () => Bun.wrapAnsi(coloredShort, 20));
});
summary(() => {
bench("Colored medium - npm", () => wrapAnsi(coloredMedium, 40));
bench("Colored medium - Bun", () => Bun.wrapAnsi(coloredMedium, 40));
});
summary(() => {
bench("Colored long - npm", () => wrapAnsi(coloredLong, 80));
bench("Colored long - Bun", () => Bun.wrapAnsi(coloredLong, 80));
});
// Hard wrap benchmarks
summary(() => {
bench("Hard wrap long - npm", () => wrapAnsi(longText, 80, hardOpts));
bench("Hard wrap long - Bun", () => Bun.wrapAnsi(longText, 80, hardOpts));
});
summary(() => {
bench("Hard wrap colored - npm", () => wrapAnsi(coloredLong, 80, hardOpts));
bench("Hard wrap colored - Bun", () => Bun.wrapAnsi(coloredLong, 80, hardOpts));
});
// Unicode benchmarks
summary(() => {
bench("Japanese (full-width) - npm", () => wrapAnsi(japaneseText, 40));
bench("Japanese (full-width) - Bun", () => Bun.wrapAnsi(japaneseText, 40));
});
summary(() => {
bench("Emoji text - npm", () => wrapAnsi(emojiText, 30));
bench("Emoji text - Bun", () => Bun.wrapAnsi(emojiText, 30));
});
// Hyperlink benchmarks
summary(() => {
bench("Hyperlink (OSC 8) - npm", () => wrapAnsi(hyperlinkText, 40));
bench("Hyperlink (OSC 8) - Bun", () => Bun.wrapAnsi(hyperlinkText, 40));
});
// No trim option
summary(() => {
bench("No trim long - npm", () => wrapAnsi(longText, 80, noTrimOpts));
bench("No trim long - Bun", () => Bun.wrapAnsi(longText, 80, noTrimOpts));
});
await run();

build.zig
View File

@@ -34,7 +34,6 @@ const BunBuildOptions = struct {
enable_asan: bool,
enable_fuzzilli: bool,
enable_valgrind: bool,
enable_tinycc: bool,
use_mimalloc: bool,
tracy_callstack_depth: u16,
reported_nodejs_version: Version,
@@ -85,7 +84,6 @@ const BunBuildOptions = struct {
opts.addOption(bool, "enable_asan", this.enable_asan);
opts.addOption(bool, "enable_fuzzilli", this.enable_fuzzilli);
opts.addOption(bool, "enable_valgrind", this.enable_valgrind);
opts.addOption(bool, "enable_tinycc", this.enable_tinycc);
opts.addOption(bool, "use_mimalloc", this.use_mimalloc);
opts.addOption([]const u8, "reported_nodejs_version", b.fmt("{f}", .{this.reported_nodejs_version}));
opts.addOption(bool, "zig_self_hosted_backend", this.no_llvm);
@@ -261,7 +259,6 @@ pub fn build(b: *Build) !void {
.enable_asan = b.option(bool, "enable_asan", "Enable asan") orelse false,
.enable_fuzzilli = b.option(bool, "enable_fuzzilli", "Enable fuzzilli instrumentation") orelse false,
.enable_valgrind = b.option(bool, "enable_valgrind", "Enable valgrind") orelse false,
.enable_tinycc = b.option(bool, "enable_tinycc", "Enable TinyCC for FFI JIT compilation") orelse true,
.use_mimalloc = b.option(bool, "use_mimalloc", "Use mimalloc as default allocator") orelse false,
.llvm_codegen_threads = b.option(u32, "llvm_codegen_threads", "Number of threads to use for LLVM codegen") orelse 1,
};
@@ -345,7 +342,6 @@ pub fn build(b: *Build) !void {
const step = b.step("check-debug", "Check for semantic analysis errors on some platforms");
addMultiCheck(b, step, build_options, &.{
.{ .os = .windows, .arch = .x86_64 },
.{ .os = .windows, .arch = .aarch64 },
.{ .os = .mac, .arch = .aarch64 },
.{ .os = .linux, .arch = .x86_64 },
}, &.{.Debug});
@@ -356,7 +352,6 @@ pub fn build(b: *Build) !void {
const step = b.step("check-all", "Check for semantic analysis errors on all supported platforms");
addMultiCheck(b, step, build_options, &.{
.{ .os = .windows, .arch = .x86_64 },
.{ .os = .windows, .arch = .aarch64 },
.{ .os = .mac, .arch = .x86_64 },
.{ .os = .mac, .arch = .aarch64 },
.{ .os = .linux, .arch = .x86_64 },
@@ -371,7 +366,6 @@ pub fn build(b: *Build) !void {
const step = b.step("check-all-debug", "Check for semantic analysis errors on all supported platforms in debug mode");
addMultiCheck(b, step, build_options, &.{
.{ .os = .windows, .arch = .x86_64 },
.{ .os = .windows, .arch = .aarch64 },
.{ .os = .mac, .arch = .x86_64 },
.{ .os = .mac, .arch = .aarch64 },
.{ .os = .linux, .arch = .x86_64 },
@@ -386,14 +380,12 @@ pub fn build(b: *Build) !void {
const step = b.step("check-windows", "Check for semantic analysis errors on Windows");
addMultiCheck(b, step, build_options, &.{
.{ .os = .windows, .arch = .x86_64 },
.{ .os = .windows, .arch = .aarch64 },
}, &.{ .Debug, .ReleaseFast });
}
{
const step = b.step("check-windows-debug", "Check for semantic analysis errors on Windows");
addMultiCheck(b, step, build_options, &.{
.{ .os = .windows, .arch = .x86_64 },
.{ .os = .windows, .arch = .aarch64 },
}, &.{.Debug});
}
{
@@ -430,7 +422,6 @@ pub fn build(b: *Build) !void {
const step = b.step("translate-c", "Copy generated translated-c-headers.zig to zig-out");
for ([_]TargetDescription{
.{ .os = .windows, .arch = .x86_64 },
.{ .os = .windows, .arch = .aarch64 },
.{ .os = .mac, .arch = .x86_64 },
.{ .os = .mac, .arch = .aarch64 },
.{ .os = .linux, .arch = .x86_64 },
@@ -459,146 +450,6 @@ pub fn build(b: *Build) !void {
// const run = b.addRunArtifact(exe);
// step.dependOn(&run.step);
}
// zig build generate-grapheme-tables
// Regenerates src/string/immutable/grapheme_tables.zig from the vendored uucode.
// Run this when updating src/deps/uucode. Normal builds use the committed file.
{
const step = b.step("generate-grapheme-tables", "Regenerate grapheme property tables from vendored uucode");
// --- Phase 1: Build uucode tables (separate module graph, no tables dependency) ---
const bt_config_mod = b.createModule(.{
.root_source_file = b.path("src/deps/uucode/src/config.zig"),
.target = b.graph.host,
});
const bt_types_mod = b.createModule(.{
.root_source_file = b.path("src/deps/uucode/src/types.zig"),
.target = b.graph.host,
});
bt_types_mod.addImport("config.zig", bt_config_mod);
bt_config_mod.addImport("types.zig", bt_types_mod);
const bt_config_x_mod = b.createModule(.{
.root_source_file = b.path("src/deps/uucode/src/x/config.x.zig"),
.target = b.graph.host,
});
const bt_types_x_mod = b.createModule(.{
.root_source_file = b.path("src/deps/uucode/src/x/types.x.zig"),
.target = b.graph.host,
});
bt_types_x_mod.addImport("config.x.zig", bt_config_x_mod);
bt_config_x_mod.addImport("types.x.zig", bt_types_x_mod);
bt_config_x_mod.addImport("types.zig", bt_types_mod);
bt_config_x_mod.addImport("config.zig", bt_config_mod);
const bt_build_config_mod = b.createModule(.{
.root_source_file = b.path("src/unicode/uucode/uucode_config.zig"),
.target = b.graph.host,
});
bt_build_config_mod.addImport("types.zig", bt_types_mod);
bt_build_config_mod.addImport("config.zig", bt_config_mod);
bt_build_config_mod.addImport("types.x.zig", bt_types_x_mod);
bt_build_config_mod.addImport("config.x.zig", bt_config_x_mod);
const build_tables_mod = b.createModule(.{
.root_source_file = b.path("src/deps/uucode/src/build/tables.zig"),
.target = b.graph.host,
.optimize = .Debug,
});
build_tables_mod.addImport("config.zig", bt_config_mod);
build_tables_mod.addImport("build_config", bt_build_config_mod);
build_tables_mod.addImport("types.zig", bt_types_mod);
const build_tables_exe = b.addExecutable(.{
.name = "uucode_build_tables",
.root_module = build_tables_mod,
.use_llvm = true,
});
const run_build_tables = b.addRunArtifact(build_tables_exe);
run_build_tables.setCwd(b.path("src/deps/uucode"));
const tables_path = run_build_tables.addOutputFileArg("tables.zig");
// --- Phase 2: Build grapheme-gen with full uucode (separate module graph) ---
const rt_config_mod = b.createModule(.{
.root_source_file = b.path("src/deps/uucode/src/config.zig"),
.target = b.graph.host,
});
const rt_types_mod = b.createModule(.{
.root_source_file = b.path("src/deps/uucode/src/types.zig"),
.target = b.graph.host,
});
rt_types_mod.addImport("config.zig", rt_config_mod);
rt_config_mod.addImport("types.zig", rt_types_mod);
const rt_config_x_mod = b.createModule(.{
.root_source_file = b.path("src/deps/uucode/src/x/config.x.zig"),
.target = b.graph.host,
});
const rt_types_x_mod = b.createModule(.{
.root_source_file = b.path("src/deps/uucode/src/x/types.x.zig"),
.target = b.graph.host,
});
rt_types_x_mod.addImport("config.x.zig", rt_config_x_mod);
rt_config_x_mod.addImport("types.x.zig", rt_types_x_mod);
rt_config_x_mod.addImport("types.zig", rt_types_mod);
rt_config_x_mod.addImport("config.zig", rt_config_mod);
const rt_build_config_mod = b.createModule(.{
.root_source_file = b.path("src/unicode/uucode/uucode_config.zig"),
.target = b.graph.host,
});
rt_build_config_mod.addImport("types.zig", rt_types_mod);
rt_build_config_mod.addImport("config.zig", rt_config_mod);
rt_build_config_mod.addImport("types.x.zig", rt_types_x_mod);
rt_build_config_mod.addImport("config.x.zig", rt_config_x_mod);
const rt_tables_mod = b.createModule(.{
.root_source_file = tables_path,
.target = b.graph.host,
});
rt_tables_mod.addImport("types.zig", rt_types_mod);
rt_tables_mod.addImport("types.x.zig", rt_types_x_mod);
rt_tables_mod.addImport("config.zig", rt_config_mod);
rt_tables_mod.addImport("build_config", rt_build_config_mod);
const rt_get_mod = b.createModule(.{
.root_source_file = b.path("src/deps/uucode/src/get.zig"),
.target = b.graph.host,
});
rt_get_mod.addImport("types.zig", rt_types_mod);
rt_get_mod.addImport("tables", rt_tables_mod);
rt_types_mod.addImport("get.zig", rt_get_mod);
const uucode_mod = b.createModule(.{
.root_source_file = b.path("src/deps/uucode/src/root.zig"),
.target = b.graph.host,
});
uucode_mod.addImport("types.zig", rt_types_mod);
uucode_mod.addImport("config.zig", rt_config_mod);
uucode_mod.addImport("types.x.zig", rt_types_x_mod);
uucode_mod.addImport("tables", rt_tables_mod);
uucode_mod.addImport("get.zig", rt_get_mod);
// grapheme_gen executable
const gen_exe = b.addExecutable(.{
.name = "grapheme-gen",
.root_module = b.createModule(.{
.root_source_file = b.path("src/unicode/uucode/grapheme_gen.zig"),
.target = b.graph.host,
.optimize = .Debug,
.imports = &.{
.{ .name = "uucode", .module = uucode_mod },
},
}),
.use_llvm = true,
});
const run_gen = b.addRunArtifact(gen_exe);
const gen_output = run_gen.captureStdOut();
const install = b.addInstallFile(gen_output, "../src/string/immutable/grapheme_tables.zig");
step.dependOn(&install.step);
}
}
const TargetDescription = struct {
@@ -642,7 +493,6 @@ fn addMultiCheck(
.no_llvm = root_build_options.no_llvm,
.enable_asan = root_build_options.enable_asan,
.enable_valgrind = root_build_options.enable_valgrind,
.enable_tinycc = root_build_options.enable_tinycc,
.enable_fuzzilli = root_build_options.enable_fuzzilli,
.use_mimalloc = root_build_options.use_mimalloc,
.override_no_export_cpp_apis = root_build_options.override_no_export_cpp_apis,

View File

@@ -21,10 +21,6 @@ endforeach()
if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm|ARM|arm64|ARM64|aarch64|AARCH64")
if(APPLE)
register_compiler_flags(-mcpu=apple-m1)
elseif(WIN32)
# Windows ARM64: use /clang: prefix for clang-cl, skip for MSVC cl.exe subprojects
# These flags are only understood by clang-cl, not MSVC cl.exe
register_compiler_flags(/clang:-march=armv8-a+crc /clang:-mtune=ampere1)
else()
register_compiler_flags(-march=armv8-a+crc -mtune=ampere1)
endif()
@@ -246,17 +242,10 @@ if(UNIX)
)
endif()
if(WIN32)
register_compiler_flags(
DESCRIPTION "Set C/C++ error limit"
/clang:-ferror-limit=${ERROR_LIMIT}
)
else()
register_compiler_flags(
DESCRIPTION "Set C/C++ error limit"
-ferror-limit=${ERROR_LIMIT}
)
endif()
register_compiler_flags(
DESCRIPTION "Set C/C++ error limit"
-ferror-limit=${ERROR_LIMIT}
)
# --- LTO ---
if(ENABLE_LTO)

View File

@@ -106,9 +106,9 @@ else()
endif()
if(CMAKE_HOST_SYSTEM_PROCESSOR MATCHES "arm64|ARM64|aarch64|AARCH64")
set(HOST_ARCH "aarch64")
set(HOST_OS "aarch64")
elseif(CMAKE_HOST_SYSTEM_PROCESSOR MATCHES "x86_64|X86_64|x64|X64|amd64|AMD64")
set(HOST_ARCH "x64")
set(HOST_OS "x64")
else()
unsupported(CMAKE_HOST_SYSTEM_PROCESSOR)
endif()
@@ -433,33 +433,6 @@ function(register_command)
list(APPEND CMD_EFFECTIVE_DEPENDS ${CMD_EXECUTABLE})
endif()
# SKIP_CODEGEN: Skip commands that use BUN_EXECUTABLE if all outputs exist
# This is used for Windows ARM64 builds where x64 bun crashes under emulation
if(SKIP_CODEGEN AND CMD_EXECUTABLE STREQUAL "${BUN_EXECUTABLE}")
set(ALL_OUTPUTS_EXIST TRUE)
foreach(output ${CMD_OUTPUTS})
if(NOT EXISTS ${output})
set(ALL_OUTPUTS_EXIST FALSE)
break()
endif()
endforeach()
if(ALL_OUTPUTS_EXIST AND CMD_OUTPUTS)
message(STATUS "SKIP_CODEGEN: Skipping ${CMD_TARGET} (outputs exist)")
if(CMD_TARGET)
add_custom_target(${CMD_TARGET})
endif()
return()
elseif(NOT CMD_OUTPUTS)
message(STATUS "SKIP_CODEGEN: Skipping ${CMD_TARGET} (no outputs)")
if(CMD_TARGET)
add_custom_target(${CMD_TARGET})
endif()
return()
else()
message(FATAL_ERROR "SKIP_CODEGEN: Cannot skip ${CMD_TARGET} - missing outputs. Run codegen on x64 first.")
endif()
endif()
foreach(target ${CMD_TARGETS})
if(target MATCHES "/|\\\\")
message(FATAL_ERROR "register_command: TARGETS contains \"${target}\", if it's a path add it to SOURCES instead")
@@ -677,7 +650,6 @@ function(register_bun_install)
${NPM_CWD}
COMMAND
${BUN_EXECUTABLE}
${BUN_FLAGS}
install
--frozen-lockfile
SOURCES
@@ -785,7 +757,7 @@ function(register_cmake_command)
set(MAKE_EFFECTIVE_ARGS -B${MAKE_BUILD_PATH} ${CMAKE_ARGS})
set(setFlags GENERATOR BUILD_TYPE)
set(appendFlags C_FLAGS CXX_FLAGS LINKER_FLAGS STATIC_LINKER_FLAGS EXE_LINKER_FLAGS SHARED_LINKER_FLAGS MODULE_LINKER_FLAGS)
set(appendFlags C_FLAGS CXX_FLAGS LINKER_FLAGS)
set(specialFlags POSITION_INDEPENDENT_CODE)
set(flags ${setFlags} ${appendFlags} ${specialFlags})
@@ -831,14 +803,6 @@ function(register_cmake_command)
list(APPEND MAKE_EFFECTIVE_ARGS "-DCMAKE_${flag}=${MAKE_${flag}}")
endforeach()
# Workaround for CMake 4.1.0 bug: Force correct machine type for Windows ARM64
# Use toolchain file and set CMP0197 policy to prevent duplicate /machine: flags
if(WIN32 AND CMAKE_SYSTEM_PROCESSOR MATCHES "ARM64|aarch64|AARCH64")
list(APPEND MAKE_EFFECTIVE_ARGS "-DCMAKE_TOOLCHAIN_FILE=${CWD}/cmake/toolchains/windows-aarch64.cmake")
list(APPEND MAKE_EFFECTIVE_ARGS "-DCMAKE_POLICY_DEFAULT_CMP0197=NEW")
list(APPEND MAKE_EFFECTIVE_ARGS "-DCMAKE_PROJECT_INCLUDE=${CWD}/cmake/arm64-static-lib-fix.cmake")
endif()
if(DEFINED FRESH)
list(APPEND MAKE_EFFECTIVE_ARGS --fresh)
endif()

View File

@@ -4,7 +4,6 @@ endif()
optionx(BUN_LINK_ONLY BOOL "If only the linking step should be built" DEFAULT OFF)
optionx(BUN_CPP_ONLY BOOL "If only the C++ part of Bun should be built" DEFAULT OFF)
optionx(SKIP_CODEGEN BOOL "Skip JavaScript codegen (for Windows ARM64 debug)" DEFAULT OFF)
optionx(BUILDKITE BOOL "If Buildkite is enabled" DEFAULT OFF)
optionx(GITHUB_ACTIONS BOOL "If GitHub Actions is enabled" DEFAULT OFF)
@@ -50,7 +49,7 @@ else()
message(FATAL_ERROR "Unsupported operating system: ${CMAKE_SYSTEM_NAME}")
endif()
if(CMAKE_SYSTEM_PROCESSOR MATCHES "aarch64|arm64|ARM64")
if(CMAKE_SYSTEM_PROCESSOR MATCHES "aarch64|arm64|arm")
setx(ARCH "aarch64")
elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "amd64|x86_64|x64|AMD64")
setx(ARCH "x64")
@@ -58,18 +57,6 @@ else()
message(FATAL_ERROR "Unsupported architecture: ${CMAKE_SYSTEM_PROCESSOR}")
endif()
# CMake 4.0+ policy CMP0197 controls how MSVC machine type flags are handled
# Setting to NEW prevents duplicate /machine: flags being added to linker commands
if(WIN32 AND ARCH STREQUAL "aarch64")
set(CMAKE_POLICY_DEFAULT_CMP0197 NEW)
set(CMAKE_MSVC_CMP0197 NEW)
# Set linker flags for exe/shared linking
set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} /machine:ARM64")
set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} /machine:ARM64")
set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} /machine:ARM64")
set(CMAKE_STATIC_LINKER_FLAGS "${CMAKE_STATIC_LINKER_FLAGS} /machine:ARM64")
endif()
# Windows Code Signing Option
if(WIN32)
optionx(ENABLE_WINDOWS_CODESIGNING BOOL "Enable Windows code signing with DigiCert KeyLocker" DEFAULT OFF)
@@ -212,16 +199,6 @@ optionx(USE_WEBKIT_ICU BOOL "Use the ICU libraries from WebKit" DEFAULT ${DEFAUL
optionx(ERROR_LIMIT STRING "Maximum number of errors to show when compiling C++ code" DEFAULT "100")
# TinyCC is used for FFI JIT compilation
# Disable on Windows ARM64 where it's not yet supported
if(WIN32 AND ARCH STREQUAL "aarch64")
set(DEFAULT_ENABLE_TINYCC OFF)
else()
set(DEFAULT_ENABLE_TINYCC ON)
endif()
optionx(ENABLE_TINYCC BOOL "Enable TinyCC for FFI JIT compilation" DEFAULT ${DEFAULT_ENABLE_TINYCC})
# This is not an `option` because setting this variable to OFF is experimental
# and unsupported. This replaces the `use_mimalloc` variable previously in
# bun.zig, and enables C++ code to also be aware of the option.

View File

@@ -1,8 +0,0 @@
# This file is included after project() via CMAKE_PROJECT_INCLUDE
# It fixes the static library creation command to use ARM64 machine type
if(WIN32 AND CMAKE_SYSTEM_PROCESSOR STREQUAL "aarch64")
# Override the static library creation commands to avoid spurious /machine:x64 flags
set(CMAKE_C_CREATE_STATIC_LIBRARY "<CMAKE_AR> /nologo /machine:ARM64 /out:<TARGET> <OBJECTS>" CACHE STRING "" FORCE)
set(CMAKE_CXX_CREATE_STATIC_LIBRARY "<CMAKE_AR> /nologo /machine:ARM64 /out:<TARGET> <OBJECTS>" CACHE STRING "" FORCE)
endif()

View File

@@ -21,12 +21,7 @@ if(NOT DEFINED CMAKE_HOST_SYSTEM_PROCESSOR)
endif()
if(CMAKE_HOST_SYSTEM_PROCESSOR MATCHES "arm64|ARM64|aarch64|AARCH64")
# Windows ARM64 can run x86_64 via emulation, and no native ARM64 Zig build exists yet
if(CMAKE_HOST_WIN32)
set(ZIG_ARCH "x86_64")
else()
set(ZIG_ARCH "aarch64")
endif()
set(ZIG_ARCH "aarch64")
elseif(CMAKE_HOST_SYSTEM_PROCESSOR MATCHES "amd64|AMD64|x86_64|X86_64|x64|X64")
set(ZIG_ARCH "x86_64")
else()

View File

@@ -1,34 +0,0 @@
@echo off
setlocal enabledelayedexpansion
REM Wrapper for llvm-lib that strips conflicting /machine:x64 flag for ARM64 builds
REM This is a workaround for CMake 4.1.0 bug
REM Find llvm-lib.exe - check LLVM_LIB env var, then PATH, then known locations
if defined LLVM_LIB (
set "LLVM_LIB_EXE=!LLVM_LIB!"
) else (
where llvm-lib.exe >nul 2>&1
if !ERRORLEVEL! equ 0 (
for /f "delims=" %%i in ('where llvm-lib.exe') do set "LLVM_LIB_EXE=%%i"
) else if exist "C:\Program Files\LLVM\bin\llvm-lib.exe" (
set "LLVM_LIB_EXE=C:\Program Files\LLVM\bin\llvm-lib.exe"
) else (
echo Error: Cannot find llvm-lib.exe. Set LLVM_LIB environment variable or add LLVM to PATH.
exit /b 1
)
)
set "ARGS="
for %%a in (%*) do (
set "ARG=%%a"
if /i "!ARG!"=="/machine:x64" (
REM Skip this argument
) else (
set "ARGS=!ARGS! %%a"
)
)
"!LLVM_LIB_EXE!" %ARGS%
exit /b %ERRORLEVEL%

View File

@@ -1,18 +0,0 @@
# Wrapper for llvm-lib that strips conflicting /machine:x64 flag for ARM64 builds
# This is a workaround for CMake 4.1.0 bug where both /machine:ARM64 and /machine:x64 are added
# Find llvm-lib.exe - check LLVM_LIB env var, then PATH, then known locations
if ($env:LLVM_LIB) {
$llvmLib = $env:LLVM_LIB
} elseif (Get-Command llvm-lib.exe -ErrorAction SilentlyContinue) {
$llvmLib = (Get-Command llvm-lib.exe).Source
} elseif (Test-Path "C:\Program Files\LLVM\bin\llvm-lib.exe") {
$llvmLib = "C:\Program Files\LLVM\bin\llvm-lib.exe"
} else {
Write-Error "Cannot find llvm-lib.exe. Set LLVM_LIB environment variable or add LLVM to PATH."
exit 1
}
$filteredArgs = $args | Where-Object { $_ -ne "/machine:x64" }
& $llvmLib @filteredArgs
exit $LASTEXITCODE

View File

@@ -1,34 +0,0 @@
@echo off
setlocal enabledelayedexpansion
REM Wrapper for llvm-lib that strips conflicting /machine:x64 flag for ARM64 builds
REM This is a workaround for CMake 4.1.0 bug
REM Find llvm-lib.exe - check LLVM_LIB env var, then PATH, then known locations
if defined LLVM_LIB (
set "LLVM_LIB_EXE=!LLVM_LIB!"
) else (
where llvm-lib.exe >nul 2>&1
if !ERRORLEVEL! equ 0 (
for /f "delims=" %%i in ('where llvm-lib.exe') do set "LLVM_LIB_EXE=%%i"
) else if exist "C:\Program Files\LLVM\bin\llvm-lib.exe" (
set "LLVM_LIB_EXE=C:\Program Files\LLVM\bin\llvm-lib.exe"
) else (
echo Error: Cannot find llvm-lib.exe. Set LLVM_LIB environment variable or add LLVM to PATH.
exit /b 1
)
)
set NEWARGS=
for %%a in (%*) do (
set "ARG=%%a"
if /i "!ARG!"=="/machine:x64" (
REM Skip /machine:x64 argument
) else (
set "NEWARGS=!NEWARGS! %%a"
)
)
"!LLVM_LIB_EXE!" %NEWARGS%
exit /b %ERRORLEVEL%

View File

@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
oven-sh/boringssl
COMMIT
4f4f5ef8ebc6e23cbf393428f0ab1b526773f7ac
f1ffd9e83d4f5c28a9c70d73f9a4e6fcf310062f
)
register_cmake_command(

View File

@@ -57,17 +57,13 @@ set(BUN_DEPENDENCIES
LolHtml
Lshpack
Mimalloc
TinyCC
Zlib
LibArchive # must be loaded after zlib
HdrHistogram # must be loaded after zlib
Zstd
)
# TinyCC is optional - disabled on Windows ARM64 where it's not supported
if(ENABLE_TINYCC)
list(APPEND BUN_DEPENDENCIES TinyCC)
endif()
include(CloneZstd)
# --- Codegen ---
@@ -189,7 +185,7 @@ register_command(
CWD
${BUN_NODE_FALLBACKS_SOURCE}
COMMAND
${BUN_EXECUTABLE} ${BUN_FLAGS} run build-fallbacks
${BUN_EXECUTABLE} run build-fallbacks
${BUN_NODE_FALLBACKS_OUTPUT}
${BUN_NODE_FALLBACKS_SOURCES}
SOURCES
@@ -210,7 +206,7 @@ register_command(
CWD
${BUN_NODE_FALLBACKS_SOURCE}
COMMAND
${BUN_EXECUTABLE} ${BUN_FLAGS} build
${BUN_EXECUTABLE} build
${BUN_NODE_FALLBACKS_SOURCE}/node_modules/react-refresh/cjs/react-refresh-runtime.development.js
--outfile=${BUN_REACT_REFRESH_OUTPUT}
--target=browser
@@ -247,7 +243,6 @@ register_command(
"Generating ErrorCode.{zig,h}"
COMMAND
${BUN_EXECUTABLE}
${BUN_FLAGS}
run
${BUN_ERROR_CODE_SCRIPT}
${CODEGEN_PATH}
@@ -283,7 +278,6 @@ register_command(
"Generating ZigGeneratedClasses.{zig,cpp,h}"
COMMAND
${BUN_EXECUTABLE}
${BUN_FLAGS}
run
${BUN_ZIG_GENERATED_CLASSES_SCRIPT}
${BUN_ZIG_GENERATED_CLASSES_SOURCES}
@@ -334,7 +328,6 @@ register_command(
"Generating C++ --> Zig bindings"
COMMAND
${BUN_EXECUTABLE}
${BUN_FLAGS}
${CWD}/src/codegen/cppbind.ts
${CWD}/src
${CODEGEN_PATH}
@@ -352,7 +345,6 @@ register_command(
"Generating CI info"
COMMAND
${BUN_EXECUTABLE}
${BUN_FLAGS}
${CWD}/src/codegen/ci_info.ts
${CODEGEN_PATH}/ci_info.zig
SOURCES
@@ -361,35 +353,24 @@ register_command(
${BUN_CI_INFO_OUTPUTS}
)
if(SKIP_CODEGEN)
# Skip JavaScript codegen - useful for Windows ARM64 debug builds where bun crashes
message(STATUS "SKIP_CODEGEN is ON - skipping bun-js-modules codegen")
foreach(output ${BUN_JAVASCRIPT_OUTPUTS})
if(NOT EXISTS ${output})
message(FATAL_ERROR "SKIP_CODEGEN is ON but ${output} does not exist. Run codegen manually first.")
endif()
endforeach()
else()
register_command(
TARGET
bun-js-modules
COMMENT
"Generating JavaScript modules"
COMMAND
${BUN_EXECUTABLE}
${BUN_FLAGS}
run
${BUN_JAVASCRIPT_CODEGEN_SCRIPT}
--debug=${DEBUG}
${BUILD_PATH}
SOURCES
${BUN_JAVASCRIPT_SOURCES}
${BUN_JAVASCRIPT_CODEGEN_SOURCES}
register_command(
TARGET
bun-js-modules
COMMENT
"Generating JavaScript modules"
COMMAND
${BUN_EXECUTABLE}
run
${BUN_JAVASCRIPT_CODEGEN_SCRIPT}
OUTPUTS
${BUN_JAVASCRIPT_OUTPUTS}
)
endif()
--debug=${DEBUG}
${BUILD_PATH}
SOURCES
${BUN_JAVASCRIPT_SOURCES}
${BUN_JAVASCRIPT_CODEGEN_SOURCES}
${BUN_JAVASCRIPT_CODEGEN_SCRIPT}
OUTPUTS
${BUN_JAVASCRIPT_OUTPUTS}
)
set(BUN_BAKE_RUNTIME_CODEGEN_SCRIPT ${CWD}/src/codegen/bake-codegen.ts)
@@ -411,7 +392,6 @@ register_command(
"Bundling Bake Runtime"
COMMAND
${BUN_EXECUTABLE}
${BUN_FLAGS}
run
${BUN_BAKE_RUNTIME_CODEGEN_SCRIPT}
--debug=${DEBUG}
@@ -435,7 +415,7 @@ string(REPLACE ";" "," BUN_BINDGENV2_SOURCES_COMMA_SEPARATED
"${BUN_BINDGENV2_SOURCES}")
execute_process(
COMMAND ${BUN_EXECUTABLE} ${BUN_FLAGS} run ${BUN_BINDGENV2_SCRIPT}
COMMAND ${BUN_EXECUTABLE} run ${BUN_BINDGENV2_SCRIPT}
--command=list-outputs
--sources=${BUN_BINDGENV2_SOURCES_COMMA_SEPARATED}
--codegen-path=${CODEGEN_PATH}
@@ -458,7 +438,7 @@ register_command(
COMMENT
"Generating bindings (v2)"
COMMAND
${BUN_EXECUTABLE} ${BUN_FLAGS} run ${BUN_BINDGENV2_SCRIPT}
${BUN_EXECUTABLE} run ${BUN_BINDGENV2_SCRIPT}
--command=generate
--codegen-path=${CODEGEN_PATH}
--sources=${BUN_BINDGENV2_SOURCES_COMMA_SEPARATED}
@@ -489,7 +469,6 @@ register_command(
"Processing \".bind.ts\" files"
COMMAND
${BUN_EXECUTABLE}
${BUN_FLAGS}
run
${BUN_BINDGEN_SCRIPT}
--debug=${DEBUG}
@@ -522,7 +501,6 @@ register_command(
"Generating JSSink.{cpp,h}"
COMMAND
${BUN_EXECUTABLE}
${BUN_FLAGS}
run
${BUN_JS_SINK_SCRIPT}
${CODEGEN_PATH}
@@ -595,7 +573,6 @@ foreach(i RANGE 0 ${BUN_OBJECT_LUT_SOURCES_MAX_INDEX})
${BUN_OBJECT_LUT_SOURCE}
COMMAND
${BUN_EXECUTABLE}
${BUN_FLAGS}
run
${BUN_OBJECT_LUT_SCRIPT}
${BUN_OBJECT_LUT_SOURCE}
@@ -679,10 +656,6 @@ endif()
if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm|ARM|arm64|ARM64|aarch64|AARCH64")
if(APPLE)
set(ZIG_CPU "apple_m1")
elseif(WIN32)
# Windows ARM64: use a specific CPU with NEON support
# Zig running under x64 emulation would detect the wrong CPU with "native"
set(ZIG_CPU "cortex_a76")
else()
set(ZIG_CPU "native")
endif()
@@ -721,7 +694,6 @@ register_command(
-Denable_asan=$<IF:$<BOOL:${ENABLE_ZIG_ASAN}>,true,false>
-Denable_fuzzilli=$<IF:$<BOOL:${ENABLE_FUZZILLI}>,true,false>
-Denable_valgrind=$<IF:$<BOOL:${ENABLE_VALGRIND}>,true,false>
-Denable_tinycc=$<IF:$<BOOL:${ENABLE_TINYCC}>,true,false>
-Duse_mimalloc=$<IF:$<BOOL:${USE_MIMALLOC_AS_DEFAULT_ALLOCATOR}>,true,false>
-Dllvm_codegen_threads=${LLVM_ZIG_CODEGEN_THREADS}
-Dversion=${VERSION}
@@ -1239,7 +1211,7 @@ if(BUN_LINK_ONLY)
WEBKIT_DOWNLOAD_URL=${WEBKIT_DOWNLOAD_URL}
WEBKIT_VERSION=${WEBKIT_VERSION}
ZIG_COMMIT=${ZIG_COMMIT}
${BUN_EXECUTABLE} ${BUN_FLAGS} ${CWD}/scripts/create-link-metadata.mjs ${BUILD_PATH} ${bun}
${BUN_EXECUTABLE} ${CWD}/scripts/create-link-metadata.mjs ${BUILD_PATH} ${bun}
SOURCES
${BUN_ZIG_OUTPUT}
${BUN_CPP_OUTPUT}
@@ -1338,9 +1310,6 @@ if(WIN32)
wsock32 # ws2_32 required by TransmitFile aka sendfile on windows
delayimp.lib
)
# Required for static ICU linkage - without this, ICU headers expect DLL linkage
# which causes ABI mismatch and crashes (STATUS_STACK_BUFFER_OVERRUN)
target_compile_definitions(${bun} PRIVATE U_STATIC_IMPLEMENTATION)
endif()
# --- Packaging ---

View File

@@ -20,15 +20,6 @@ set(HIGHWAY_CMAKE_ARGS
-DHWY_ENABLE_INSTALL=OFF
)
# On Windows ARM64 with clang-cl, the __ARM_NEON macro isn't defined by default
# but NEON intrinsics are supported. Define it so Highway can detect NEON support.
if(WIN32 AND CMAKE_SYSTEM_PROCESSOR MATCHES "ARM64|aarch64|AARCH64")
list(APPEND HIGHWAY_CMAKE_ARGS
-DCMAKE_C_FLAGS=-D__ARM_NEON=1
-DCMAKE_CXX_FLAGS=-D__ARM_NEON=1
)
endif()
register_cmake_command(
TARGET
highway

View File

@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
cloudflare/lol-html
COMMIT
e9e16dca48dd4a8ffbc77642bc4be60407585f11
d64457d9ff0143deef025d5df7e8586092b9afb7
)
set(LOLHTML_CWD ${VENDOR_PATH}/lolhtml/c-api)
@@ -33,37 +33,6 @@ if (NOT WIN32)
set(RUSTFLAGS "-Cpanic=abort-Cdebuginfo=0-Cforce-unwind-tables=no-Copt-level=s")
endif()
# On Windows, ensure MSVC link.exe is used instead of Git's link.exe
set(LOLHTML_ENV
CARGO_TERM_COLOR=always
CARGO_TERM_VERBOSE=true
CARGO_TERM_DIAGNOSTIC=true
CARGO_ENCODED_RUSTFLAGS=${RUSTFLAGS}
CARGO_HOME=${CARGO_HOME}
RUSTUP_HOME=${RUSTUP_HOME}
)
if(WIN32)
# On Windows, tell Rust to use MSVC link.exe directly via the target-specific linker env var.
# This avoids Git's /usr/bin/link being found first in PATH.
# Find the MSVC link.exe from Visual Studio installation
file(GLOB MSVC_VERSIONS "C:/Program Files/Microsoft Visual Studio/2022/*/VC/Tools/MSVC/*")
if(MSVC_VERSIONS)
list(GET MSVC_VERSIONS -1 MSVC_LATEST) # Get the latest version
if(CMAKE_SYSTEM_PROCESSOR MATCHES "ARM64|aarch64")
set(MSVC_LINK_PATH "${MSVC_LATEST}/bin/HostARM64/arm64/link.exe")
set(CARGO_LINKER_VAR "CARGO_TARGET_AARCH64_PC_WINDOWS_MSVC_LINKER")
else()
set(MSVC_LINK_PATH "${MSVC_LATEST}/bin/Hostx64/x64/link.exe")
set(CARGO_LINKER_VAR "CARGO_TARGET_X86_64_PC_WINDOWS_MSVC_LINKER")
endif()
if(EXISTS "${MSVC_LINK_PATH}")
list(APPEND LOLHTML_ENV "${CARGO_LINKER_VAR}=${MSVC_LINK_PATH}")
message(STATUS "lolhtml: Using MSVC link.exe: ${MSVC_LINK_PATH}")
endif()
endif()
endif()
register_command(
TARGET
lolhtml
@@ -76,7 +45,12 @@ register_command(
ARTIFACTS
${LOLHTML_LIBRARY}
ENVIRONMENT
${LOLHTML_ENV}
CARGO_TERM_COLOR=always
CARGO_TERM_VERBOSE=true
CARGO_TERM_DIAGNOSTIC=true
CARGO_ENCODED_RUSTFLAGS=${RUSTFLAGS}
CARGO_HOME=${CARGO_HOME}
RUSTUP_HOME=${RUSTUP_HOME}
)
target_link_libraries(${bun} PRIVATE ${LOLHTML_LIBRARY})

View File

@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
oven-sh/mimalloc
COMMIT
989115cefb6915baa13788cb8252d83aac5330ad
1beadf9651a7bfdec6b5367c380ecc3fe1c40d1a
)
set(MIMALLOC_CMAKE_ARGS
@@ -14,7 +14,7 @@ set(MIMALLOC_CMAKE_ARGS
-DMI_BUILD_TESTS=OFF
-DMI_USE_CXX=ON
-DMI_SKIP_COLLECT_ON_EXIT=ON
# ```
# mimalloc_allow_large_os_pages=0 BUN_PORT=3004 mem bun http-hello.js
# Started development server: http://localhost:3004
@@ -51,7 +51,7 @@ if(ENABLE_ASAN)
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_DEBUG_UBSAN=ON)
elseif(APPLE OR LINUX)
if(APPLE)
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OVERRIDE=OFF)
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OVERRIDE=OFF)
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OSX_ZONE=OFF)
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OSX_INTERPOSE=OFF)
else()
@@ -77,9 +77,9 @@ endif()
if(WIN32)
if(DEBUG)
set(MIMALLOC_LIBRARY mimalloc-debug)
set(MIMALLOC_LIBRARY mimalloc-static-debug)
else()
set(MIMALLOC_LIBRARY mimalloc)
set(MIMALLOC_LIBRARY mimalloc-static)
endif()
elseif(DEBUG)
if (ENABLE_ASAN)

View File

@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
oven-sh/tinycc
COMMIT
12882eee073cfe5c7621bcfadf679e1372d4537b
29985a3b59898861442fa3b43f663fc1af2591d7
)
register_cmake_command(

View File

@@ -1,20 +0,0 @@
set(CMAKE_SYSTEM_NAME Windows)
set(CMAKE_SYSTEM_PROCESSOR aarch64)
set(CMAKE_C_COMPILER_WORKS ON)
set(CMAKE_CXX_COMPILER_WORKS ON)
# Force ARM64 architecture ID - this is what CMake uses to determine /machine: flag
set(MSVC_C_ARCHITECTURE_ID ARM64 CACHE INTERNAL "")
set(MSVC_CXX_ARCHITECTURE_ID ARM64 CACHE INTERNAL "")
# CMake 4.0+ policy CMP0197 controls how MSVC machine type flags are handled
set(CMAKE_POLICY_DEFAULT_CMP0197 NEW CACHE INTERNAL "")
# Clear any inherited static linker flags that might have wrong machine types
set(CMAKE_STATIC_LINKER_FLAGS "" CACHE STRING "" FORCE)
# Use wrapper script for llvm-lib that strips /machine:x64 flags
# This works around CMake 4.1.0 bug where both ARM64 and x64 machine flags are added
get_filename_component(_TOOLCHAIN_DIR "${CMAKE_CURRENT_LIST_DIR}" DIRECTORY)
set(CMAKE_AR "${_TOOLCHAIN_DIR}/scripts/llvm-lib-wrapper.bat" CACHE FILEPATH "" FORCE)

View File

@@ -6,8 +6,7 @@ endif()
optionx(BUILDKITE_ORGANIZATION_SLUG STRING "The organization slug to use on Buildkite" DEFAULT "bun")
optionx(BUILDKITE_PIPELINE_SLUG STRING "The pipeline slug to use on Buildkite" DEFAULT "bun")
optionx(BUILDKITE_BUILD_ID STRING "The build ID (UUID) to use on Buildkite")
optionx(BUILDKITE_BUILD_NUMBER STRING "The build number to use on Buildkite")
optionx(BUILDKITE_BUILD_ID STRING "The build ID to use on Buildkite")
optionx(BUILDKITE_GROUP_ID STRING "The group ID to use on Buildkite")
if(ENABLE_BASELINE)
@@ -33,13 +32,7 @@ if(NOT BUILDKITE_BUILD_ID)
return()
endif()
# Use BUILDKITE_BUILD_NUMBER for the URL if available, as the UUID format causes a 302 redirect
# that CMake's file(DOWNLOAD) doesn't follow, resulting in an empty response.
if(BUILDKITE_BUILD_NUMBER)
setx(BUILDKITE_BUILD_URL https://buildkite.com/${BUILDKITE_ORGANIZATION_SLUG}/${BUILDKITE_PIPELINE_SLUG}/builds/${BUILDKITE_BUILD_NUMBER})
else()
setx(BUILDKITE_BUILD_URL https://buildkite.com/${BUILDKITE_ORGANIZATION_SLUG}/${BUILDKITE_PIPELINE_SLUG}/builds/${BUILDKITE_BUILD_ID})
endif()
setx(BUILDKITE_BUILD_URL https://buildkite.com/${BUILDKITE_ORGANIZATION_SLUG}/${BUILDKITE_PIPELINE_SLUG}/builds/${BUILDKITE_BUILD_ID})
setx(BUILDKITE_BUILD_PATH ${BUILDKITE_BUILDS_PATH}/builds/${BUILDKITE_BUILD_ID})
file(
@@ -55,16 +48,8 @@ if(NOT BUILDKITE_BUILD_STATUS EQUAL 0)
endif()
file(READ ${BUILDKITE_BUILD_PATH}/build.json BUILDKITE_BUILD)
# CMake's string(JSON ...) interprets escape sequences like \n, \r, \t.
# We need to escape these specific sequences while preserving valid JSON escapes like \" and \\.
# Strategy: Use a unique placeholder to protect \\ sequences, escape \n/\r/\t, then restore \\.
# This prevents \\n (literal backslash + n) from being corrupted to \\\n.
set(BKSLASH_PLACEHOLDER "___BKSLASH_PLACEHOLDER_7f3a9b2c___")
string(REPLACE "\\\\" "${BKSLASH_PLACEHOLDER}" BUILDKITE_BUILD "${BUILDKITE_BUILD}")
string(REPLACE "\\n" "\\\\n" BUILDKITE_BUILD "${BUILDKITE_BUILD}")
string(REPLACE "\\r" "\\\\r" BUILDKITE_BUILD "${BUILDKITE_BUILD}")
string(REPLACE "\\t" "\\\\t" BUILDKITE_BUILD "${BUILDKITE_BUILD}")
string(REPLACE "${BKSLASH_PLACEHOLDER}" "\\\\" BUILDKITE_BUILD "${BUILDKITE_BUILD}")
# Escape backslashes so CMake doesn't interpret JSON escape sequences (e.g., \n in commit messages)
string(REPLACE "\\" "\\\\" BUILDKITE_BUILD "${BUILDKITE_BUILD}")
string(JSON BUILDKITE_BUILD_UUID GET ${BUILDKITE_BUILD} id)
string(JSON BUILDKITE_JOBS GET ${BUILDKITE_BUILD} jobs)

View File

@@ -17,14 +17,6 @@ if (NOT CI)
set(BUN_EXECUTABLE ${BUN_EXECUTABLE} CACHE FILEPATH "Bun executable" FORCE)
endif()
# On Windows ARM64, we need to add --smol flag to avoid crashes when running
# x64 bun under WoW64 emulation
if(WIN32 AND ARCH STREQUAL "aarch64")
set(BUN_FLAGS "--smol" CACHE STRING "Extra flags for bun executable")
else()
set(BUN_FLAGS "" CACHE STRING "Extra flags for bun executable")
endif()
# If this is not set, some advanced features are not checked.
# https://github.com/oven-sh/bun/blob/cd7f6a1589db7f1e39dc4e3f4a17234afbe7826c/src/bun.js/javascript.zig#L1069-L1072
setenv(BUN_GARBAGE_COLLECTOR_LEVEL 1)

View File

@@ -12,13 +12,7 @@ if(NOT ENABLE_LLVM)
return()
endif()
# LLVM 21 is required for Windows ARM64 (first version with ARM64 Windows builds)
# Other platforms use LLVM 19.1.7
if(WIN32 AND CMAKE_SYSTEM_PROCESSOR MATCHES "ARM64|aarch64|AARCH64")
set(DEFAULT_LLVM_VERSION "21.1.8")
else()
set(DEFAULT_LLVM_VERSION "19.1.7")
endif()
set(DEFAULT_LLVM_VERSION "19.1.7")
optionx(LLVM_VERSION STRING "The version of LLVM to use" DEFAULT ${DEFAULT_LLVM_VERSION})

View File

@@ -2,14 +2,10 @@ option(WEBKIT_VERSION "The version of WebKit to use")
option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading")
if(NOT WEBKIT_VERSION)
set(WEBKIT_VERSION daf95b4b4574799ff22c8c4effd0dc6e864968a5)
set(WEBKIT_VERSION 863778130931e0081a688f48e8479b8ee61b9507)
endif()
# Use preview build URL for Windows ARM64 until the fix is merged to main
set(WEBKIT_PREVIEW_PR 140)
string(SUBSTRING ${WEBKIT_VERSION} 0 16 WEBKIT_VERSION_PREFIX)
string(SUBSTRING ${WEBKIT_VERSION} 0 8 WEBKIT_VERSION_SHORT)
if(WEBKIT_LOCAL)
set(DEFAULT_WEBKIT_PATH ${VENDOR_PATH}/WebKit/WebKitBuild/${CMAKE_BUILD_TYPE})
@@ -37,25 +33,9 @@ if(WEBKIT_LOCAL)
${WEBKIT_PATH}/JavaScriptCore/PrivateHeaders
${WEBKIT_PATH}/bmalloc/Headers
${WEBKIT_PATH}/WTF/Headers
${WEBKIT_PATH}/JavaScriptCore/PrivateHeaders/JavaScriptCore
${WEBKIT_PATH}/JavaScriptCore/DerivedSources/inspector
${WEBKIT_PATH}/JavaScriptCore/PrivateHeaders/JavaScriptCore
)
# On Windows, add ICU include path from vcpkg
if(WIN32)
# Auto-detect vcpkg triplet
set(VCPKG_ARM64_PATH ${VENDOR_PATH}/WebKit/vcpkg_installed/arm64-windows-static)
set(VCPKG_X64_PATH ${VENDOR_PATH}/WebKit/vcpkg_installed/x64-windows-static)
if(EXISTS ${VCPKG_ARM64_PATH})
set(VCPKG_ICU_PATH ${VCPKG_ARM64_PATH})
else()
set(VCPKG_ICU_PATH ${VCPKG_X64_PATH})
endif()
if(EXISTS ${VCPKG_ICU_PATH}/include)
include_directories(${VCPKG_ICU_PATH}/include)
message(STATUS "Using ICU from vcpkg: ${VCPKG_ICU_PATH}/include")
endif()
endif()
endif()
# After this point, only prebuilt WebKit is supported
@@ -72,7 +52,7 @@ else()
message(FATAL_ERROR "Unsupported operating system: ${CMAKE_SYSTEM_NAME}")
endif()
if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm64|ARM64|aarch64|AARCH64")
if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm64|aarch64")
set(WEBKIT_ARCH "arm64")
elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "amd64|x86_64|x64|AMD64")
set(WEBKIT_ARCH "amd64")
@@ -101,14 +81,7 @@ endif()
setx(WEBKIT_NAME bun-webkit-${WEBKIT_OS}-${WEBKIT_ARCH}${WEBKIT_SUFFIX})
set(WEBKIT_FILENAME ${WEBKIT_NAME}.tar.gz)
if(WEBKIT_VERSION MATCHES "^autobuild-")
set(WEBKIT_TAG ${WEBKIT_VERSION})
else()
set(WEBKIT_TAG autobuild-${WEBKIT_VERSION})
endif()
setx(WEBKIT_DOWNLOAD_URL https://github.com/oven-sh/WebKit/releases/download/${WEBKIT_TAG}/${WEBKIT_FILENAME})
setx(WEBKIT_DOWNLOAD_URL https://github.com/oven-sh/WebKit/releases/download/autobuild-${WEBKIT_VERSION}/${WEBKIT_FILENAME})
if(EXISTS ${WEBKIT_PATH}/package.json)
file(READ ${WEBKIT_PATH}/package.json WEBKIT_PACKAGE_JSON)

View File

@@ -1,4 +1,4 @@
if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm64|ARM64|aarch64|AARCH64")
if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm64|aarch64")
set(DEFAULT_ZIG_ARCH "aarch64")
elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "amd64|x86_64|x64|AMD64")
set(DEFAULT_ZIG_ARCH "x86_64")

View File

@@ -35,8 +35,8 @@ end
set -l bun_install_boolean_flags yarn production optional development no-save dry-run force no-cache silent verbose global
set -l bun_install_boolean_flags_descriptions "Write a yarn.lock file (yarn v1)" "Don't install devDependencies" "Add dependency to optionalDependencies" "Add dependency to devDependencies" "Don't update package.json or save a lockfile" "Don't install anything" "Always request the latest versions from the registry & reinstall all dependencies" "Ignore manifest cache entirely" "Don't output anything" "Excessively verbose logging" "Use global folder"
set -l bun_builtin_cmds_without_run dev create help bun upgrade discord install remove add update init pm x
set -l bun_builtin_cmds_accepting_flags create help bun upgrade discord run init link unlink pm x update
set -l bun_builtin_cmds_without_run dev create help bun upgrade discord install remove add init pm x
set -l bun_builtin_cmds_accepting_flags create help bun upgrade discord run init link unlink pm x
function __bun_complete_bins_scripts --inherit-variable bun_builtin_cmds_without_run -d "Emit bun completions for bins and scripts"
# Do nothing if we already have a builtin subcommand,
@@ -148,14 +148,14 @@ complete -c bun \
for i in (seq (count $bun_install_boolean_flags))
complete -c bun \
-n "__fish_seen_subcommand_from install add remove update" -l "$bun_install_boolean_flags[$i]" -d "$bun_install_boolean_flags_descriptions[$i]"
-n "__fish_seen_subcommand_from install add remove" -l "$bun_install_boolean_flags[$i]" -d "$bun_install_boolean_flags_descriptions[$i]"
end
complete -c bun \
-n "__fish_seen_subcommand_from install add remove update" -l 'cwd' -d 'Change working directory'
-n "__fish_seen_subcommand_from install add remove" -l 'cwd' -d 'Change working directory'
complete -c bun \
-n "__fish_seen_subcommand_from install add remove update" -l 'cache-dir' -d 'Choose a cache directory (default: $HOME/.bun/install/cache)'
-n "__fish_seen_subcommand_from install add remove" -l 'cache-dir' -d 'Choose a cache directory (default: $HOME/.bun/install/cache)'
complete -c bun \
-n "__fish_seen_subcommand_from add" -d 'Popular' -a '(__fish__get_bun_packages)'
@@ -183,5 +183,4 @@ complete -c bun -n "__fish_use_subcommand" -a "unlink" -d "Unregister a local np
complete -c bun -n "__fish_use_subcommand" -a "pm" -d "Additional package management utilities" -f
complete -c bun -n "__fish_use_subcommand" -a "x" -d "Execute a package binary, installing if needed" -f
complete -c bun -n "__fish_use_subcommand" -a "outdated" -d "Display the latest versions of outdated dependencies" -f
complete -c bun -n "__fish_use_subcommand" -a "update" -d "Update dependencies to their latest versions" -f
complete -c bun -n "__fish_use_subcommand" -a "publish" -d "Publish your package from local to npm" -f

View File

@@ -121,7 +121,6 @@
"/runtime/file-io",
"/runtime/streams",
"/runtime/binary-data",
"/runtime/archive",
"/runtime/sql",
"/runtime/sqlite",
"/runtime/s3",
@@ -150,7 +149,6 @@
"/runtime/secrets",
"/runtime/console",
"/runtime/yaml",
"/runtime/jsonl",
"/runtime/html-rewriter",
"/runtime/hashing",
"/runtime/glob",

View File

@@ -26,6 +26,21 @@ The `bun` CLI contains a Node.js-compatible package manager designed to be a dra
</Note>
<Accordion title="For Linux users">
The recommended minimum Linux Kernel version is 5.6. If you're on Linux kernel 5.1 - 5.5, `bun install` will work, but HTTP requests will be slow due to a lack of support for io_uring's `connect()` operation.
If you're using Ubuntu 20.04, here's how to install a [newer kernel](https://wiki.ubuntu.com/Kernel/LTSEnablementStack):
```bash terminal icon="terminal"
# If this returns a version >= 5.6, you don't need to do anything
uname -r
# Install the official Ubuntu hardware enablement kernel
sudo apt install --install-recommends linux-generic-hwe-20.04
```
</Accordion>
To install all dependencies of a project:
```bash terminal icon="terminal"

View File

@@ -35,7 +35,7 @@ winget install "Visual Studio Community 2022" --override "--add Microsoft.Visual
After Visual Studio, you need the following:
- LLVM (19.1.7 for x64, 21.1.8 for ARM64)
- LLVM 19.1.7
- Go
- Rust
- NASM
@@ -47,35 +47,25 @@ After Visual Studio, you need the following:
[Scoop](https://scoop.sh) can be used to install these remaining tools easily.
```ps1 Scoop (x64)
```ps1 Scoop
irm https://get.scoop.sh | iex
scoop install nodejs-lts go rust nasm ruby perl ccache
# scoop seems to be buggy if you install llvm and the rest at the same time
scoop install llvm@19.1.7
```
For Windows ARM64, download LLVM 21.1.8 directly from GitHub releases (first version with ARM64 Windows builds):
```ps1 ARM64
# Download and install LLVM for ARM64
Invoke-WebRequest -Uri "https://github.com/llvm/llvm-project/releases/download/llvmorg-21.1.8/LLVM-21.1.8-woa64.exe" -OutFile "$env:TEMP\LLVM-21.1.8-woa64.exe"
Start-Process -FilePath "$env:TEMP\LLVM-21.1.8-woa64.exe" -ArgumentList "/S" -Wait
```
<Note>
Please do not use WinGet or other package managers for these, as you will likely install Strawberry Perl instead of a more
minimal installation of Perl. Strawberry Perl installs many other utilities into `$Env:PATH` that
conflict with MSVC and break the build.
</Note>
If you intend on building WebKit locally (optional, x64 only), you should install these packages:
If you intend on building WebKit locally (optional), you should install these packages:
```ps1 Scoop
scoop install make cygwin python
```
<Note>Cygwin is not required for ARM64 builds as WebKit is provided as a pre-built binary.</Note>
From here on out, it is **expected that you use a PowerShell Terminal with `.\scripts\vs-shell.ps1` sourced**. This script is available in the Bun repository and can be loaded by executing it:
```ps1

View File

@@ -1,452 +0,0 @@
---
title: Archive
description: Create and extract tar archives with Bun's fast native implementation
---
Bun provides a fast, native implementation for working with tar archives through `Bun.Archive`. It supports creating archives from in-memory data, extracting archives to disk, and reading archive contents without extraction.
## Quickstart
**Create an archive from files:**
```ts
const archive = new Bun.Archive({
"hello.txt": "Hello, World!",
"data.json": JSON.stringify({ foo: "bar" }),
"nested/file.txt": "Nested content",
});
// Write to disk
await Bun.write("bundle.tar", archive);
```
**Extract an archive:**
```ts
const tarball = await Bun.file("package.tar.gz").bytes();
const archive = new Bun.Archive(tarball);
const entryCount = await archive.extract("./output");
console.log(`Extracted ${entryCount} entries`);
```
**Read archive contents without extracting:**
```ts
const tarball = await Bun.file("package.tar.gz").bytes();
const archive = new Bun.Archive(tarball);
const files = await archive.files();
for (const [path, file] of files) {
console.log(`${path}: ${await file.text()}`);
}
```
## Creating Archives
Use `new Bun.Archive()` to create an archive from an object where keys are file paths and values are file contents. By default, archives are uncompressed:
```ts
// Creates an uncompressed tar archive (default)
const archive = new Bun.Archive({
"README.md": "# My Project",
"src/index.ts": "console.log('Hello');",
"package.json": JSON.stringify({ name: "my-project" }),
});
```
File contents can be:
- **Strings** - Text content
- **Blobs** - Binary data
- **ArrayBufferViews** (e.g., `Uint8Array`) - Raw bytes
- **ArrayBuffers** - Raw binary data
```ts
const data = "binary data";
const arrayBuffer = new ArrayBuffer(8);
const archive = new Bun.Archive({
"text.txt": "Plain text",
"blob.bin": new Blob([data]),
"bytes.bin": new Uint8Array([1, 2, 3, 4]),
"buffer.bin": arrayBuffer,
});
```
### Writing Archives to Disk
Use `Bun.write()` to write an archive to disk:
```ts
// Write uncompressed tar (default)
const archive = new Bun.Archive({
"file1.txt": "content1",
"file2.txt": "content2",
});
await Bun.write("output.tar", archive);
// Write gzipped tar
const compressed = new Bun.Archive({ "src/index.ts": "console.log('Hello');" }, { compress: "gzip" });
await Bun.write("output.tar.gz", compressed);
```
### Getting Archive Bytes
Get the archive data as bytes or a Blob:
```ts
const archive = new Bun.Archive({ "hello.txt": "Hello, World!" });
// As Uint8Array
const bytes = await archive.bytes();
// As Blob
const blob = await archive.blob();
// With gzip compression (set at construction)
const gzipped = new Bun.Archive({ "hello.txt": "Hello, World!" }, { compress: "gzip" });
const gzippedBytes = await gzipped.bytes();
const gzippedBlob = await gzipped.blob();
```
## Extracting Archives
### From Existing Archive Data
Create an archive from existing tar/tar.gz data:
```ts
// From a file
const tarball = await Bun.file("package.tar.gz").bytes();
const archiveFromFile = new Bun.Archive(tarball);
```
```ts
// From a fetch response
const response = await fetch("https://example.com/archive.tar.gz");
const archiveFromFetch = new Bun.Archive(await response.blob());
```
### Extracting to Disk
Use `.extract()` to write all files to a directory:
```ts
const tarball = await Bun.file("package.tar.gz").bytes();
const archive = new Bun.Archive(tarball);
const count = await archive.extract("./extracted");
console.log(`Extracted ${count} entries`);
```
The target directory is created automatically if it doesn't exist. Existing files are overwritten. The returned count includes files, directories, and symlinks (on POSIX systems).
**Note**: On Windows, symbolic links in archives are always skipped during extraction. Bun does not attempt to create them regardless of privilege level. On Linux and macOS, symlinks are extracted normally.
**Security note**: Bun.Archive validates paths during extraction, rejecting absolute paths (POSIX `/`, Windows drive letters like `C:\` or `C:/`, and UNC paths like `\\server\share`). Path traversal components (`..`) are normalized away (e.g., `dir/sub/../file` becomes `dir/file`) to prevent directory escape attacks.
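As a minimal sketch of this normalization (assuming the constructor accepts the raw `..` path unchanged; the entry name is hypothetical):
```ts
// Hypothetical traversal entry; per the note above it is normalized on extraction
const risky = new Bun.Archive({ "dir/sub/../escape.txt": "contents" });
await Bun.write("risky.tar", risky);
const archive = new Bun.Archive(await Bun.file("risky.tar").bytes());
await archive.extract("./out");
// Expected location: ./out/dir/escape.txt, never outside ./out
```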
### Filtering Extracted Files
Use glob patterns to extract only specific files. Patterns are matched against archive entry paths normalized to use forward slashes (`/`). Positive patterns specify what to include, and negative patterns (prefixed with `!`) specify what to exclude. Negative patterns are applied after positive patterns, so **using only negative patterns will match nothing** (you must include a positive pattern like `**` first):
```ts
const tarball = await Bun.file("package.tar.gz").bytes();
const archive = new Bun.Archive(tarball);
// Extract only TypeScript files
const tsCount = await archive.extract("./extracted", { glob: "**/*.ts" });
// Extract files from multiple directories
const multiCount = await archive.extract("./extracted", {
glob: ["src/**", "lib/**"],
});
```
Use negative patterns (prefixed with `!`) to exclude files. When mixing positive and negative patterns, entries must match at least one positive pattern and not match any negative pattern:
```ts
// Extract everything except node_modules
const distCount = await archive.extract("./extracted", {
glob: ["**", "!node_modules/**"],
});
// Extract source files but exclude tests
const srcCount = await archive.extract("./extracted", {
glob: ["src/**", "!**/*.test.ts", "!**/__tests__/**"],
});
```
## Reading Archive Contents
### Get All Files
Use `.files()` to get archive contents as a `Map` of `File` objects without extracting to disk. Unlike `extract()` which processes all entry types, `files()` returns only regular files (no directories):
```ts
const tarball = await Bun.file("package.tar.gz").bytes();
const archive = new Bun.Archive(tarball);
const files = await archive.files();
for (const [path, file] of files) {
console.log(`${path}: ${file.size} bytes`);
console.log(await file.text());
}
```
Each `File` object includes:
- `name` - The file path within the archive (always uses forward slashes `/` as separators)
- `size` - File size in bytes
- `lastModified` - Modification timestamp
- Standard `Blob` methods: `text()`, `arrayBuffer()`, `stream()`, etc.
**Note**: `files()` loads file contents into memory. For large archives, consider using `extract()` to write directly to disk instead.
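For instance, to avoid materializing every file of a large archive in memory, one can extract only the needed subtree and read from disk (the paths here are hypothetical):
```ts
const archive = new Bun.Archive(await Bun.file("big.tar.gz").bytes());
// Write only the docs/ subtree to disk instead of loading all contents as Files
await archive.extract("./big-out", { glob: "docs/**" });
const intro = await Bun.file("./big-out/docs/intro.md").text(); // hypothetical path
```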
### Error Handling
Archive operations can fail due to corrupted data, I/O errors, or invalid paths. Use try/catch to handle these cases:
```ts
try {
const tarball = await Bun.file("package.tar.gz").bytes();
const archive = new Bun.Archive(tarball);
const count = await archive.extract("./output");
console.log(`Extracted ${count} entries`);
} catch (e: unknown) {
if (e instanceof Error) {
const error = e as Error & { code?: string };
if (error.code === "EACCES") {
console.error("Permission denied");
} else if (error.code === "ENOSPC") {
console.error("Disk full");
} else {
console.error("Archive error:", error.message);
}
} else {
console.error("Archive error:", String(e));
}
}
```
Common error scenarios:
- **Corrupted/truncated archives** - `new Archive()` loads the archive data; errors may be deferred until read/extract operations
- **Permission denied** - `extract()` throws if the target directory is not writable
- **Disk full** - `extract()` throws if there's insufficient space
- **Invalid paths** - Operations throw for malformed file paths
The count returned by `extract()` includes all successfully written entries (files, directories, and symlinks on POSIX systems).
**Security note**: Bun.Archive automatically validates paths during extraction. Absolute paths (POSIX `/`, Windows drive letters, UNC paths) and unsafe symlink targets are rejected. Path traversal components (`..`) are normalized away to prevent directory escape.
For additional security with untrusted archives, you can enumerate and validate paths before extraction:
```ts
const archive = new Bun.Archive(untrustedData);
const files = await archive.files();
// Optional: Custom validation for additional checks
for (const [path] of files) {
// Example: Reject hidden files
if (path.startsWith(".") || path.includes("/.")) {
throw new Error(`Hidden file rejected: ${path}`);
}
// Example: Whitelist specific directories
if (!path.startsWith("src/") && !path.startsWith("lib/")) {
throw new Error(`Unexpected path: ${path}`);
}
}
// Extract to a controlled destination
await archive.extract("./safe-output");
```
When using `files()` with a glob pattern, an empty `Map` is returned if no files match:
```ts
const matches = await archive.files("*.nonexistent");
if (matches.size === 0) {
console.log("No matching files found");
}
```
### Filtering with Glob Patterns
Pass a glob pattern to filter which files are returned:
```ts
// Get only TypeScript files
const tsFiles = await archive.files("**/*.ts");
// Get files in src directory
const srcFiles = await archive.files("src/*");
// Get all JSON files (recursive)
const jsonFiles = await archive.files("**/*.json");
// Get multiple file types with array of patterns
const codeFiles = await archive.files(["**/*.ts", "**/*.js"]);
```
Supported glob patterns (subset of [Bun.Glob](/docs/api/glob) syntax):
- `*` - Match any characters except `/`
- `**` - Match any characters including `/`
- `?` - Match single character
- `[abc]` - Match character set
- `{a,b}` - Match alternatives
- `!pattern` - Exclude files matching pattern (negation). Must be combined with positive patterns; using only negative patterns matches nothing (see the sketch below).
See [Bun.Glob](/docs/api/glob) for the full glob syntax including escaping and advanced patterns.
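A quick sketch of the negative-pattern pitfall noted above:
```ts
const archive = new Bun.Archive(await Bun.file("package.tar.gz").bytes());
// Only a negative pattern: nothing matches
const empty = await archive.files("!**/*.test.ts");
console.log(empty.size); // 0
// Pair it with a positive pattern to get "everything except tests"
const filtered = await archive.files(["**", "!**/*.test.ts"]);
console.log(filtered.size); // > 0 for a typical package
```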
## Compression
Bun.Archive creates uncompressed tar archives by default. Use `{ compress: "gzip" }` to enable gzip compression:
```ts
// Default: uncompressed tar
const archive = new Bun.Archive({ "hello.txt": "Hello, World!" });
// Reading: automatically detects gzip
const gzippedTarball = await Bun.file("archive.tar.gz").bytes();
const readArchive = new Bun.Archive(gzippedTarball);
// Enable gzip compression
const compressed = new Bun.Archive({ "hello.txt": "Hello, World!" }, { compress: "gzip" });
// Gzip with custom level (1-12)
const maxCompression = new Bun.Archive({ "hello.txt": "Hello, World!" }, { compress: "gzip", level: 12 });
```
The options accept:
- No options or `undefined` - Uncompressed tar (default)
- `{ compress: "gzip" }` - Enable gzip compression at level 6
- `{ compress: "gzip", level: number }` - Gzip with custom level 1-12 (1 = fastest, 12 = smallest)
## Examples
### Bundle Project Files
```ts
import { Glob } from "bun";
// Collect source files
const files: Record<string, string> = {};
const glob = new Glob("src/**/*.ts");
for await (const path of glob.scan(".")) {
// Normalize path separators to forward slashes for cross-platform compatibility
const archivePath = path.replaceAll("\\", "/");
files[archivePath] = await Bun.file(path).text();
}
// Add package.json
files["package.json"] = await Bun.file("package.json").text();
// Create compressed archive and write to disk
const archive = new Bun.Archive(files, { compress: "gzip" });
await Bun.write("bundle.tar.gz", archive);
```
### Extract and Process npm Package
```ts
const response = await fetch("https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz");
const archive = new Bun.Archive(await response.blob());
// Get package.json
const files = await archive.files("package/package.json");
const packageJson = files.get("package/package.json");
if (packageJson) {
const pkg = JSON.parse(await packageJson.text());
console.log(`Package: ${pkg.name}@${pkg.version}`);
}
```
### Create Archive from Directory
```ts
import { readdir } from "node:fs/promises";
import { join } from "node:path";
async function archiveDirectory(dir: string, compress = false): Promise<Bun.Archive> {
const files: Record<string, Blob> = {};
async function walk(currentDir: string, prefix: string = "") {
const entries = await readdir(currentDir, { withFileTypes: true });
for (const entry of entries) {
const fullPath = join(currentDir, entry.name);
const archivePath = prefix ? `${prefix}/${entry.name}` : entry.name;
if (entry.isDirectory()) {
await walk(fullPath, archivePath);
} else {
files[archivePath] = Bun.file(fullPath);
}
}
}
await walk(dir);
return new Bun.Archive(files, compress ? { compress: "gzip" } : undefined);
}
const archive = await archiveDirectory("./my-project", true);
await Bun.write("my-project.tar.gz", archive);
```
## Reference
> **Note**: The following type signatures are simplified for documentation purposes. See [`packages/bun-types/bun.d.ts`](https://github.com/oven-sh/bun/blob/main/packages/bun-types/bun.d.ts) for the full type definitions.
```ts
type ArchiveInput =
| Record<string, string | Blob | Bun.ArrayBufferView | ArrayBufferLike>
| Blob
| Bun.ArrayBufferView
| ArrayBufferLike;
type ArchiveOptions = {
/** Compression algorithm. Currently only "gzip" is supported. */
compress?: "gzip";
/** Compression level 1-12 (default 6 when gzip is enabled). */
level?: number;
};
interface ArchiveExtractOptions {
/** Glob pattern(s) to filter extraction. Supports negative patterns with "!" prefix. */
glob?: string | readonly string[];
}
class Archive {
/**
* Create an Archive from input data
* @param data - Files to archive (as object) or existing archive data (as bytes/blob)
* @param options - Compression options. Uncompressed by default.
* Pass { compress: "gzip" } to enable compression.
*/
constructor(data: ArchiveInput, options?: ArchiveOptions);
/**
* Extract archive to a directory
* @returns Number of entries extracted (files, directories, and symlinks)
*/
extract(path: string, options?: ArchiveExtractOptions): Promise<number>;
/**
* Get archive as a Blob (uses compression setting from constructor)
*/
blob(): Promise<Blob>;
/**
* Get archive as a Uint8Array (uses compression setting from constructor)
*/
bytes(): Promise<Uint8Array<ArrayBuffer>>;
/**
* Get archive contents as File objects (regular files only, no directories)
*/
files(glob?: string | readonly string[]): Promise<Map<string, File>>;
}
```

View File

@@ -358,8 +358,6 @@ Bun represents [pointers](<https://en.wikipedia.org/wiki/Pointer_(computer_progr
**Why not `BigInt`?** `BigInt` is slower. JavaScript engines heap-allocate each `BigInt`, which means it can't fit inline into a regular JavaScript value. If you pass a `BigInt` to a function, it will be converted to a `number`.
**Windows Note**: The Windows API type HANDLE does not represent a virtual address, and using `ptr` for it will _not_ work as expected. Use `u64` to safely represent HANDLE values.
</Accordion>
To convert from a `TypedArray` to a pointer:

View File

@@ -1,188 +0,0 @@
---
title: JSONL
description: Parse newline-delimited JSON (JSONL) with Bun's built-in streaming parser
---
Bun has built-in support for parsing [JSONL](https://jsonlines.org/) (newline-delimited JSON), where each line is a separate JSON value. The parser is implemented in C++ using JavaScriptCore's optimized JSON parser and supports streaming use cases.
```ts
const results = Bun.JSONL.parse('{"name":"Alice"}\n{"name":"Bob"}\n');
// [{ name: "Alice" }, { name: "Bob" }]
```
---
## `Bun.JSONL.parse()`
Parse a complete JSONL input and return an array of all parsed values.
```ts
import { JSONL } from "bun";
const input = '{"id":1,"name":"Alice"}\n{"id":2,"name":"Bob"}\n{"id":3,"name":"Charlie"}\n';
const records = JSONL.parse(input);
console.log(records);
// [
// { id: 1, name: "Alice" },
// { id: 2, name: "Bob" },
// { id: 3, name: "Charlie" }
// ]
```
Input can be a string or a `Uint8Array`:
```ts
const buffer = new TextEncoder().encode('{"a":1}\n{"b":2}\n');
const results = Bun.JSONL.parse(buffer);
// [{ a: 1 }, { b: 2 }]
```
When passed a `Uint8Array`, a UTF-8 BOM at the start of the buffer is automatically skipped.
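For example, a BOM-prefixed buffer parses the same as one without (a minimal sketch):
```ts
const body = new TextEncoder().encode('{"a":1}\n');
// Prepend a UTF-8 BOM (0xEF 0xBB 0xBF)
const withBom = new Uint8Array(3 + body.length);
withBom.set([0xef, 0xbb, 0xbf]);
withBom.set(body, 3);
console.log(Bun.JSONL.parse(withBom)); // [{ a: 1 }]
```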
### Error handling
If the input contains invalid JSON, `Bun.JSONL.parse()` throws a `SyntaxError`:
```ts
try {
Bun.JSONL.parse('{"valid":true}\n{invalid}\n');
} catch (error) {
console.error(error); // SyntaxError: Failed to parse JSONL
}
```
---
## `Bun.JSONL.parseChunk()`
For streaming scenarios, `parseChunk` parses as many complete values as possible from the input and reports how far it got. This is useful when receiving data incrementally (e.g., from a network stream) and you need to know where to resume parsing.
```ts
const chunk = '{"id":1}\n{"id":2}\n{"id":3';
const result = Bun.JSONL.parseChunk(chunk);
console.log(result.values); // [{ id: 1 }, { id: 2 }]
console.log(result.read); // 17 — characters consumed
console.log(result.done); // false — incomplete value remains
console.log(result.error); // null — no parse error
```
### Return value
`parseChunk` returns an object with four properties:
| Property | Type | Description |
| -------- | --------------------- | ----------------------------------------------------------------------- |
| `values` | `any[]` | Array of successfully parsed JSON values |
| `read` | `number` | Number of bytes (for `Uint8Array`) or characters (for strings) consumed |
| `done` | `boolean` | `true` if the entire input was consumed with no remaining data |
| `error` | `SyntaxError \| null` | Parse error, or `null` if no error occurred |
### Streaming example
Use `read` to slice off consumed input and carry forward the remainder:
```ts
let buffer = "";
async function processStream(stream: ReadableStream<string>) {
for await (const chunk of stream) {
buffer += chunk;
const result = Bun.JSONL.parseChunk(buffer);
for (const value of result.values) {
handleRecord(value);
}
// Keep only the unconsumed portion
buffer = buffer.slice(result.read);
}
// Handle any remaining data
if (buffer.length > 0) {
const final = Bun.JSONL.parseChunk(buffer);
for (const value of final.values) {
handleRecord(value);
}
if (final.error) {
console.error("Parse error in final chunk:", final.error.message);
}
}
}
```
### Byte offsets with `Uint8Array`
When the input is a `Uint8Array`, you can pass optional `start` and `end` byte offsets:
```ts
const buf = new TextEncoder().encode('{"a":1}\n{"b":2}\n{"c":3}\n');
// Parse starting from byte 8
const result = Bun.JSONL.parseChunk(buf, 8);
console.log(result.values); // [{ b: 2 }, { c: 3 }]
console.log(result.read); // 24
// Parse a specific range
const partial = Bun.JSONL.parseChunk(buf, 0, 8);
console.log(partial.values); // [{ a: 1 }]
```
The `read` value is always a byte offset into the original buffer, making it easy to use with `TypedArray.subarray()` for zero-copy streaming:
```ts
let buf = new Uint8Array(0);
async function processBinaryStream(stream: ReadableStream<Uint8Array>) {
for await (const chunk of stream) {
// Append chunk to buffer
const newBuf = new Uint8Array(buf.length + chunk.length);
newBuf.set(buf);
newBuf.set(chunk, buf.length);
buf = newBuf;
const result = Bun.JSONL.parseChunk(buf);
for (const value of result.values) {
handleRecord(value);
}
// Keep unconsumed bytes
buf = buf.slice(result.read);
}
}
```
### Error recovery
Unlike `parse()`, `parseChunk()` does not throw on invalid JSON. Instead, it returns the error in the `error` property, along with any values that were successfully parsed before the error:
```ts
const input = '{"a":1}\n{invalid}\n{"b":2}\n';
const result = Bun.JSONL.parseChunk(input);
console.log(result.values); // [{ a: 1 }] — values parsed before the error
console.log(result.error); // SyntaxError
console.log(result.read); // 7 — position up to last successful parse
```
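One possible recovery strategy is to skip past the offending line and resume; this helper is a sketch built on the `read` semantics above, not part of the API:
```ts
function parseSkippingBadLines(input: string): unknown[] {
  const out: unknown[] = [];
  let rest = input;
  while (rest.length > 0) {
    const { values, read, error } = Bun.JSONL.parseChunk(rest);
    out.push(...values);
    rest = rest.slice(read);
    if (!error) break; // fully parsed, or an incomplete trailing value remains
    // Drop everything up to and including the next newline, then retry
    const nl = rest.indexOf("\n");
    if (nl === -1) break;
    rest = rest.slice(nl + 1);
  }
  return out;
}
parseSkippingBadLines('{"a":1}\n{invalid}\n{"b":2}\n');
// [{ a: 1 }, { b: 2 }]
```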
---
## Supported value types
Each line can be any valid JSON value, not just objects:
```ts
const input = '42\n"hello"\ntrue\nnull\n[1,2,3]\n{"key":"value"}\n';
const values = Bun.JSONL.parse(input);
// [42, "hello", true, null, [1, 2, 3], { key: "value" }]
```
---
## Performance notes
- **ASCII fast path**: Pure ASCII input is parsed directly without copying, using a zero-allocation `StringView`.
- **UTF-8 support**: Non-ASCII `Uint8Array` input is decoded to UTF-16 using SIMD-accelerated conversion.
- **BOM handling**: UTF-8 BOM (`0xEF 0xBB 0xBF`) at the start of a `Uint8Array` is automatically skipped.
- **Pre-built object shape**: The result object from `parseChunk` uses a cached structure for fast property access.

View File

@@ -131,7 +131,6 @@
stdenv = pkgs.clangStdenv;
}) {
inherit packages;
hardeningDisable = [ "fortify" ];
shellHook = ''
# Set up build environment

View File

@@ -1,7 +1,7 @@
{
"private": true,
"name": "bun",
"version": "1.3.7",
"version": "1.3.6",
"workspaces": [
"./packages/bun-types",
"./packages/@types/bun"

View File

@@ -71,23 +71,8 @@ async function buildRootModule(dryRun?: boolean) {
js: "// Source code: https://github.com/oven-sh/bun/blob/main/packages/bun-release/scripts/npm-postinstall.ts",
},
});
// Create placeholder scripts that print an error message if postinstall hasn't run.
// On Unix, these are executed as shell scripts despite the .exe extension.
// On Windows, npm creates .cmd wrappers that would fail anyway if the binary isn't valid.
const placeholderScript = `#!/bin/sh
echo "Error: Bun's postinstall script was not run." >&2
echo "" >&2
echo "This occurs when using --ignore-scripts during installation, or when using a" >&2
echo "package manager like pnpm that does not run postinstall scripts by default." >&2
echo "" >&2
echo "To fix this, run the postinstall script manually:" >&2
echo " cd node_modules/bun && node install.js" >&2
echo "" >&2
echo "Or reinstall bun without the --ignore-scripts flag." >&2
exit 1
`;
write(join(cwd, "bin", "bun.exe"), placeholderScript);
write(join(cwd, "bin", "bunx.exe"), placeholderScript);
write(join(cwd, "bin", "bun.exe"), "");
write(join(cwd, "bin", "bunx.exe"), "");
write(
join(cwd, "bin", "README.txt"),
`The 'bun.exe' file is a placeholder for the binary file, which

View File

@@ -610,97 +610,6 @@ declare module "bun" {
*/
function stripANSI(input: string): string;
interface WrapAnsiOptions {
/**
* If `true`, break words in the middle if they don't fit on a line.
* If `false`, only break at word boundaries.
*
* @default false
*/
hard?: boolean;
/**
* If `true`, wrap at word boundaries when possible.
* If `false`, don't perform word wrapping (only wrap at explicit newlines).
*
* @default true
*/
wordWrap?: boolean;
/**
* If `true`, trim leading and trailing whitespace from each line.
* If `false`, preserve whitespace.
*
* @default true
*/
trim?: boolean;
/**
* If `true`, count ambiguous-width characters as 1 character wide.
* If `false`, count them as 2 characters wide.
*
* @default true
*/
ambiguousIsNarrow?: boolean;
}
/**
* Wrap a string to fit within the specified column width, preserving ANSI escape codes.
*
* This function is designed to be compatible with the popular "wrap-ansi" NPM package.
*
* Features:
* - Preserves ANSI escape codes (colors, styles) across line breaks
* - Supports SGR codes (colors, bold, italic, etc.) and OSC 8 hyperlinks
* - Respects Unicode display widths (full-width characters, emoji)
* - Word wrapping at word boundaries (configurable)
*
* @category Utilities
*
* @param input The string to wrap
* @param columns The maximum column width
* @param options Wrapping options
* @returns The wrapped string
*
* @example
* ```ts
* import { wrapAnsi } from "bun";
*
* console.log(wrapAnsi("hello world", 5));
* // Output:
* // hello
* // world
*
* // Preserves ANSI colors across line breaks
* console.log(wrapAnsi("\u001b[31mhello world\u001b[0m", 5));
* // Output:
* // \u001b[31mhello\u001b[0m
* // \u001b[31mworld\u001b[0m
*
* // Hard wrap long words
* console.log(wrapAnsi("abcdefghij", 3, { hard: true }));
* // Output:
* // abc
* // def
* // ghi
* // j
* ```
*/
function wrapAnsi(
/**
* The string to wrap
*/
input: string,
/**
* The maximum column width
*/
columns: number,
/**
* Wrapping options
*/
options?: WrapAnsiOptions,
): string;
/**
* TOML related APIs
*/
@@ -743,101 +652,6 @@ declare module "bun" {
export function parse(input: string): unknown;
}
/**
* JSONL (JSON Lines) related APIs.
*
* Each line of the input is expected to be a single valid JSON value; values are separated by newlines.
*/
namespace JSONL {
/**
* The result of `Bun.JSONL.parseChunk`.
*/
interface ParseChunkResult {
/** The successfully parsed JSON values. */
values: unknown[];
/** How much of the input was consumed. When the input is a string, this is a character offset. When the input is a `TypedArray`, this is a byte offset. Use `input.slice(read)` or `input.subarray(read)` to get the unconsumed remainder. */
read: number;
/** `true` if all input was consumed successfully. `false` if the input ends with an incomplete value or a parse error occurred. */
done: boolean;
/** A `SyntaxError` if a parse error occurred, otherwise `null`. Values parsed before the error are still available in `values`. */
error: SyntaxError | null;
}
/**
* Parse a JSONL (JSON Lines) string into an array of JavaScript values.
*
* If a parse error occurs and no values were successfully parsed, throws
* a `SyntaxError`. If values were parsed before the error, returns the
* successfully parsed values without throwing.
*
* Incomplete trailing values (e.g. from a partial chunk) are silently
* ignored and not included in the result.
*
* When a `TypedArray` is passed, the bytes are parsed directly without
* copying if the content is ASCII.
*
* @param input The JSONL string or typed array to parse
* @returns An array of parsed values
* @throws {SyntaxError} If the input starts with invalid JSON and no values could be parsed
*
* @example
* ```js
* const items = Bun.JSONL.parse('{"a":1}\n{"b":2}\n');
* // [{ a: 1 }, { b: 2 }]
*
* // From a Uint8Array (zero-copy for ASCII):
* const buf = new TextEncoder().encode('{"a":1}\n{"b":2}\n');
* const items = Bun.JSONL.parse(buf);
* // [{ a: 1 }, { b: 2 }]
*
* // Partial results on error after valid values:
* const partial = Bun.JSONL.parse('{"a":1}\n{bad}\n');
* // [{ a: 1 }]
*
* // Throws when no valid values precede the error:
* Bun.JSONL.parse('{bad}\n'); // throws SyntaxError
* ```
*/
export function parse(input: string | NodeJS.TypedArray | DataView<ArrayBuffer> | ArrayBufferLike): unknown[];
/**
* Parse a JSONL chunk, designed for streaming use.
*
* Never throws on parse errors. Instead, returns whatever values were
* successfully parsed along with an `error` property containing the
* `SyntaxError` (or `null` on success). Use `read` to determine how
* much input was consumed and `done` to check if all input was parsed.
*
* When a `TypedArray` is passed, the bytes are parsed directly without
* copying if the content is ASCII. Optional `start` and `end` parameters
* allow slicing without copying, and `read` will be a byte offset into
* the original typed array.
*
* @param input The JSONL string or typed array to parse
* @param start Byte offset to start parsing from (typed array only, default: 0)
* @param end Byte offset to stop parsing at (typed array only, default: input.byteLength)
* @returns An object with `values`, `read`, `done`, and `error` properties
*
* @example
* ```js
* let buffer = new Uint8Array(0);
* for await (const chunk of stream) {
* buffer = Buffer.concat([buffer, chunk]);
* const { values, read, error } = Bun.JSONL.parseChunk(buffer);
* if (error) throw error;
* for (const value of values) handle(value);
* buffer = buffer.subarray(read);
* }
* ```
*/
export function parseChunk(input: string): ParseChunkResult;
export function parseChunk(
input: NodeJS.TypedArray | DataView<ArrayBuffer> | ArrayBufferLike,
start?: number,
end?: number,
): ParseChunkResult;
}
/**
* YAML related APIs
*/
@@ -936,7 +750,7 @@ declare module "bun" {
*/
function write(
destination: BunFile | S3File | PathLike,
input: Blob | NodeJS.TypedArray | ArrayBufferLike | string | BlobPart[] | Archive,
input: Blob | NodeJS.TypedArray | ArrayBufferLike | string | BlobPart[],
options?: {
/**
* If writing to a PathLike, set the permissions of the file.
@@ -1028,20 +842,6 @@ declare module "bun" {
destination: BunFile,
input: BunFile,
options?: {
/**
* Set the file permissions of the destination when it is created or overwritten.
*
* Must be a valid Unix permission mode (0 to 0o777 / 511 in decimal).
* If omitted, defaults to the system default based on umask (typically 0o644).
*
* @throws {RangeError} If the mode is outside the valid range (0 to 0o777).
*
* @example
* ```ts
* await Bun.write(Bun.file("./secret.txt"), Bun.file("./source.txt"), { mode: 0o600 });
* ```
*/
mode?: number;
/**
* If `true`, create the parent directory if it doesn't exist. By default, this is `true`.
*
@@ -1075,20 +875,6 @@ declare module "bun" {
destinationPath: PathLike,
input: BunFile,
options?: {
/**
* Set the file permissions of the destination when it is created or overwritten.
*
* Must be a valid Unix permission mode (0 to 0o777 / 511 in decimal).
* If omitted, defaults to the system default based on umask (typically 0o644).
*
* @throws {RangeError} If the mode is outside the valid range (0 to 0o777).
*
* @example
* ```ts
* await Bun.write("./secret.txt", Bun.file("./source.txt"), { mode: 0o600 });
* ```
*/
mode?: number;
/**
* If `true`, create the parent directory if it doesn't exist. By default, this is `true`.
*
@@ -1840,17 +1626,6 @@ declare module "bun" {
* @default "warn"
*/
logLevel?: "verbose" | "debug" | "info" | "warn" | "error";
/**
* Enable REPL mode transforms:
* - Wraps top-level inputs that appear to be object literals (inputs starting with '{' without trailing ';') in parentheses
* - Hoists all declarations as var for REPL persistence across vm.runInContext calls
* - Wraps last expression in { __proto__: null, value: expr } for result capture
* - Wraps code in sync/async IIFE to avoid parentheses around object literals
*
* @default false
*/
replMode?: boolean;
}
/**
@@ -1957,7 +1732,7 @@ declare module "bun" {
type Architecture = "x64" | "arm64";
type Libc = "glibc" | "musl";
type SIMD = "baseline" | "modern";
type CompileTarget =
type Target =
| `bun-darwin-${Architecture}`
| `bun-darwin-x64-${SIMD}`
| `bun-linux-${Architecture}`
@@ -2299,7 +2074,7 @@ declare module "bun" {
}
interface CompileBuildOptions {
target?: Bun.Build.CompileTarget;
target?: Bun.Build.Target;
execArgv?: string[];
executablePath?: string;
outfile?: string;
@@ -2381,7 +2156,7 @@ declare module "bun" {
* });
* ```
*/
compile: boolean | Bun.Build.CompileTarget | CompileBuildOptions;
compile: boolean | Bun.Build.Target | CompileBuildOptions;
/**
* Splitting is not currently supported with `.compile`
@@ -3515,29 +3290,16 @@ declare module "bun" {
type WebSocketOptionsTLS = {
/**
* Options for the TLS connection.
*
* Supports full TLS configuration including custom CA certificates,
* client certificates, and other TLS settings (same as fetch).
*
* @example
* ```ts
* // Using BunFile for certificates
* const ws = new WebSocket("wss://example.com", {
* tls: {
* ca: Bun.file("./ca.pem")
* }
* });
*
* // Using Buffer
* const ws = new WebSocket("wss://example.com", {
* tls: {
* ca: fs.readFileSync("./ca.pem")
* }
* });
* ```
* Options for the TLS connection
*/
tls?: TLSOptions;
tls?: {
/**
* Whether to reject the connection if the certificate is not valid
*
* @default true
*/
rejectUnauthorized?: boolean;
};
};
type WebSocketOptionsHeaders = {
@@ -3547,57 +3309,10 @@ declare module "bun" {
headers?: import("node:http").OutgoingHttpHeaders;
};
type WebSocketOptionsProxy = {
/**
* HTTP proxy to use for the WebSocket connection.
*
* Can be a string URL or an object with `url` and optional `headers`.
*
* @example
* ```ts
* // String format
* const ws = new WebSocket("wss://example.com", {
* proxy: "http://proxy.example.com:8080"
* });
*
* // With credentials
* const ws = new WebSocket("wss://example.com", {
* proxy: "http://user:pass@proxy.example.com:8080"
* });
*
* // Object format with custom headers
* const ws = new WebSocket("wss://example.com", {
* proxy: {
* url: "http://proxy.example.com:8080",
* headers: {
* "Proxy-Authorization": "Bearer token"
* }
* }
* });
* ```
*/
proxy?:
| string
| {
/**
* The proxy URL (http:// or https://)
*/
url: string;
/**
* Custom headers to send to the proxy server.
* Supports plain objects or Headers class instances.
*/
headers?: import("node:http").OutgoingHttpHeaders | Headers;
};
};
/**
* Constructor options for the `Bun.WebSocket` client
*/
type WebSocketOptions = WebSocketOptionsProtocolsOrProtocol &
WebSocketOptionsTLS &
WebSocketOptionsHeaders &
WebSocketOptionsProxy;
type WebSocketOptions = WebSocketOptionsProtocolsOrProtocol & WebSocketOptionsTLS & WebSocketOptionsHeaders;
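As a rough sketch of how these intersected option types compose (the `protocols` field name is assumed from `WebSocketOptionsProtocolsOrProtocol`, whose definition is not shown in this hunk):
```ts
// Illustrative only; assumes Bun's WebSocket constructor accepts this options object.
const ws = new WebSocket("wss://example.com", {
  protocols: ["graphql-ws"],          // from WebSocketOptionsProtocolsOrProtocol (assumed field name)
  headers: { "x-api-key": "..." },    // from WebSocketOptionsHeaders
  tls: { rejectUnauthorized: false }, // from WebSocketOptionsTLS
});
```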
interface WebSocketEventMap {
close: CloseEvent;
@@ -7162,356 +6877,6 @@ declare module "bun" {
match(str: string): boolean;
}
/**
* Input data for creating an archive. Can be:
* - An object mapping paths to file contents (string, Blob, TypedArray, or ArrayBuffer)
* - A Blob containing existing archive data
* - A TypedArray or ArrayBuffer containing existing archive data
*/
type ArchiveInput = Record<string, BlobPart> | Blob | ArrayBufferView | ArrayBufferLike;
/**
* Compression format for archive output.
* Currently only `"gzip"` is supported.
*/
type ArchiveCompression = "gzip";
/**
* Options for creating an Archive instance.
*
* By default, archives are not compressed. Use `{ compress: "gzip" }` to enable compression.
*
* @example
* ```ts
* // No compression (default)
* new Bun.Archive(data);
*
* // Enable gzip with default level (6)
* new Bun.Archive(data, { compress: "gzip" });
*
* // Specify compression level
* new Bun.Archive(data, { compress: "gzip", level: 9 });
* ```
*/
interface ArchiveOptions {
/**
* Compression algorithm to use.
* Currently only "gzip" is supported.
* If not specified, no compression is applied.
*/
compress?: ArchiveCompression;
/**
* Compression level (1-12). Only applies when `compress` is set.
* - 1: Fastest compression, lowest ratio
* - 6: Default balance of speed and ratio
* - 12: Best compression ratio, slowest
*
* @default 6
*/
level?: number;
}
/**
* Options for extracting archive contents.
*/
interface ArchiveExtractOptions {
/**
* Glob pattern(s) to filter which entries are extracted.
* Uses the same syntax as {@link Bun.Glob}, including support for wildcards (`*`, `**`),
* character classes (`[abc]`), alternation (`{a,b}`), and negation (`!pattern`).
*
* Patterns are matched against archive entry paths normalized to use forward slashes (`/`),
* regardless of the host operating system. Always write patterns using `/` as the separator.
*
* - Positive patterns: Only entries matching at least one pattern will be extracted.
* - Negative patterns (prefixed with `!`): Entries matching these patterns will be excluded.
* Negative patterns are applied after positive patterns.
*
* If not specified, all entries are extracted.
*
* @example
* ```ts
* // Extract only TypeScript files
* await archive.extract("./out", { glob: "**" + "/*.ts" });
*
* // Extract files from multiple directories
* await archive.extract("./out", { glob: ["src/**", "lib/**"] });
*
* // Exclude node_modules using negative pattern
* await archive.extract("./out", { glob: ["**", "!node_modules/**"] });
*
* // Extract source files but exclude tests
* await archive.extract("./out", { glob: ["src/**", "!**" + "/*.test.ts"] });
* ```
*/
glob?: string | readonly string[];
}
/**
* A class for creating and extracting tar archives with optional gzip compression.
*
* `Bun.Archive` provides a fast, native implementation for working with tar archives.
* It supports creating archives from in-memory data or extracting existing archives
* to disk or memory.
*
* @example
* **Create an archive from an object:**
* ```ts
* const archive = new Bun.Archive({
* "hello.txt": "Hello, World!",
* "data.json": JSON.stringify({ foo: "bar" }),
* "binary.bin": new Uint8Array([1, 2, 3, 4]),
* });
* ```
*
* @example
* **Create a gzipped archive:**
* ```ts
* const archive = new Bun.Archive({
* "hello.txt": "Hello, World!",
* }, { compress: "gzip" });
*
* // Or with a specific compression level (1-12)
* const archive = new Bun.Archive(data, { compress: "gzip", level: 9 });
* ```
*
* @example
* **Extract an archive to disk:**
* ```ts
* const archive = new Bun.Archive(tarballBytes);
* const entryCount = await archive.extract("./output");
* console.log(`Extracted ${entryCount} entries`);
* ```
*
* @example
* **Get archive contents as a Map of File objects:**
* ```ts
* const archive = new Bun.Archive(tarballBytes);
* const entries = await archive.files();
* for (const [path, file] of entries) {
* console.log(path, await file.text());
* }
* ```
*
* @example
* **Write a gzipped archive directly to disk:**
* ```ts
* await Bun.Archive.write("bundle.tar.gz", {
* "src/index.ts": sourceCode,
* "package.json": packageJson,
* }, { compress: "gzip" });
* ```
*/
export class Archive {
/**
* Create an `Archive` instance from input data.
*
* By default, archives are not compressed. Use `{ compress: "gzip" }` to enable compression.
*
* @param data - The input data for the archive:
* - **Object**: Creates a new tarball with the object's keys as file paths and values as file contents
* - **Blob/TypedArray/ArrayBuffer**: Wraps existing archive data (tar or tar.gz)
* @param options - Optional archive options including compression settings.
* Defaults to no compression if omitted.
*
* @example
* **From an object (creates uncompressed tarball):**
* ```ts
* const archive = new Bun.Archive({
* "hello.txt": "Hello, World!",
* "nested/file.txt": "Nested content",
* });
* ```
*
* @example
* **With gzip compression:**
* ```ts
* const archive = new Bun.Archive(data, { compress: "gzip" });
* ```
*
* @example
* **With explicit gzip compression level:**
* ```ts
* const archive = new Bun.Archive(data, { compress: "gzip", level: 12 });
* ```
*
* @example
* **From existing archive data:**
* ```ts
* const response = await fetch("https://example.com/package.tar.gz");
* const archive = new Bun.Archive(await response.blob());
* ```
*/
constructor(data: ArchiveInput, options?: ArchiveOptions);
/**
* Create and write an archive directly to disk in one operation.
*
* This is more efficient than creating an archive and then writing it separately,
* as it streams the data directly to disk.
*
* @param path - The file path to write the archive to
* @param data - The input data for the archive (same as `new Archive()`)
* @param options - Optional archive options including compression settings
*
* @returns A promise that resolves when the write is complete
*
* @example
* **Write uncompressed tarball:**
* ```ts
* await Bun.Archive.write("output.tar", {
* "file1.txt": "content1",
* "file2.txt": "content2",
* });
* ```
*
* @example
* **Write gzipped tarball:**
* ```ts
* await Bun.Archive.write("output.tar.gz", files, { compress: "gzip" });
* ```
*/
static write(path: string, data: ArchiveInput | Archive, options?: ArchiveOptions): Promise<void>;
/**
* Extract the archive contents to a directory on disk.
*
* Creates the target directory and any necessary parent directories if they don't exist.
* Existing files will be overwritten.
*
* @param path - The directory path to extract to
* @param options - Optional extraction options
* @param options.glob - Glob pattern(s) to filter entries (positive patterns include, negative patterns starting with `!` exclude)
* @returns A promise that resolves with the number of entries extracted (files, directories, and symlinks)
*
* @example
* **Extract all entries:**
* ```ts
* const archive = new Bun.Archive(tarballBytes);
* const count = await archive.extract("./extracted");
* console.log(`Extracted ${count} entries`);
* ```
*
* @example
* **Extract only TypeScript files:**
* ```ts
* const count = await archive.extract("./src", { glob: "**" + "/*.ts" });
* ```
*
* @example
* **Extract everything except tests:**
* ```ts
* const count = await archive.extract("./dist", { glob: ["**", "!**" + "/*.test.*"] });
* ```
*
* @example
* **Extract source files but exclude tests:**
* ```ts
* const count = await archive.extract("./output", {
* glob: ["src/**", "lib/**", "!**" + "/*.test.ts", "!**" + "/__tests__/**"]
* });
* ```
*/
extract(path: string, options?: ArchiveExtractOptions): Promise<number>;
/**
* Get the archive contents as a `Blob`.
*
* Uses the compression settings specified when the Archive was created.
*
* @returns A promise that resolves with the archive data as a Blob
*
* @example
* **Get tarball as Blob:**
* ```ts
* const archive = new Bun.Archive(data);
* const blob = await archive.blob();
* ```
*
* @example
* **Get gzipped tarball as Blob:**
* ```ts
* const archive = new Bun.Archive(data, { compress: "gzip" });
* const gzippedBlob = await archive.blob();
* ```
*/
blob(): Promise<Blob>;
/**
* Get the archive contents as a `Uint8Array`.
*
* Uses the compression settings specified when the Archive was created.
*
* @returns A promise that resolves with the archive data as a Uint8Array
*
* @example
* **Get tarball bytes:**
* ```ts
* const archive = new Bun.Archive(data);
* const bytes = await archive.bytes();
* ```
*
* @example
* **Get gzipped tarball bytes:**
* ```ts
* const archive = new Bun.Archive(data, { compress: "gzip" });
* const gzippedBytes = await archive.bytes();
* ```
*/
bytes(): Promise<Uint8Array<ArrayBuffer>>;
/**
* Get the archive contents as a `Map` of `File` objects.
*
* Each file in the archive is returned as a `File` object with:
* - `name`: The file path within the archive
* - `lastModified`: The file's modification time from the archive
* - Standard Blob methods (`text()`, `arrayBuffer()`, `stream()`, etc.)
*
* Only regular files are included; directories are not returned.
* File contents are loaded into memory, so for large archives consider using `extract()` instead.
*
* @param glob - Optional glob pattern(s) to filter files. Supports the same syntax as {@link Bun.Glob},
* including negation patterns (prefixed with `!`). Patterns are matched against paths normalized
* to use forward slashes (`/`).
* @returns A promise that resolves with a Map where keys are file paths (always using forward slashes `/` as separators) and values are File objects
*
* @example
* **Get all files:**
* ```ts
* const entries = await archive.files();
* for (const [path, file] of entries) {
* console.log(`${path}: ${file.size} bytes`);
* }
* ```
*
* @example
* **Filter by glob pattern:**
* ```ts
* const tsFiles = await archive.files("**" + "/*.ts");
* const srcFiles = await archive.files(["src/**", "lib/**"]);
* ```
*
* @example
* **Exclude files with negative patterns:**
* ```ts
* // Get all source files except tests
* const srcFiles = await archive.files(["src/**", "!**" + "/*.test.ts"]);
* ```
*
* @example
* **Read file contents:**
* ```ts
* const entries = await archive.files();
* const readme = entries.get("README.md");
* if (readme) {
* console.log(await readme.text());
* }
* ```
*/
files(glob?: string | readonly string[]): Promise<Map<string, File>>;
}
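A minimal round-trip sketch using only the members declared above: build an archive in memory, serialize it, then re-open the bytes and read a file back.
```ts
// Create a gzipped tarball in memory.
const archive = new Bun.Archive(
  { "hello.txt": "Hello, World!" },
  { compress: "gzip" },
);
const bytes = await archive.bytes();

// Re-open the serialized bytes and read the file back.
const reopened = new Bun.Archive(bytes);
const files = await reopened.files();
console.log(await files.get("hello.txt")?.text()); // "Hello, World!"
```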
/**
* Generate a UUIDv7, which is a sequential ID based on the current timestamp with a random component.
*

View File

@@ -11,9 +11,9 @@ declare module "bun" {
* If the file descriptor is not writable yet, the data is buffered.
*
* @param chunk The data to write
* @returns Number of bytes written or, if the write is pending, a Promise resolving to the number of bytes
* @returns Number of bytes written
*/
write(chunk: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer): number | Promise<number>;
write(chunk: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer): number;
/**
* Flush the internal buffer, committing the data to disk or the pipe.
*
@@ -78,9 +78,9 @@ declare module "bun" {
* If the network is not writable yet, the data is buffered.
*
* @param chunk The data to write
* @returns Number of bytes written or, if the write is pending, a Promise resolving to the number of bytes
* @returns Number of bytes written
*/
write(chunk: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer): number | Promise<number>;
write(chunk: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer): number;
/**
* Flush the internal buffer, committing the data to the network.
*
@@ -609,17 +609,7 @@ declare module "bun" {
* });
*/
write(
data:
| string
| ArrayBufferView
| ArrayBuffer
| SharedArrayBuffer
| Request
| Response
| BunFile
| S3File
| Blob
| Archive,
data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer | Request | Response | BunFile | S3File | Blob,
options?: S3Options,
): Promise<number>;
@@ -930,8 +920,7 @@ declare module "bun" {
| BunFile
| S3File
| Blob
| File
| Archive,
| File,
options?: S3Options,
): Promise<number>;
@@ -981,8 +970,7 @@ declare module "bun" {
| BunFile
| S3File
| Blob
| File
| Archive,
| File,
options?: S3Options,
): Promise<number>;
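Depending on which side of this diff applies, sink `write` returns either `number` or `number | Promise<number>`. A caller that wants to tolerate both can normalize with `await`, since awaiting a plain number is a no-op. A sketch, assuming `FileSink` is exported from the `bun` module as the hunk header suggests:
```ts
import type { FileSink } from "bun";

// Sketch: works whether write() returns a number or a Promise<number>.
async function writeChunk(sink: FileSink, chunk: Uint8Array): Promise<number> {
  // `await` resolves a pending write and passes a plain number through.
  return await sink.write(chunk);
}
```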

View File

@@ -315,15 +315,6 @@ int us_internal_ssl_socket_is_closed(struct us_internal_ssl_socket_t *s) {
return us_socket_is_closed(0, &s->s);
}
int us_internal_ssl_socket_is_handshake_finished(struct us_internal_ssl_socket_t *s) {
if (!s || !s->ssl) return 0;
return SSL_is_init_finished(s->ssl);
}
int us_internal_ssl_socket_handshake_callback_has_fired(struct us_internal_ssl_socket_t *s) {
if (!s) return 0;
return s->handshake_state == HANDSHAKE_COMPLETED;
}
void us_internal_trigger_handshake_callback_econnreset(struct us_internal_ssl_socket_t *s) {
struct us_internal_ssl_socket_context_t *context =

View File

@@ -439,8 +439,6 @@ void *us_internal_ssl_socket_ext(us_internal_ssl_socket_r s);
void *us_internal_connecting_ssl_socket_ext(struct us_connecting_socket_t *c);
int us_internal_ssl_socket_is_shut_down(us_internal_ssl_socket_r s);
int us_internal_ssl_socket_is_closed(us_internal_ssl_socket_r s);
int us_internal_ssl_socket_is_handshake_finished(us_internal_ssl_socket_r s);
int us_internal_ssl_socket_handshake_callback_has_fired(us_internal_ssl_socket_r s);
void us_internal_ssl_socket_shutdown(us_internal_ssl_socket_r s);
struct us_internal_ssl_socket_t *us_internal_ssl_socket_context_adopt_socket(

View File

@@ -457,12 +457,6 @@ int us_socket_is_shut_down(int ssl, us_socket_r s) nonnull_fn_decl;
/* Returns whether this socket has been closed. Only valid if memory has not yet been released. */
int us_socket_is_closed(int ssl, us_socket_r s) nonnull_fn_decl;
/* Returns 1 if the TLS handshake has completed, 0 otherwise. For non-SSL sockets, always returns 1. */
int us_socket_is_ssl_handshake_finished(int ssl, us_socket_r s) nonnull_fn_decl;
/* Returns 1 if the TLS handshake callback has been invoked, 0 otherwise. For non-SSL sockets, always returns 1. */
int us_socket_ssl_handshake_callback_has_fired(int ssl, us_socket_r s) nonnull_fn_decl;
/* Immediately closes the socket */
struct us_socket_t *us_socket_close(int ssl, us_socket_r s, int code, void *reason) __attribute__((nonnull(2)));

View File

@@ -128,26 +128,6 @@ int us_socket_is_closed(int ssl, struct us_socket_t *s) {
return s->flags.is_closed;
}
int us_socket_is_ssl_handshake_finished(int ssl, struct us_socket_t *s) {
#ifndef LIBUS_NO_SSL
if(ssl) {
return us_internal_ssl_socket_is_handshake_finished((struct us_internal_ssl_socket_t *) s);
}
#endif
// Non-SSL sockets are always "handshake finished"
return 1;
}
int us_socket_ssl_handshake_callback_has_fired(int ssl, struct us_socket_t *s) {
#ifndef LIBUS_NO_SSL
if(ssl) {
return us_internal_ssl_socket_handshake_callback_has_fired((struct us_internal_ssl_socket_t *) s);
}
#endif
// Non-SSL sockets are always "callback fired"
return 1;
}
int us_connecting_socket_is_closed(int ssl, struct us_connecting_socket_t *c) {
return c->closed;
}

View File

@@ -18,7 +18,7 @@
#pragma once
#ifndef UWS_HTTP_MAX_HEADERS_COUNT
#define UWS_HTTP_MAX_HEADERS_COUNT 200
#define UWS_HTTP_MAX_HEADERS_COUNT 100
#endif
// todo: HttpParser is in need of a few clean-ups and refactorings
@@ -30,7 +30,6 @@
#include <algorithm>
#include <climits>
#include <string_view>
#include <span>
#include <map>
#include "MoveOnlyFunction.h"
#include "ChunkedEncoding.h"
@@ -161,13 +160,6 @@ namespace uWS
std::map<std::string, unsigned short, std::less<>> *currentParameterOffsets = nullptr;
public:
/* Any data pipelined after the HTTP headers (before response).
* Used for Node.js compatibility: 'connect' and 'upgrade' events
* pass this as the 'head' Buffer parameter.
* WARNING: This points to data in the receive buffer and may be stack-allocated.
* Must be cloned before the request handler returns. */
std::span<const char> head;
bool isAncient()
{
return ancientHttp;
@@ -891,8 +883,6 @@ namespace uWS
/* If returned socket is not what we put in we need
* to break here as we either have upgraded to
* WebSockets or otherwise closed the socket. */
/* Store any remaining data as head for Node.js compat (connect/upgrade events) */
req->head = std::span<const char>(data, length);
void *returnedUser = requestHandler(user, req);
if (returnedUser != user) {
/* We are upgraded to WebSocket or otherwise broken */
@@ -938,13 +928,9 @@ namespace uWS
consumedTotal += emittable;
}
} else if(isConnectRequest) {
// This only serves to mark that the connect request read all headers
// and can start emitting data. Don't try to parse remaining data as HTTP -
// it's pipelined data that we've already captured in req->head.
// This only serves to mark that the connect request read all headers
// and can start emitting data
remainingStreamingBytes = STATE_IS_CHUNKED;
// Mark remaining data as consumed and break - it's not HTTP
consumedTotal += length;
break;
} else {
/* If we came here without a body; emit an empty data chunk to signal no data */
dataHandler(user, {}, true);
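The `head` span above exists to back Node.js compatibility: in `node:http`, the `'upgrade'` and `'connect'` events receive any bytes pipelined after the headers as the `head` Buffer. A minimal sketch of the consumer-side API this feeds (standard Node.js; nothing Bun-specific assumed):
```ts
import http from "node:http";

const server = http.createServer();
server.on("upgrade", (req, socket, head) => {
  // `head` holds any bytes that arrived after the HTTP headers,
  // before the protocol switch completed.
  console.log("pipelined bytes:", head.length);
  socket.end();
});
server.listen(0);
```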

View File

@@ -39,7 +39,6 @@ add_compile_definitions(
CONFIG_TCC_PREDEFS
ONE_SOURCE=0
TCC_LIBTCC1="\\0"
CONFIG_TCC_BACKTRACE=0
)
if(APPLE)

View File

@@ -23,10 +23,7 @@ const OS_NAME = platform().toLowerCase();
const ARCH_NAME_RAW = arch();
const IS_MAC = OS_NAME === "darwin";
const IS_LINUX = OS_NAME === "linux";
const IS_WINDOWS = OS_NAME === "win32";
// On Windows, use PROCESSOR_ARCHITECTURE env var to get native arch (Bun may run under x64 emulation)
const NATIVE_ARCH = IS_WINDOWS ? (process.env.PROCESSOR_ARCHITECTURE || ARCH_NAME_RAW).toUpperCase() : ARCH_NAME_RAW;
const IS_ARM64 = NATIVE_ARCH === "ARM64" || NATIVE_ARCH === "AARCH64" || ARCH_NAME_RAW === "arm64";
const IS_ARM64 = ARCH_NAME_RAW === "arm64" || ARCH_NAME_RAW === "aarch64";
// Paths
const ROOT_DIR = resolve(import.meta.dir, "..");
@@ -36,54 +33,22 @@ const WEBKIT_RELEASE_DIR = join(WEBKIT_BUILD_DIR, "Release");
const WEBKIT_DEBUG_DIR = join(WEBKIT_BUILD_DIR, "Debug");
const WEBKIT_RELEASE_DIR_LTO = join(WEBKIT_BUILD_DIR, "ReleaseLTO");
// Windows ICU paths - use vcpkg static build
// Auto-detect triplet: prefer arm64 if it exists, otherwise x64
const VCPKG_ARM64_PATH = join(WEBKIT_DIR, "vcpkg_installed", "arm64-windows-static");
const VCPKG_X64_PATH = join(WEBKIT_DIR, "vcpkg_installed", "x64-windows-static");
const VCPKG_ROOT = existsSync(VCPKG_ARM64_PATH) ? VCPKG_ARM64_PATH : VCPKG_X64_PATH;
const ICU_INCLUDE_DIR = join(VCPKG_ROOT, "include");
// Get ICU library paths based on build config (debug uses 'd' suffix libraries)
function getICULibraryPaths(config: BuildConfig) {
const isDebug = config === "debug";
// vcpkg static ICU libraries: release in lib/, debug in debug/lib/ with 'd' suffix
const libDir = isDebug ? join(VCPKG_ROOT, "debug", "lib") : join(VCPKG_ROOT, "lib");
const suffix = isDebug ? "d" : "";
return {
ICU_LIBRARY: libDir,
ICU_DATA_LIBRARY: join(libDir, `sicudt${suffix}.lib`),
ICU_I18N_LIBRARY: join(libDir, `sicuin${suffix}.lib`),
ICU_UC_LIBRARY: join(libDir, `sicuuc${suffix}.lib`),
};
}
// Homebrew prefix detection
const HOMEBREW_PREFIX = IS_ARM64 ? "/opt/homebrew/" : "/usr/local/";
// Compiler detection
function findExecutable(names: string[]): string | null {
for (const name of names) {
const path = Bun.which(name);
if (path) return path;
const result = spawnSync("which", [name], { encoding: "utf8" });
if (result.status === 0) {
return result.stdout.trim();
}
}
return null;
}
// Detect ccache
const CCACHE = findExecutable(["ccache"]);
const HAS_CCACHE = CCACHE !== null;
// Configure compilers with ccache if available
// On Windows, use clang-cl for MSVC compatibility
const CC_BASE = IS_WINDOWS
? findExecutable(["clang-cl.exe", "clang-cl"]) || "clang-cl"
: findExecutable(["clang-19", "clang"]) || "clang";
const CXX_BASE = IS_WINDOWS
? findExecutable(["clang-cl.exe", "clang-cl"]) || "clang-cl"
: findExecutable(["clang++-19", "clang++"]) || "clang++";
const CC = HAS_CCACHE ? CCACHE : CC_BASE;
const CXX = HAS_CCACHE ? CCACHE : CXX_BASE;
const CC = findExecutable(["clang-19", "clang"]) || "clang";
const CXX = findExecutable(["clang++-19", "clang++"]) || "clang++";
// Build directory based on config
const getBuildDir = (config: BuildConfig) => {
@@ -98,7 +63,7 @@ const getBuildDir = (config: BuildConfig) => {
};
// Common CMake flags
const getCommonFlags = (config: BuildConfig) => {
const getCommonFlags = () => {
const flags = [
"-DPORT=JSCOnly",
"-DENABLE_STATIC_JSC=ON",
@@ -109,20 +74,10 @@ const getCommonFlags = (config: BuildConfig) => {
"-DENABLE_FTL_JIT=ON",
"-G",
"Ninja",
`-DCMAKE_C_COMPILER=${CC}`,
`-DCMAKE_CXX_COMPILER=${CXX}`,
];
// Configure compiler with ccache if available
if (HAS_CCACHE) {
flags.push(
`-DCMAKE_C_COMPILER_LAUNCHER=${CCACHE}`,
`-DCMAKE_CXX_COMPILER_LAUNCHER=${CCACHE}`,
`-DCMAKE_C_COMPILER=${CC_BASE}`,
`-DCMAKE_CXX_COMPILER=${CXX_BASE}`,
);
} else {
flags.push(`-DCMAKE_C_COMPILER=${CC}`, `-DCMAKE_CXX_COMPILER=${CXX}`);
}
if (IS_MAC) {
flags.push(
"-DENABLE_SINGLE_THREADED_VM_ENTRY_SCOPE=ON",
@@ -136,27 +91,6 @@ const getCommonFlags = (config: BuildConfig) => {
"-DUSE_VISIBILITY_ATTRIBUTE=1",
"-DENABLE_REMOTE_INSPECTOR=ON",
);
} else if (IS_WINDOWS) {
// Find lld-link for Windows builds
const lldLink = findExecutable(["lld-link.exe", "lld-link"]) || "lld-link";
// Get ICU library paths for this build config (debug uses 'd' suffix libraries)
const icuPaths = getICULibraryPaths(config);
flags.push(
"-DENABLE_REMOTE_INSPECTOR=ON",
"-DUSE_VISIBILITY_ATTRIBUTE=1",
"-DUSE_SYSTEM_MALLOC=ON",
`-DCMAKE_LINKER=${lldLink}`,
`-DICU_ROOT=${VCPKG_ROOT}`,
`-DICU_LIBRARY=${icuPaths.ICU_LIBRARY}`,
`-DICU_INCLUDE_DIR=${ICU_INCLUDE_DIR}`,
// Explicitly set ICU library paths to use vcpkg static libs (debug has 'd' suffix)
`-DICU_DATA_LIBRARY_RELEASE=${icuPaths.ICU_DATA_LIBRARY}`,
`-DICU_I18N_LIBRARY_RELEASE=${icuPaths.ICU_I18N_LIBRARY}`,
`-DICU_UC_LIBRARY_RELEASE=${icuPaths.ICU_UC_LIBRARY}`,
"-DCMAKE_C_FLAGS=/DU_STATIC_IMPLEMENTATION",
"-DCMAKE_CXX_FLAGS=/DU_STATIC_IMPLEMENTATION /clang:-fno-c++-static-destructors",
);
}
return flags;
@@ -164,7 +98,7 @@ const getCommonFlags = (config: BuildConfig) => {
// Build-specific CMake flags
const getBuildFlags = (config: BuildConfig) => {
const flags = [...getCommonFlags(config)];
const flags = [...getCommonFlags()];
switch (config) {
case "debug":
@@ -176,37 +110,20 @@ const getBuildFlags = (config: BuildConfig) => {
"-DUSE_VISIBILITY_ATTRIBUTE=1",
);
if (IS_MAC || IS_LINUX) {
// Enable address sanitizer by default on Mac/Linux debug builds
if (IS_MAC) {
// Enable address sanitizer by default on Mac debug builds
flags.push("-DENABLE_SANITIZERS=address");
// To disable asan, comment the line above and uncomment:
// flags.push("-DENABLE_MALLOC_HEAP_BREAKDOWN=ON");
}
if (IS_WINDOWS) {
flags.push("-DCMAKE_MSVC_RUNTIME_LIBRARY=MultiThreadedDebug");
}
break;
case "lto":
flags.push("-DCMAKE_BUILD_TYPE=Release");
if (IS_WINDOWS) {
// On Windows, append LTO flags to existing Windows-specific flags
flags.push(
"-DCMAKE_C_FLAGS=/DU_STATIC_IMPLEMENTATION -flto=full",
"-DCMAKE_CXX_FLAGS=/DU_STATIC_IMPLEMENTATION /clang:-fno-c++-static-destructors -flto=full",
"-DCMAKE_MSVC_RUNTIME_LIBRARY=MultiThreaded",
);
} else {
flags.push("-DCMAKE_C_FLAGS=-flto=full", "-DCMAKE_CXX_FLAGS=-flto=full");
}
flags.push("-DCMAKE_BUILD_TYPE=Release", "-DCMAKE_C_FLAGS=-flto=full", "-DCMAKE_CXX_FLAGS=-flto=full");
break;
default: // release
flags.push("-DCMAKE_BUILD_TYPE=RelWithDebInfo");
if (IS_WINDOWS) {
flags.push("-DCMAKE_MSVC_RUNTIME_LIBRARY=MultiThreaded");
}
break;
}
@@ -217,6 +134,17 @@ const getBuildFlags = (config: BuildConfig) => {
const getBuildEnv = () => {
const env = { ...process.env };
const cflags = ["-ffat-lto-objects"];
const cxxflags = ["-ffat-lto-objects"];
if (IS_LINUX && buildConfig !== "lto") {
cflags.push("-Wl,--whole-archive");
cxxflags.push("-Wl,--whole-archive", "-DUSE_BUN_JSC_ADDITIONS=ON", "-DUSE_BUN_EVENT_LOOP=ON");
}
env.CFLAGS = (env.CFLAGS || "") + " " + cflags.join(" ");
env.CXXFLAGS = (env.CXXFLAGS || "") + " " + cxxflags.join(" ");
if (IS_MAC) {
env.ICU_INCLUDE_DIRS = `${HOMEBREW_PREFIX}opt/icu4c/include`;
}
@@ -251,9 +179,6 @@ function buildJSC() {
console.log(`Building JSC with configuration: ${buildConfig}`);
console.log(`Build directory: ${buildDir}`);
if (HAS_CCACHE) {
console.log(`Using ccache for faster builds: ${CCACHE}`);
}
// Create build directories
if (!existsSync(buildDir)) {

View File

@@ -14,15 +14,6 @@ import {
startGroup,
} from "./utils.mjs";
// Detect Windows ARM64 - bun may run under x64 emulation (WoW64), so check multiple indicators
const isWindowsARM64 =
isWindows &&
(process.env.PROCESSOR_ARCHITECTURE === "ARM64" ||
process.env.VSCMD_ARG_HOST_ARCH === "arm64" ||
process.env.MSYSTEM_CARCH === "aarch64" ||
(process.env.PROCESSOR_IDENTIFIER || "").includes("ARMv8") ||
process.arch === "arm64");
if (globalThis.Bun) {
await import("./glob-sources.mjs");
}
@@ -92,23 +83,6 @@ async function build(args) {
generateOptions["--toolchain"] = toolchainPath;
}
// Windows ARM64: automatically set required options
if (isWindowsARM64) {
// Use clang-cl instead of MSVC cl.exe for proper ARM64 flag support
if (!generateOptions["-DCMAKE_C_COMPILER"]) {
generateOptions["-DCMAKE_C_COMPILER"] = "clang-cl";
}
if (!generateOptions["-DCMAKE_CXX_COMPILER"]) {
generateOptions["-DCMAKE_CXX_COMPILER"] = "clang-cl";
}
// Skip codegen by default since x64 bun crashes under WoW64 emulation
// Can be overridden with -DSKIP_CODEGEN=OFF once ARM64 bun is available
if (!generateOptions["-DSKIP_CODEGEN"]) {
generateOptions["-DSKIP_CODEGEN"] = "ON";
}
console.log("Windows ARM64 detected: using clang-cl and SKIP_CODEGEN=ON");
}
const generateArgs = Object.entries(generateOptions).flatMap(([flag, value]) =>
flag.startsWith("-D") ? [`${flag}=${value}`] : [flag, value],
);

View File

@@ -49,42 +49,9 @@ const colors = {
// Parse command line arguments
const args = process.argv.slice(2);
// Show help
if (args.includes("--help") || args.includes("-h")) {
console.log(`Usage: bun run scripts/buildkite-failures.ts [options] [build-id|branch|pr-url|buildkite-url]
Shows detailed error information from BuildKite build failures.
Full logs are saved to /tmp/bun-build-{number}-{platform}-{step}.log
Arguments:
build-id BuildKite build number (e.g., 35051)
branch Git branch name (e.g., main, claude/fix-bug)
pr-url GitHub PR URL (e.g., https://github.com/oven-sh/bun/pull/26173)
buildkite-url BuildKite build URL
#number GitHub PR number (e.g., #26173)
(none) Uses current git branch
Options:
--flaky, -f Include flaky test annotations
--warnings, -w Include warning annotations
--wait Poll continuously until build completes or fails
--help, -h Show this help message
Examples:
bun run scripts/buildkite-failures.ts # Current branch
bun run scripts/buildkite-failures.ts main # Main branch
bun run scripts/buildkite-failures.ts 35051 # Build #35051
bun run scripts/buildkite-failures.ts #26173 # PR #26173
bun run scripts/buildkite-failures.ts --wait # Wait for current branch build to complete
`);
process.exit(0);
}
const showWarnings = args.includes("--warnings") || args.includes("-w");
const showFlaky = args.includes("--flaky") || args.includes("-f");
const waitMode = args.includes("--wait");
const inputArg = args.find(arg => !arg.startsWith("-") && !arg.startsWith("--"));
const inputArg = args.find(arg => !arg.startsWith("-"));
// Determine what type of input we have
let buildNumber = null;
@@ -147,138 +114,38 @@ if (!buildNumber) {
buildNumber = match[1];
}
// Helper to format time ago
function formatTimeAgo(dateStr: string | null): string {
if (!dateStr) return "not started";
const date = new Date(dateStr);
const now = new Date();
const diffMs = now.getTime() - date.getTime();
const diffSecs = Math.floor(diffMs / 1000);
const diffMins = Math.floor(diffSecs / 60);
const diffHours = Math.floor(diffMins / 60);
const diffDays = Math.floor(diffHours / 24);
// Fetch build JSON
const buildResponse = await fetch(`https://buildkite.com/bun/bun/builds/${buildNumber}.json`);
const build = await buildResponse.json();
if (diffDays > 0) return `${diffDays} day${diffDays !== 1 ? "s" : ""} ago`;
if (diffHours > 0) return `${diffHours} hour${diffHours !== 1 ? "s" : ""} ago`;
if (diffMins > 0) return `${diffMins} minute${diffMins !== 1 ? "s" : ""} ago`;
return `${diffSecs} second${diffSecs !== 1 ? "s" : ""} ago`;
// Calculate time ago
const buildTime = new Date(build.started_at);
const now = new Date();
const diffMs = now.getTime() - buildTime.getTime();
const diffSecs = Math.floor(diffMs / 1000);
const diffMins = Math.floor(diffSecs / 60);
const diffHours = Math.floor(diffMins / 60);
const diffDays = Math.floor(diffHours / 24);
let timeAgo;
if (diffDays > 0) {
timeAgo = `${diffDays} day${diffDays !== 1 ? "s" : ""} ago`;
} else if (diffHours > 0) {
timeAgo = `${diffHours} hour${diffHours !== 1 ? "s" : ""} ago`;
} else if (diffMins > 0) {
timeAgo = `${diffMins} minute${diffMins !== 1 ? "s" : ""} ago`;
} else {
timeAgo = `${diffSecs} second${diffSecs !== 1 ? "s" : ""} ago`;
}
// Helper to clear line for updates
const clearLine = isTTY ? "\x1b[2K\r" : "";
// Poll for build status
let build: any;
let pollCount = 0;
const pollInterval = 10000; // 10 seconds
while (true) {
// Fetch build JSON
const buildResponse = await fetch(`https://buildkite.com/bun/bun/builds/${buildNumber}.json`);
build = await buildResponse.json();
// Check for failed jobs first (even if build is still running)
const failedJobsEarly =
build.jobs?.filter(
(job: any) => job.exit_status && job.exit_status > 0 && !job.soft_failed && job.type === "script",
) || [];
// In wait mode with failures, stop polling and show failures
if (waitMode && failedJobsEarly.length > 0) {
if (pollCount > 0) {
process.stdout.write(clearLine);
}
break;
}
// Calculate time ago (use created_at as fallback for scheduled/pending builds)
const timeAgo = formatTimeAgo(build.started_at || build.created_at);
// Check if build passed
if (build.state === "passed") {
if (pollCount > 0) {
process.stdout.write(clearLine);
}
console.log(`${timeAgo} - build #${buildNumber} https://buildkite.com/bun/bun/builds/${buildNumber}\n`);
console.log(`${colors.green}✅ Passed!${colors.reset}`);
process.exit(0);
}
// Check if build was canceled
if (build.state === "canceled" || build.state === "canceling") {
if (pollCount > 0) {
process.stdout.write(clearLine);
}
console.log(`${timeAgo} - build #${buildNumber} https://buildkite.com/bun/bun/builds/${buildNumber}\n`);
console.log(`${colors.dim}🚫 Build was canceled${colors.reset}`);
process.exit(0);
}
// Check if build is pending/running/scheduled
if (
build.state === "scheduled" ||
build.state === "running" ||
build.state === "creating" ||
build.state === "started"
) {
const runningJobs = build.jobs?.filter((job: any) => job.state === "running") || [];
const pendingJobs = build.jobs?.filter((job: any) => job.state === "scheduled" || job.state === "waiting") || [];
const passedJobs = build.jobs?.filter((job: any) => job.state === "passed") || [];
const totalJobs = build.jobs?.filter((job: any) => job.type === "script")?.length || 0;
if (waitMode) {
// In wait mode, show a single updating line
let statusMsg = "";
if (build.state === "scheduled" || build.state === "creating") {
statusMsg = `⏳ Waiting... (scheduled ${formatTimeAgo(build.created_at)})`;
} else {
statusMsg = `🔄 Running... ${passedJobs.length}/${totalJobs} passed, ${runningJobs.length} running`;
}
process.stdout.write(`${clearLine}${colors.dim}${statusMsg}${colors.reset}`);
pollCount++;
await Bun.sleep(pollInterval);
continue;
} else {
// Not in wait mode, show full status and exit
console.log(`${timeAgo} - build #${buildNumber} https://buildkite.com/bun/bun/builds/${buildNumber}\n`);
if (build.state === "scheduled" || build.state === "creating") {
console.log(`${colors.dim}⏳ Build is scheduled/pending${colors.reset}`);
if (build.created_at) {
console.log(`${colors.dim} Created: ${formatTimeAgo(build.created_at)}${colors.reset}`);
}
} else {
console.log(`${colors.dim}🔄 Build is running${colors.reset}`);
if (build.started_at) {
console.log(`${colors.dim} Started: ${formatTimeAgo(build.started_at)}${colors.reset}`);
}
console.log(
`${colors.dim} Progress: ${passedJobs.length}/${totalJobs} jobs passed, ${runningJobs.length} running, ${pendingJobs.length} pending${colors.reset}`,
);
if (runningJobs.length > 0) {
console.log(`\n${colors.dim}Running jobs:${colors.reset}`);
for (const job of runningJobs.slice(0, 5)) {
const name = job.name || job.label || "Unknown";
console.log(` ${colors.dim}${name}${colors.reset}`);
}
if (runningJobs.length > 5) {
console.log(` ${colors.dim}... and ${runningJobs.length - 5} more${colors.reset}`);
}
}
}
process.exit(0);
}
}
// Build is in a terminal state (failed, etc.) - break out of loop
break;
}
// Print header for failed build
const timeAgo = formatTimeAgo(build.started_at || build.created_at);
console.log(`${timeAgo} - build #${buildNumber} https://buildkite.com/bun/bun/builds/${buildNumber}\n`);
// Check if build passed
if (build.state === "passed") {
console.log(`${colors.green}✅ Passed!${colors.reset}`);
process.exit(0);
}
// Get failed jobs
const failedJobs =
build.jobs?.filter(job => job.exit_status && job.exit_status > 0 && !job.soft_failed && job.type === "script") || [];
@@ -867,212 +734,7 @@ if (registerRequestIndex !== -1) {
console.log(` https://buildkite.com/bun/bun/builds/${buildNumber}#annotations`);
}
} else {
// No annotations found - show detailed job failure information
if (failedJobs.length > 0) {
console.log(`\n${colors.red}${colors.bold}${failedJobs.length} job failures${colors.reset}\n`);
// Show annotation counts if available
const annotationCounts = build.annotation_counts_by_style;
if (annotationCounts) {
const errors = annotationCounts.error || 0;
const warnings = annotationCounts.warning || 0;
if (errors > 0 || warnings > 0) {
const parts = [];
if (errors > 0) parts.push(`${errors} error${errors !== 1 ? "s" : ""}`);
if (warnings > 0) parts.push(`${warnings} warning${warnings !== 1 ? "s" : ""}`);
console.log(
`${colors.dim}Annotations: ${parts.join(", ")} - view at https://buildkite.com/bun/bun/builds/${buildNumber}#annotations${colors.reset}\n`,
);
}
}
// Group jobs by type
const buildJobs = failedJobs.filter(job => (job.name || job.label || "").includes("build-"));
const testJobs = failedJobs.filter(job => (job.name || job.label || "").includes("test"));
const otherJobs = failedJobs.filter(
job => !(job.name || job.label || "").includes("build-") && !(job.name || job.label || "").includes("test"),
);
// Display build failures
if (buildJobs.length > 0) {
console.log(
`${colors.bgRed}${colors.white}${colors.bold} Build Failures (${buildJobs.length}) ${colors.reset}\n`,
);
for (const job of buildJobs) {
const name = (job.name || job.label || "Unknown").replace(/^:([^:]+):/, (_, emoji) => {
const platform = emoji.toLowerCase();
return platformMap[platform] || `:${emoji}:`;
});
const duration =
job.started_at && job.finished_at
? `${((new Date(job.finished_at).getTime() - new Date(job.started_at).getTime()) / 1000).toFixed(0)}s`
: "N/A";
console.log(` ${colors.red}${colors.reset} ${name}`);
console.log(` ${colors.dim}Duration: ${duration} | Exit: ${job.exit_status}${colors.reset}`);
console.log(` ${colors.dim}https://buildkite.com${job.path}${colors.reset}`);
console.log();
}
}
// Display test failures
if (testJobs.length > 0) {
console.log(`${colors.bgBlue}${colors.white}${colors.bold} Test Failures (${testJobs.length}) ${colors.reset}\n`);
for (const job of testJobs) {
const name = (job.name || job.label || "Unknown").replace(/^:([^:]+):/, (_, emoji) => {
const platform = emoji.toLowerCase();
return platformMap[platform] || `:${emoji}:`;
});
const duration =
job.started_at && job.finished_at
? `${((new Date(job.finished_at).getTime() - new Date(job.started_at).getTime()) / 1000).toFixed(0)}s`
: "N/A";
console.log(` ${colors.red}${colors.reset} ${name}`);
console.log(` ${colors.dim}Duration: ${duration} | Exit: ${job.exit_status}${colors.reset}`);
console.log(` ${colors.dim}https://buildkite.com${job.path}${colors.reset}`);
console.log();
}
}
// Display other failures
if (otherJobs.length > 0) {
console.log(
`${colors.bgBlue}${colors.white}${colors.bold} Other Failures (${otherJobs.length}) ${colors.reset}\n`,
);
for (const job of otherJobs) {
const name = (job.name || job.label || "Unknown").replace(/^:([^:]+):/, (_, emoji) => {
const platform = emoji.toLowerCase();
return platformMap[platform] || `:${emoji}:`;
});
const duration =
job.started_at && job.finished_at
? `${((new Date(job.finished_at).getTime() - new Date(job.started_at).getTime()) / 1000).toFixed(0)}s`
: "N/A";
console.log(` ${colors.red}${colors.reset} ${name}`);
console.log(` ${colors.dim}Duration: ${duration} | Exit: ${job.exit_status}${colors.reset}`);
console.log(` ${colors.dim}https://buildkite.com${job.path}${colors.reset}`);
console.log();
}
}
// Fetch and display logs for all failed jobs
// Use the public BuildKite log endpoint
console.log(`${colors.dim}Fetching logs for ${failedJobs.length} failed jobs...${colors.reset}\n`);
for (const job of failedJobs) {
const name = (job.name || job.label || "Unknown").replace(/^:([^:]+):/, (_, emoji) => {
const platform = emoji.toLowerCase();
return platformMap[platform] || `:${emoji}:`;
});
// Create a sanitized filename from the job name
// e.g., ":darwin: aarch64 - build-cpp" -> "darwin-aarch64-build-cpp"
const sanitizedName = (job.name || job.label || "unknown")
.replace(/^:([^:]+):\s*/, "$1-") // :darwin: -> darwin-
.replace(/\s+-\s+/g, "-") // " - " -> "-"
.replace(/[^a-zA-Z0-9-]/g, "-") // Replace other chars with -
.replace(/-+/g, "-") // Collapse multiple -
.replace(/^-|-$/g, "") // Remove leading/trailing -
.toLowerCase();
const logFilePath = `/tmp/bun-build-${buildNumber}-${sanitizedName}.log`;
try {
const logResponse = await fetch(
`https://buildkite.com/organizations/bun/pipelines/bun/builds/${buildNumber}/jobs/${job.id}/log`,
);
if (logResponse.ok) {
const logData = await logResponse.json();
let output = logData.output || "";
// Convert HTML to readable text (without ANSI codes for file output)
const plainOutput = output
// Remove timestamp tags
.replace(/<time[^>]*>[^<]*<\/time>/g, "")
// Remove all span tags
.replace(/<span[^>]*>([^<]*)<\/span>/g, "$1")
// Remove remaining HTML tags
.replace(/<[^>]+>/g, "")
// Decode HTML entities
.replace(/&amp;/g, "&")
.replace(/&lt;/g, "<")
.replace(/&gt;/g, ">")
.replace(/&quot;/g, '"')
.replace(/&#39;/g, "'")
.replace(/&#47;/g, "/")
.replace(/&nbsp;/g, " ");
// Write the full log to a file
await Bun.write(logFilePath, plainOutput);
// Extract unique error messages for display
const lines = plainOutput.split("\n");
const uniqueErrors = new Set<string>();
for (let i = 0; i < lines.length; i++) {
const line = lines[i];
// Look for actual error messages
const isError =
(line.includes("error:") && !line.includes('error: script "') && !line.includes("error: exit")) ||
line.includes("fatal error:") ||
line.includes("panic:") ||
line.includes("undefined reference");
if (isError) {
// Extract just the error message part (remove path prefixes and timestamps)
const errorMsg = line
.replace(/^.*?\d{4}-\d{2}-\d{2}T[\d:.]+Z/, "") // Remove timestamps
.replace(/^.*?\/[^\s]*:\d+:\d+:\s*/, "") // Remove file paths
.trim();
if (errorMsg && !uniqueErrors.has(errorMsg)) {
uniqueErrors.add(errorMsg);
}
}
}
// Display job info with log file path
console.log(`${colors.bgBlue}${colors.white}${colors.bold} ${name} ${colors.reset}`);
console.log(` ${colors.dim}Log: ${logFilePath}${colors.reset}`);
if (uniqueErrors.size > 0) {
console.log(` ${colors.red}Errors (${uniqueErrors.size}):${colors.reset}`);
let count = 0;
for (const err of uniqueErrors) {
if (count >= 5) {
console.log(` ${colors.dim}... and ${uniqueErrors.size - 5} more${colors.reset}`);
break;
}
console.log(` ${colors.red}${colors.reset} ${err.slice(0, 120)}${err.length > 120 ? "..." : ""}`);
count++;
}
} else {
// Show last few lines as a preview
const lastLines = lines.slice(-5).filter(l => l.trim());
if (lastLines.length > 0) {
console.log(` ${colors.dim}Last output:${colors.reset}`);
for (const line of lastLines) {
console.log(` ${colors.dim}${line.slice(0, 100)}${line.length > 100 ? "..." : ""}${colors.reset}`);
}
}
}
if (logData.truncated) {
console.log(` ${colors.dim}(Log was truncated by BuildKite)${colors.reset}`);
}
} else {
console.log(`${colors.bgBlue}${colors.white}${colors.bold} ${name} ${colors.reset}`);
console.log(` ${colors.dim}Failed to fetch log: ${logResponse.status}${colors.reset}`);
}
} catch (e) {
console.log(`${colors.bgBlue}${colors.white}${colors.bold} ${name} ${colors.reset}`);
console.log(` ${colors.dim}Error fetching log: ${e.message}${colors.reset}`);
}
console.log();
}
} else {
console.log("View detailed results at:");
console.log(` https://buildkite.com/bun/bun/builds/${buildNumber}#annotations`);
}
console.log(`\n${colors.red}${colors.bold}${failedJobs.length} job failures${colors.reset}\n`);
console.log("View detailed results at:");
console.log(` https://buildkite.com/bun/bun/builds/${buildNumber}#annotations`);
}

View File

@@ -1585,9 +1585,6 @@ function isNodeTest(path) {
if (isCI && isMacOS && isX64) {
return false;
}
if (!isJavaScript(path)) {
return false;
}
const unixPath = path.replaceAll(sep, "/");
return (
unixPath.includes("js/node/test/parallel/") ||

View File

@@ -1,82 +0,0 @@
#!/bin/bash
# Updates the vendored uucode library and regenerates grapheme tables.
#
# Usage:
# ./scripts/update-uucode.sh # update from default URL
# ./scripts/update-uucode.sh /path/to/uucode # update from local directory
# ./scripts/update-uucode.sh https://url.tar.gz # update from URL
#
# After running, verify with:
# bun bd test test/js/bun/util/stringWidth.test.ts
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
BUN_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
UUCODE_DIR="$BUN_ROOT/src/deps/uucode"
ZIG="$BUN_ROOT/vendor/zig/zig"
if [ ! -x "$ZIG" ]; then
echo "error: zig not found at $ZIG"
echo " run scripts/bootstrap.sh first"
exit 1
fi
update_from_dir() {
local src="$1"
echo "Updating uucode from: $src"
rm -rf "$UUCODE_DIR"
mkdir -p "$UUCODE_DIR"
cp -r "$src"/* "$UUCODE_DIR/"
}
update_from_url() {
local url="$1"
local tmp
tmp=$(mktemp -d)
trap "rm -rf $tmp" EXIT
echo "Downloading uucode from: $url"
curl -fsSL "$url" | tar -xz -C "$tmp" --strip-components=1
update_from_dir "$tmp"
}
# Handle source argument
if [ $# -ge 1 ]; then
SOURCE="$1"
if [ -d "$SOURCE" ]; then
update_from_dir "$SOURCE"
elif [[ "$SOURCE" == http* ]]; then
update_from_url "$SOURCE"
else
echo "error: argument must be a directory or URL"
exit 1
fi
else
# Default: use the zig global cache if available
CACHED=$(find "$HOME/.cache/zig/p" -maxdepth 1 -name "uucode-*" -type d 2>/dev/null | sort -V | tail -1)
if [ -n "$CACHED" ]; then
update_from_dir "$CACHED"
else
echo "error: no uucode source specified and none found in zig cache"
echo ""
echo "usage: $0 <path-to-uucode-dir-or-url>"
exit 1
fi
fi
echo ""
echo "Regenerating grapheme tables..."
cd "$BUN_ROOT"
"$ZIG" build generate-grapheme-tables
echo ""
echo "Done. Updated files:"
echo " src/deps/uucode/ (vendored library)"
echo " src/string/immutable/grapheme_tables.zig (regenerated)"
echo ""
echo "Next steps:"
echo " 1. bun bd test test/js/bun/util/stringWidth.test.ts"
echo " 2. git add src/deps/uucode src/string/immutable/grapheme_tables.zig"
echo " 3. git commit -m 'Update uucode to <version>'"

View File

@@ -3,10 +3,6 @@
$ErrorActionPreference = "Stop"
# Detect system architecture
$script:IsARM64 = [System.Runtime.InteropServices.RuntimeInformation]::OSArchitecture -eq [System.Runtime.InteropServices.Architecture]::Arm64
$script:VsArch = if ($script:IsARM64) { "arm64" } else { "amd64" }
if($env:VSINSTALLDIR -eq $null) {
Write-Host "Loading Visual Studio environment, this may take a second..."
@@ -27,14 +23,14 @@ if($env:VSINSTALLDIR -eq $null) {
Push-Location $vsDir
try {
$vsShell = (Join-Path -Path $vsDir -ChildPath "Common7\Tools\Launch-VsDevShell.ps1")
. $vsShell -Arch $script:VsArch -HostArch $script:VsArch
. $vsShell -Arch amd64 -HostArch amd64
} finally {
Pop-Location
}
}
if($env:VSCMD_ARG_TGT_ARCH -eq "x86") {
throw "Visual Studio environment is targeting 32 bit x86, but only 64-bit architectures (x64/arm64) are supported."
throw "Visual Studio environment is targeting 32 bit, but only 64 bit is supported."
}
if ($args.Count -gt 0) {

View File

@@ -429,49 +429,16 @@ pub const StandaloneModuleGraph = struct {
const bytecode: StringPointer = brk: {
if (output_file.bytecode_index != std.math.maxInt(u32)) {
// Bytecode alignment for JSC bytecode cache deserialization.
// Not aligning correctly causes a runtime assertion error or segfault.
//
// PLATFORM-SPECIFIC ALIGNMENT:
// - PE (Windows) and Mach-O (macOS): The module graph data is embedded in
// a dedicated section with an 8-byte size header. At runtime, the section
// is memory-mapped at a page-aligned address (hence 128-byte aligned).
// The data buffer starts 8 bytes after the section start.
// For bytecode at offset O to be 128-byte aligned:
// (section_va + 8 + O) % 128 == 0
// => O % 128 == 120
//
// - ELF (Linux): The module graph data is appended to the executable and
// read into a heap-allocated buffer at runtime. The allocator provides
// natural alignment, and there's no 8-byte section header offset.
// However, using target_mod=120 is still safe because:
// - If the buffer is 128-aligned: bytecode at offset 120 is at (128n + 120),
// which when loaded at a 128-aligned address gives proper alignment.
// - The extra 120 bytes of padding is acceptable overhead.
//
// This alignment strategy (target_mod=120) works for all platforms because
// it's the worst-case offset needed for the 8-byte header scenario.
// Use up to 256 byte alignment for bytecode
// Not aligning it correctly will cause a runtime assertion error, or a segfault.
const bytecode = output_files[output_file.bytecode_index].value.buffer.bytes;
const current_offset = string_builder.len;
// Calculate padding so that (current_offset + padding) % 128 == 120
// This accounts for the 8-byte section header on PE/Mach-O platforms.
const target_mod: usize = 128 - @sizeOf(u64); // 120 = accounts for 8-byte header
const current_mod = current_offset % 128;
const padding = if (current_mod <= target_mod)
target_mod - current_mod
else
128 - current_mod + target_mod;
// Zero the padding bytes to ensure deterministic output
const writable = string_builder.writable();
@memset(writable[0..padding], 0);
string_builder.len += padding;
const aligned_offset = string_builder.len;
const writable_after_padding = string_builder.writable();
@memcpy(writable_after_padding[0..bytecode.len], bytecode[0..bytecode.len]);
const unaligned_space = writable_after_padding[bytecode.len..];
const aligned = std.mem.alignInSlice(string_builder.writable(), 128).?;
@memcpy(aligned[0..bytecode.len], bytecode[0..bytecode.len]);
const unaligned_space = aligned[bytecode.len..];
const offset = @intFromPtr(aligned.ptr) - @intFromPtr(string_builder.ptr.?);
const len = bytecode.len + @min(unaligned_space.len, 128);
string_builder.len += len;
break :brk StringPointer{ .offset = @truncate(aligned_offset), .length = @truncate(len) };
break :brk StringPointer{ .offset = @truncate(offset), .length = @truncate(len) };
} else {
break :brk .{};
}
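The padding arithmetic described in the comment above can be checked in isolation. This is a standalone TypeScript transcription of the formula (a hypothetical helper for illustration, not the Zig implementation):
```ts
// Compute padding so that (offset + padding) % 128 === 120,
// leaving room for the 8-byte section header on PE/Mach-O.
function bytecodePadding(offset: number): number {
  const targetMod = 128 - 8; // 120
  const currentMod = offset % 128;
  return currentMod <= targetMod
    ? targetMod - currentMod
    : 128 - currentMod + targetMod;
}

// e.g. bytecodePadding(0) === 120, and (0 + 120 + 8) % 128 === 0
```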
@@ -1192,9 +1159,7 @@ pub const StandaloneModuleGraph = struct {
return .success;
}
/// Loads the standalone module graph from the executable, allocates it on the heap,
/// sets it globally, and returns the pointer.
pub fn fromExecutable(allocator: std.mem.Allocator) !?*StandaloneModuleGraph {
pub fn fromExecutable(allocator: std.mem.Allocator) !?StandaloneModuleGraph {
if (comptime Environment.isMac) {
const macho_bytes = Macho.getData() orelse return null;
if (macho_bytes.len < @sizeOf(Offsets) + trailer.len) {
@@ -1208,7 +1173,7 @@ pub const StandaloneModuleGraph = struct {
return null;
}
const offsets = std.mem.bytesAsValue(Offsets, macho_bytes_slice).*;
return try fromBytesAlloc(allocator, @constCast(macho_bytes), offsets);
return try StandaloneModuleGraph.fromBytes(allocator, @constCast(macho_bytes), offsets);
}
if (comptime Environment.isWindows) {
@@ -1224,7 +1189,7 @@ pub const StandaloneModuleGraph = struct {
return null;
}
const offsets = std.mem.bytesAsValue(Offsets, pe_bytes_slice).*;
return try fromBytesAlloc(allocator, @constCast(pe_bytes), offsets);
return try StandaloneModuleGraph.fromBytes(allocator, @constCast(pe_bytes), offsets);
}
// Do not invoke libuv here.
@@ -1319,15 +1284,7 @@ pub const StandaloneModuleGraph = struct {
}
}
return try fromBytesAlloc(allocator, to_read, offsets);
}
/// Allocates a StandaloneModuleGraph on the heap, populates it from bytes, sets it globally, and returns the pointer.
fn fromBytesAlloc(allocator: std.mem.Allocator, raw_bytes: []u8, offsets: Offsets) !*StandaloneModuleGraph {
const graph_ptr = try allocator.create(StandaloneModuleGraph);
graph_ptr.* = try StandaloneModuleGraph.fromBytes(allocator, raw_bytes, offsets);
graph_ptr.set();
return graph_ptr;
return try StandaloneModuleGraph.fromBytes(allocator, to_read, offsets);
}
/// heuristic: `bun build --compile` won't be supported if the name is "bun", "bunx", or "node".

View File

@@ -166,7 +166,12 @@ pub const WatchEvent = struct {
pub fn merge(this: *WatchEvent, other: WatchEvent) void {
this.name_len += other.name_len;
this.op = Op.merge(this.op, other.op);
this.op = Op{
.delete = this.op.delete or other.op.delete,
.metadata = this.op.metadata or other.op.metadata,
.rename = this.op.rename or other.op.rename,
.write = this.op.write or other.op.write,
};
}
pub const Op = packed struct(u8) {
@@ -175,8 +180,7 @@ pub const WatchEvent = struct {
rename: bool = false,
write: bool = false,
move_to: bool = false,
create: bool = false,
_padding: u2 = 0,
_padding: u3 = 0,
pub fn merge(before: Op, after: Op) Op {
return .{
@@ -185,7 +189,6 @@ pub const WatchEvent = struct {
.metadata = before.metadata or after.metadata,
.rename = before.rename or after.rename,
.move_to = before.move_to or after.move_to,
.create = before.create or after.create,
};
}

View File

@@ -11,7 +11,6 @@ pub const AllocationScopeIn = allocation_scope.AllocationScopeIn;
pub const NullableAllocator = @import("./allocators/NullableAllocator.zig");
pub const MaxHeapAllocator = @import("./allocators/MaxHeapAllocator.zig");
pub const LinuxMemFdAllocator = @import("./allocators/LinuxMemFdAllocator.zig");
pub const BufferFallbackAllocator = @import("./allocators/BufferFallbackAllocator.zig");
pub const MaybeOwned = @import("./allocators/maybe_owned.zig").MaybeOwned;
pub fn isSliceInBufferT(comptime T: type, slice: []const T, buffer: []const T) bool {

View File

@@ -1,85 +0,0 @@
/// An allocator that attempts to allocate from a provided buffer first,
/// falling back to another allocator when the buffer is exhausted.
/// Unlike `std.heap.StackFallbackAllocator`, this does not own the buffer.
const BufferFallbackAllocator = @This();
#fallback_allocator: Allocator,
#fixed_buffer_allocator: FixedBufferAllocator,
pub fn init(buffer: []u8, fallback_allocator: Allocator) BufferFallbackAllocator {
return .{
.#fallback_allocator = fallback_allocator,
.#fixed_buffer_allocator = FixedBufferAllocator.init(buffer),
};
}
pub fn allocator(self: *BufferFallbackAllocator) Allocator {
return .{
.ptr = self,
.vtable = &.{
.alloc = alloc,
.resize = resize,
.remap = remap,
.free = free,
},
};
}
fn alloc(ctx: *anyopaque, len: usize, alignment: std.mem.Alignment, ra: usize) ?[*]u8 {
const self: *BufferFallbackAllocator = @ptrCast(@alignCast(ctx));
return FixedBufferAllocator.alloc(
&self.#fixed_buffer_allocator,
len,
alignment,
ra,
) orelse self.#fallback_allocator.rawAlloc(len, alignment, ra);
}
fn resize(ctx: *anyopaque, buf: []u8, alignment: std.mem.Alignment, new_len: usize, ra: usize) bool {
const self: *BufferFallbackAllocator = @ptrCast(@alignCast(ctx));
if (self.#fixed_buffer_allocator.ownsPtr(buf.ptr)) {
return FixedBufferAllocator.resize(
&self.#fixed_buffer_allocator,
buf,
alignment,
new_len,
ra,
);
}
return self.#fallback_allocator.rawResize(buf, alignment, new_len, ra);
}
fn remap(ctx: *anyopaque, memory: []u8, alignment: std.mem.Alignment, new_len: usize, ra: usize) ?[*]u8 {
const self: *BufferFallbackAllocator = @ptrCast(@alignCast(ctx));
if (self.#fixed_buffer_allocator.ownsPtr(memory.ptr)) {
return FixedBufferAllocator.remap(
&self.#fixed_buffer_allocator,
memory,
alignment,
new_len,
ra,
);
}
return self.#fallback_allocator.rawRemap(memory, alignment, new_len, ra);
}
fn free(ctx: *anyopaque, buf: []u8, alignment: std.mem.Alignment, ra: usize) void {
const self: *BufferFallbackAllocator = @ptrCast(@alignCast(ctx));
if (self.#fixed_buffer_allocator.ownsPtr(buf.ptr)) {
return FixedBufferAllocator.free(
&self.#fixed_buffer_allocator,
buf,
alignment,
ra,
);
}
return self.#fallback_allocator.rawFree(buf, alignment, ra);
}
pub fn reset(self: *BufferFallbackAllocator) void {
self.#fixed_buffer_allocator.reset();
}
const std = @import("std");
const Allocator = std.mem.Allocator;
const FixedBufferAllocator = std.heap.FixedBufferAllocator;
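
The deleted allocator above composed a FixedBufferAllocator with an arbitrary fallback. A minimal usage sketch, assuming the struct above is still compiled in (buffer and allocation sizes are arbitrary):

test "buffer first, fallback second" {
    var buf: [256]u8 = undefined;
    var bfa = BufferFallbackAllocator.init(&buf, std.testing.allocator);
    const a = bfa.allocator();

    const small = try a.alloc(u8, 64); // fits: served from the fixed buffer
    _ = small; // fixed-buffer memory is reclaimed by reset(), not free()
    const large = try a.alloc(u8, 4096); // exhausts the buffer: falls back
    defer a.free(large); // ownsPtr() routes this to the fallback allocator
    bfa.reset();
}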

View File

@@ -2,10 +2,7 @@
const Self = @This();
const safety_checks = bun.Environment.isDebug or bun.Environment.enable_asan;
#heap: *mimalloc.Heap,
thread_id: if (safety_checks) std.Thread.Id else void,
#heap: if (safety_checks) Owned(*DebugHeap) else *mimalloc.Heap,
/// Uses the default thread-local heap. This type is zero-sized.
///
@@ -23,18 +20,18 @@ pub const Default = struct {
///
/// This type is a `GenericAllocator`; see `src/allocators.zig`.
pub const Borrowed = struct {
#heap: *mimalloc.Heap,
#heap: BorrowedHeap,
pub fn allocator(self: Borrowed) std.mem.Allocator {
return .{ .ptr = self.#heap, .vtable = c_allocator_vtable };
return .{ .ptr = self.#heap, .vtable = &c_allocator_vtable };
}
pub fn getDefault() Borrowed {
return .{ .#heap = mimalloc.mi_heap_main() };
return .{ .#heap = getThreadHeap() };
}
pub fn gc(self: Borrowed) void {
mimalloc.mi_heap_collect(self.#heap, false);
mimalloc.mi_heap_collect(self.getMimallocHeap(), false);
}
pub fn helpCatchMemoryIssues(self: Borrowed) void {
@@ -44,17 +41,30 @@ pub const Borrowed = struct {
}
}
pub fn ownsPtr(self: Borrowed, ptr: *const anyopaque) bool {
return mimalloc.mi_heap_check_owned(self.getMimallocHeap(), ptr);
}
fn fromOpaque(ptr: *anyopaque) Borrowed {
return .{ .#heap = @ptrCast(@alignCast(ptr)) };
}
fn getMimallocHeap(self: Borrowed) *mimalloc.Heap {
return if (comptime safety_checks) self.#heap.inner else self.#heap;
}
fn assertThreadLock(self: Borrowed) void {
if (comptime safety_checks) self.#heap.thread_lock.assertLocked();
}
fn alignedAlloc(self: Borrowed, len: usize, alignment: Alignment) ?[*]u8 {
log("Malloc: {d}\n", .{len});
const heap = self.getMimallocHeap();
const ptr: ?*anyopaque = if (mimalloc.mustUseAlignedAlloc(alignment))
mimalloc.mi_heap_malloc_aligned(self.#heap, len, alignment.toByteUnits())
mimalloc.mi_heap_malloc_aligned(heap, len, alignment.toByteUnits())
else
mimalloc.mi_heap_malloc(self.#heap, len);
mimalloc.mi_heap_malloc(heap, len);
if (comptime bun.Environment.isDebug) {
const usable = mimalloc.mi_malloc_usable_size(ptr);
@@ -79,17 +89,42 @@ pub const Borrowed = struct {
}
};
const BorrowedHeap = if (safety_checks) *DebugHeap else *mimalloc.Heap;
const DebugHeap = struct {
inner: *mimalloc.Heap,
thread_lock: bun.safety.ThreadLock,
pub const deinit = void;
};
threadlocal var thread_heap: if (safety_checks) ?DebugHeap else void = if (safety_checks) null;
fn getThreadHeap() BorrowedHeap {
if (comptime !safety_checks) return mimalloc.mi_heap_get_default();
if (thread_heap == null) {
thread_heap = .{
.inner = mimalloc.mi_heap_get_default(),
.thread_lock = .initLocked(),
};
}
return &thread_heap.?;
}
const log = bun.Output.scoped(.mimalloc, .hidden);
pub fn allocator(self: Self) std.mem.Allocator {
self.assertThreadOwnership();
return self.borrow().allocator();
}
pub fn borrow(self: Self) Borrowed {
return .{ .#heap = self.#heap };
return .{ .#heap = if (comptime safety_checks) self.#heap.get() else self.#heap };
}
/// Internally, mimalloc calls mi_heap_get_default()
/// to get the default heap.
/// It uses pthread_getspecific to do that.
/// We can save those extra calls by doing it once here.
pub fn getThreadLocalDefault() std.mem.Allocator {
if (bun.Environment.enable_asan) return bun.default_allocator;
return Borrowed.getDefault().allocator();
@@ -122,15 +157,22 @@ pub fn dumpStats(_: Self) void {
}
pub fn deinit(self: *Self) void {
mimalloc.mi_heap_destroy(self.#heap);
const mimalloc_heap = self.borrow().getMimallocHeap();
if (comptime safety_checks) {
self.#heap.deinit();
}
mimalloc.mi_heap_destroy(mimalloc_heap);
self.* = undefined;
}
pub fn init() Self {
return .{
.#heap = mimalloc.mi_heap_new() orelse bun.outOfMemory(),
.thread_id = if (safety_checks) std.Thread.getCurrentId() else {},
};
const mimalloc_heap = mimalloc.mi_heap_new() orelse bun.outOfMemory();
if (comptime !safety_checks) return .{ .#heap = mimalloc_heap };
const heap: Owned(*DebugHeap) = .new(.{
.inner = mimalloc_heap,
.thread_lock = .initLocked(),
});
return .{ .#heap = heap };
}
pub fn gc(self: Self) void {
@@ -141,16 +183,8 @@ pub fn helpCatchMemoryIssues(self: Self) void {
self.borrow().helpCatchMemoryIssues();
}
fn assertThreadOwnership(self: Self) void {
if (comptime safety_checks) {
const current_thread = std.Thread.getCurrentId();
if (current_thread != self.thread_id) {
std.debug.panic(
"MimallocArena used from wrong thread: arena belongs to thread {d}, but current thread is {d}",
.{ self.thread_id, current_thread },
);
}
}
pub fn ownsPtr(self: Self, ptr: *const anyopaque) bool {
return self.borrow().ownsPtr(ptr);
}
fn alignedAllocSize(ptr: [*]u8) usize {
@@ -159,10 +193,13 @@ fn alignedAllocSize(ptr: [*]u8) usize {
fn vtable_alloc(ptr: *anyopaque, len: usize, alignment: Alignment, _: usize) ?[*]u8 {
const self: Borrowed = .fromOpaque(ptr);
self.assertThreadLock();
return self.alignedAlloc(len, alignment);
}
fn vtable_resize(_: *anyopaque, buf: []u8, _: Alignment, new_len: usize, _: usize) bool {
fn vtable_resize(ptr: *anyopaque, buf: []u8, _: Alignment, new_len: usize, _: usize) bool {
const self: Borrowed = .fromOpaque(ptr);
self.assertThreadLock();
return mimalloc.mi_expand(buf.ptr, new_len) != null;
}
@@ -186,17 +223,39 @@ fn vtable_free(
}
}
/// Attempt to expand or shrink memory, allowing relocation.
///
/// `memory.len` must equal the length requested from the most recent
/// successful call to `alloc`, `resize`, or `remap`. `alignment` must
/// equal the same value that was passed as the `alignment` parameter to
/// the original `alloc` call.
///
/// A non-`null` return value indicates the resize was successful. The
/// allocation may have the same address, or may have been relocated. In
/// either case, the allocation now has a size of `new_len`. A `null` return
/// value indicates that the resize would be equivalent to allocating new
/// memory, copying the bytes from the old memory, and then freeing the old
/// memory. In such a case, it is more efficient for the caller to perform
/// the copy.
///
/// `new_len` must be greater than zero.
///
/// `ret_addr` is optionally provided as the first return address of the
/// allocation call stack. If the value is `0` it means no return address
/// has been provided.
fn vtable_remap(ptr: *anyopaque, buf: []u8, alignment: Alignment, new_len: usize, _: usize) ?[*]u8 {
const self: Borrowed = .fromOpaque(ptr);
const value = mimalloc.mi_heap_realloc_aligned(self.#heap, buf.ptr, new_len, alignment.toByteUnits());
self.assertThreadLock();
const heap = self.getMimallocHeap();
const aligned_size = alignment.toByteUnits();
const value = mimalloc.mi_heap_realloc_aligned(heap, buf.ptr, new_len, aligned_size);
return @ptrCast(value);
}
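
The contract documented above implies the usual caller-side fallback when remap returns null. A hedged sketch of that caller pattern against the public std.mem.Allocator API (growth case only; not code from this PR):

fn grow(a: std.mem.Allocator, old: []u8, new_len: usize) ![]u8 {
    // remap may resize in place, relocate, or return null to say that
    // alloc + copy + free is the cheaper path for the caller to take.
    if (a.remap(old, new_len)) |resized| return resized;
    const fresh = try a.alloc(u8, new_len);
    @memcpy(fresh[0..old.len], old); // assumes new_len >= old.len
    a.free(old);
    return fresh;
}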
pub fn isInstance(alloc: std.mem.Allocator) bool {
return alloc.vtable == c_allocator_vtable;
return alloc.vtable == &c_allocator_vtable;
}
const c_allocator_vtable = &std.mem.Allocator.VTable{
const c_allocator_vtable = std.mem.Allocator.VTable{
.alloc = vtable_alloc,
.resize = vtable_resize,
.remap = vtable_remap,
@@ -209,3 +268,5 @@ const Alignment = std.mem.Alignment;
const bun = @import("bun");
const assert = bun.assert;
const mimalloc = bun.mimalloc;
const Owned = bun.ptr.Owned;
const safety_checks = bun.Environment.ci_assert;
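
The BorrowedHeap/DebugHeap split above follows a common Zig pattern: select the field's type at comptime so release builds carry only the raw pointer while checked builds wrap it with bookkeeping. A reduced sketch of that pattern with hypothetical names:

const std = @import("std");
const builtin = @import("builtin");

const safety = builtin.mode == .Debug; // stand-in for bun.Environment.ci_assert

const Raw = *u32; // stands in for *mimalloc.Heap

const Checked = struct {
    inner: Raw,
    owner: std.Thread.Id, // bookkeeping only checked builds pay for
};

const Handle = if (safety) *Checked else Raw;

fn raw(h: Handle) Raw {
    return if (comptime safety) h.inner else h;
}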

View File

@@ -60,29 +60,17 @@ pub const Heap = opaque {
return mi_heap_realloc(self, p, newsize);
}
pub fn isOwned(self: *Heap, p: ?*const anyopaque) bool {
return mi_heap_contains(self, p);
pub fn isOwned(self: *Heap, p: ?*anyopaque) bool {
return mi_heap_check_owned(self, p);
}
};
pub extern fn mi_heap_new() ?*Heap;
pub extern fn mi_heap_delete(heap: *Heap) void;
pub extern fn mi_heap_destroy(heap: *Heap) void;
pub extern fn mi_heap_set_default(heap: *Heap) *Heap;
pub extern fn mi_heap_get_default() *Heap;
pub extern fn mi_heap_get_backing() *Heap;
pub extern fn mi_heap_collect(heap: *Heap, force: bool) void;
pub extern fn mi_heap_main() *Heap;
// Thread-local heap (theap) API - new in mimalloc v3
pub const THeap = opaque {};
pub extern fn mi_theap_get_default() *THeap;
pub extern fn mi_theap_set_default(theap: *THeap) *THeap;
pub extern fn mi_theap_collect(theap: *THeap, force: bool) void;
pub extern fn mi_theap_malloc(theap: *THeap, size: usize) ?*anyopaque;
pub extern fn mi_theap_zalloc(theap: *THeap, size: usize) ?*anyopaque;
pub extern fn mi_theap_calloc(theap: *THeap, count: usize, size: usize) ?*anyopaque;
pub extern fn mi_theap_malloc_small(theap: *THeap, size: usize) ?*anyopaque;
pub extern fn mi_theap_malloc_aligned(theap: *THeap, size: usize, alignment: usize) ?*anyopaque;
pub extern fn mi_theap_realloc(theap: *THeap, p: ?*anyopaque, newsize: usize) ?*anyopaque;
pub extern fn mi_theap_destroy(theap: *THeap) void;
pub extern fn mi_heap_theap(heap: *Heap) *THeap;
pub extern fn mi_heap_malloc(heap: *Heap, size: usize) ?*anyopaque;
pub extern fn mi_heap_zalloc(heap: *Heap, size: usize) ?*anyopaque;
pub extern fn mi_heap_calloc(heap: *Heap, count: usize, size: usize) ?*anyopaque;
@@ -114,7 +102,8 @@ pub extern fn mi_heap_rezalloc_aligned(heap: *Heap, p: ?*anyopaque, newsize: usi
pub extern fn mi_heap_rezalloc_aligned_at(heap: *Heap, p: ?*anyopaque, newsize: usize, alignment: usize, offset: usize) ?*anyopaque;
pub extern fn mi_heap_recalloc_aligned(heap: *Heap, p: ?*anyopaque, newcount: usize, size: usize, alignment: usize) ?*anyopaque;
pub extern fn mi_heap_recalloc_aligned_at(heap: *Heap, p: ?*anyopaque, newcount: usize, size: usize, alignment: usize, offset: usize) ?*anyopaque;
pub extern fn mi_heap_contains(heap: *const Heap, p: ?*const anyopaque) bool;
pub extern fn mi_heap_contains_block(heap: *Heap, p: *const anyopaque) bool;
pub extern fn mi_heap_check_owned(heap: *Heap, p: *const anyopaque) bool;
pub extern fn mi_check_owned(p: ?*const anyopaque) bool;
pub const struct_mi_heap_area_s = extern struct {
blocks: ?*anyopaque,

View File

@@ -6467,11 +6467,6 @@ pub fn NewParser_(
parts.items[0].stmts = top_level_stmts;
}
// REPL mode transforms
if (p.options.repl_mode) {
try repl_transforms.ReplTransforms(P).apply(p, parts, allocator);
}
var top_level_symbols_to_parts = js_ast.Ast.TopLevelSymbolToParts{};
var top_level = &top_level_symbols_to_parts;
@@ -6532,9 +6527,7 @@ pub fn NewParser_(
break :brk p.hmr_api_ref;
}
// When code splitting is enabled, always create wrapper_ref to match esbuild behavior.
// Otherwise, use needsWrapperRef() to optimize away unnecessary wrappers.
if (p.options.bundle and (p.options.code_splitting or p.needsWrapperRef(parts.items))) {
if (p.options.bundle and p.needsWrapperRef(parts.items)) {
break :brk p.newSymbol(
.other,
std.fmt.allocPrint(
@@ -6767,8 +6760,6 @@ var falseValueExpr = Expr.Data{ .e_boolean = E.Boolean{ .value = false } };
const string = []const u8;
const repl_transforms = @import("./repl_transforms.zig");
const Define = @import("../defines.zig").Define;
const DefineData = @import("../defines.zig").DefineData;

View File

@@ -19,7 +19,6 @@ pub const Parser = struct {
tree_shaking: bool = false,
bundle: bool = false,
code_splitting: bool = false,
package_version: string = "",
macro_context: *MacroContextType() = undefined,
@@ -39,13 +38,6 @@ pub const Parser = struct {
/// able to customize what import sources are used.
framework: ?*bun.bake.Framework = null,
/// REPL mode: transforms code for interactive evaluation
/// - Wraps lone object literals `{...}` in parentheses
/// - Hoists variable declarations for REPL persistence
/// - Wraps last expression in { value: expr } for result capture
/// - Wraps code with await in async IIFE
repl_mode: bool = false,
pub fn hashForRuntimeTranspiler(this: *const Options, hasher: *std.hash.Wyhash, did_use_jsx: bool) void {
bun.assert(!this.bundle);
@@ -791,39 +783,9 @@ pub const Parser = struct {
// else
// module.exports = require('./foo.dev.js')
//
// Find the part containing the actual module.exports = require() statement,
// skipping over parts that only contain comments, directives, and empty statements.
// This handles files like:
//
// /*!
// * express
// * MIT Licensed
// */
// 'use strict';
// module.exports = require('./lib/express');
//
// When tree-shaking is enabled, each statement becomes its own part, so we need
// to look across all parts to find the single meaningful statement.
const StmtAndPart = struct { stmt: Stmt, part_idx: usize };
const stmt_and_part: ?StmtAndPart = brk: {
var found: ?StmtAndPart = null;
for (parts.items, 0..) |part, part_idx| {
for (part.stmts) |s| {
switch (s.data) {
.s_comment, .s_directive, .s_empty => continue,
else => {
// If we already found a non-trivial statement, there's more than one
if (found != null) break :brk null;
found = .{ .stmt = s, .part_idx = part_idx };
},
}
}
}
break :brk found;
};
if (stmt_and_part) |found| {
const stmt = found.stmt;
var part = &parts.items[found.part_idx];
if (parts.items.len == 1 and parts.items[0].stmts.len == 1) {
var part = &parts.items[0];
const stmt: Stmt = part.stmts[0];
if (p.symbols.items[p.module_ref.innerIndex()].use_count_estimate == 1) {
if (stmt.data == .s_expr) {
const value: Expr = stmt.data.s_expr.value;
@@ -838,13 +800,13 @@ pub const Parser = struct {
left.data.e_dot.target.data == .e_identifier and
left.data.e_dot.target.data.e_identifier.ref.eql(p.module_ref))
{
const redirect_import_record_index: ?u32 = inner_brk: {
const redirect_import_record_index: ?u32 = brk: {
// general case:
//
// module.exports = require("foo");
//
if (right.data == .e_require_string) {
break :inner_brk right.data.e_require_string.import_record_index;
break :brk right.data.e_require_string.import_record_index;
}
// special case: a module for us to unwrap
@@ -863,10 +825,10 @@ pub const Parser = struct {
{
// We know it's 0 because there is only one import in the whole file
// so that one import must be the one we're looking for
break :inner_brk 0;
break :brk 0;
}
break :inner_brk null;
break :brk null;
};
if (redirect_import_record_index) |id| {
part.symbol_uses = .{};

View File

@@ -1,365 +0,0 @@
/// REPL Transform module - transforms code for interactive REPL evaluation
///
/// This module provides transformations for REPL mode:
/// - Wraps the last expression in { value: expr } for result capture
/// - Wraps code with await in async IIFE with variable hoisting
/// - Hoists declarations for variable persistence across REPL lines
pub fn ReplTransforms(comptime P: type) type {
return struct {
const Self = @This();
/// Apply REPL-mode transforms to the AST.
/// This transforms code for interactive evaluation:
/// - Wraps the last expression in { value: expr } for result capture
/// - Wraps code with await in async IIFE with variable hoisting
pub fn apply(p: *P, parts: *ListManaged(js_ast.Part), allocator: Allocator) !void {
// Skip transform if there's a top-level return (indicates module pattern)
if (p.has_top_level_return) {
return;
}
// Collect all statements
var total_stmts_count: usize = 0;
for (parts.items) |part| {
total_stmts_count += part.stmts.len;
}
if (total_stmts_count == 0) {
return;
}
// Check if there's top-level await
const has_top_level_await = p.top_level_await_keyword.len > 0;
// Collect all statements into a single array
var all_stmts = bun.handleOom(allocator.alloc(Stmt, total_stmts_count));
var stmt_idx: usize = 0;
for (parts.items) |part| {
for (part.stmts) |stmt| {
all_stmts[stmt_idx] = stmt;
stmt_idx += 1;
}
}
// Apply transform with is_async based on presence of top-level await
try transformWithHoisting(p, parts, all_stmts, allocator, has_top_level_await);
}
/// Transform code with hoisting and IIFE wrapper
/// @param is_async: true for async IIFE (when top-level await present), false for sync IIFE
fn transformWithHoisting(
p: *P,
parts: *ListManaged(js_ast.Part),
all_stmts: []Stmt,
allocator: Allocator,
is_async: bool,
) !void {
if (all_stmts.len == 0) return;
// Lists for hoisted declarations and inner statements
var hoisted_stmts = ListManaged(Stmt).init(allocator);
var inner_stmts = ListManaged(Stmt).init(allocator);
try hoisted_stmts.ensureTotalCapacity(all_stmts.len);
try inner_stmts.ensureTotalCapacity(all_stmts.len);
// Process each statement - hoist all declarations for REPL persistence
for (all_stmts) |stmt| {
switch (stmt.data) {
.s_local => |local| {
// Hoist all declarations as var so they become context properties
// In sloppy mode, var at top level becomes a property of the global/context object
// This is essential for REPL variable persistence across vm.runInContext calls
const kind: S.Local.Kind = .k_var;
// Extract individual identifiers from binding patterns for hoisting
var hoisted_decl_list = ListManaged(G.Decl).init(allocator);
for (local.decls.slice()) |decl| {
try extractIdentifiersFromBinding(p, decl.binding, &hoisted_decl_list);
}
if (hoisted_decl_list.items.len > 0) {
try hoisted_stmts.append(p.s(S.Local{
.kind = kind,
.decls = Decl.List.fromOwnedSlice(hoisted_decl_list.items),
}, stmt.loc));
}
// Create assignment expressions for the inner statements
for (local.decls.slice()) |decl| {
if (decl.value) |value| {
// Create assignment expression: binding = value
const assign_expr = createBindingAssignment(p, decl.binding, value, allocator);
try inner_stmts.append(p.s(S.SExpr{ .value = assign_expr }, stmt.loc));
}
}
},
.s_function => |func| {
// For function declarations:
// Hoist as: var funcName;
// Inner: this.funcName = funcName; function funcName() {}
if (func.func.name) |name_loc| {
try hoisted_stmts.append(p.s(S.Local{
.kind = .k_var,
.decls = Decl.List.fromOwnedSlice(bun.handleOom(allocator.dupe(G.Decl, &.{
G.Decl{
.binding = p.b(B.Identifier{ .ref = name_loc.ref.? }, name_loc.loc),
.value = null,
},
}))),
}, stmt.loc));
// Add this.funcName = funcName assignment
const this_expr = p.newExpr(E.This{}, stmt.loc);
const this_dot = p.newExpr(E.Dot{
.target = this_expr,
.name = p.symbols.items[name_loc.ref.?.innerIndex()].original_name,
.name_loc = name_loc.loc,
}, stmt.loc);
const func_id = p.newExpr(E.Identifier{ .ref = name_loc.ref.? }, name_loc.loc);
const assign = p.newExpr(E.Binary{
.op = .bin_assign,
.left = this_dot,
.right = func_id,
}, stmt.loc);
try inner_stmts.append(p.s(S.SExpr{ .value = assign }, stmt.loc));
}
// Add the function declaration itself
try inner_stmts.append(stmt);
},
.s_class => |class| {
// For class declarations:
// Hoist as: var ClassName; (use var so it persists to vm context)
// Inner: ClassName = class ClassName {}
if (class.class.class_name) |name_loc| {
try hoisted_stmts.append(p.s(S.Local{
.kind = .k_var,
.decls = Decl.List.fromOwnedSlice(bun.handleOom(allocator.dupe(G.Decl, &.{
G.Decl{
.binding = p.b(B.Identifier{ .ref = name_loc.ref.? }, name_loc.loc),
.value = null,
},
}))),
}, stmt.loc));
// Convert class declaration to assignment: ClassName = class ClassName {}
const class_expr = p.newExpr(class.class, stmt.loc);
const class_id = p.newExpr(E.Identifier{ .ref = name_loc.ref.? }, name_loc.loc);
const assign = p.newExpr(E.Binary{
.op = .bin_assign,
.left = class_id,
.right = class_expr,
}, stmt.loc);
try inner_stmts.append(p.s(S.SExpr{ .value = assign }, stmt.loc));
} else {
try inner_stmts.append(stmt);
}
},
.s_directive => |directive| {
// In REPL mode, treat directives (string literals) as expressions
const str_expr = p.newExpr(E.String{ .data = directive.value }, stmt.loc);
try inner_stmts.append(p.s(S.SExpr{ .value = str_expr }, stmt.loc));
},
else => {
try inner_stmts.append(stmt);
},
}
}
// Wrap the last expression in return { value: expr }
wrapLastExpressionWithReturn(p, &inner_stmts, allocator);
// Create the IIFE: (() => { ...inner_stmts... })() or (async () => { ... })()
const arrow = p.newExpr(E.Arrow{
.args = &.{},
.body = .{ .loc = logger.Loc.Empty, .stmts = inner_stmts.items },
.is_async = is_async,
}, logger.Loc.Empty);
const iife = p.newExpr(E.Call{
.target = arrow,
.args = ExprNodeList{},
}, logger.Loc.Empty);
// Final output: hoisted declarations + IIFE call
const final_stmts_count = hoisted_stmts.items.len + 1;
var final_stmts = bun.handleOom(allocator.alloc(Stmt, final_stmts_count));
for (hoisted_stmts.items, 0..) |stmt, j| {
final_stmts[j] = stmt;
}
final_stmts[hoisted_stmts.items.len] = p.s(S.SExpr{ .value = iife }, logger.Loc.Empty);
// Update parts
if (parts.items.len > 0) {
parts.items[0].stmts = final_stmts;
parts.items.len = 1;
}
}
/// Wrap the last expression in return { value: expr }
fn wrapLastExpressionWithReturn(p: *P, inner_stmts: *ListManaged(Stmt), allocator: Allocator) void {
if (inner_stmts.items.len > 0) {
var last_idx: usize = inner_stmts.items.len;
while (last_idx > 0) {
last_idx -= 1;
const last_stmt = inner_stmts.items[last_idx];
switch (last_stmt.data) {
.s_empty, .s_comment => continue,
.s_expr => |expr_data| {
// Wrap in return { value: expr }
const wrapped = wrapExprInValueObject(p, expr_data.value, allocator);
inner_stmts.items[last_idx] = p.s(S.Return{ .value = wrapped }, last_stmt.loc);
break;
},
else => break,
}
}
}
}
/// Extract individual identifiers from a binding pattern for hoisting
fn extractIdentifiersFromBinding(p: *P, binding: Binding, decls: *ListManaged(G.Decl)) !void {
switch (binding.data) {
.b_identifier => |ident| {
try decls.append(G.Decl{
.binding = p.b(B.Identifier{ .ref = ident.ref }, binding.loc),
.value = null,
});
},
.b_array => |arr| {
for (arr.items) |item| {
try extractIdentifiersFromBinding(p, item.binding, decls);
}
},
.b_object => |obj| {
for (obj.properties) |prop| {
try extractIdentifiersFromBinding(p, prop.value, decls);
}
},
.b_missing => {},
}
}
/// Create { __proto__: null, value: expr } wrapper object
/// Uses null prototype to create a clean data object
fn wrapExprInValueObject(p: *P, expr: Expr, allocator: Allocator) Expr {
var properties = bun.handleOom(allocator.alloc(G.Property, 2));
// __proto__: null - creates null-prototype object
properties[0] = G.Property{
.key = p.newExpr(E.String{ .data = "__proto__" }, expr.loc),
.value = p.newExpr(E.Null{}, expr.loc),
};
// value: expr - the actual result value
properties[1] = G.Property{
.key = p.newExpr(E.String{ .data = "value" }, expr.loc),
.value = expr,
};
return p.newExpr(E.Object{
.properties = G.Property.List.fromOwnedSlice(properties),
}, expr.loc);
}
/// Create assignment expression from binding pattern
fn createBindingAssignment(p: *P, binding: Binding, value: Expr, allocator: Allocator) Expr {
switch (binding.data) {
.b_identifier => |ident| {
return p.newExpr(E.Binary{
.op = .bin_assign,
.left = p.newExpr(E.Identifier{ .ref = ident.ref }, binding.loc),
.right = value,
}, binding.loc);
},
.b_array => {
// For array destructuring, create: [a, b] = value
return p.newExpr(E.Binary{
.op = .bin_assign,
.left = convertBindingToExpr(p, binding, allocator),
.right = value,
}, binding.loc);
},
.b_object => {
// For object destructuring, create: {a, b} = value
return p.newExpr(E.Binary{
.op = .bin_assign,
.left = convertBindingToExpr(p, binding, allocator),
.right = value,
}, binding.loc);
},
.b_missing => {
// Return Missing expression to match convertBindingToExpr
return p.newExpr(E.Missing{}, binding.loc);
},
}
}
/// Convert a binding pattern to an expression (for assignment targets)
/// Handles spread/rest patterns in arrays and objects to match Binding.toExpr behavior
fn convertBindingToExpr(p: *P, binding: Binding, allocator: Allocator) Expr {
switch (binding.data) {
.b_identifier => |ident| {
return p.newExpr(E.Identifier{ .ref = ident.ref }, binding.loc);
},
.b_array => |arr| {
var items = bun.handleOom(allocator.alloc(Expr, arr.items.len));
for (arr.items, 0..) |item, i| {
const expr = convertBindingToExpr(p, item.binding, allocator);
// Check for spread pattern: if has_spread and this is the last element
if (arr.has_spread and i == arr.items.len - 1) {
items[i] = p.newExpr(E.Spread{ .value = expr }, expr.loc);
} else if (item.default_value) |default_val| {
items[i] = p.newExpr(E.Binary{
.op = .bin_assign,
.left = expr,
.right = default_val,
}, item.binding.loc);
} else {
items[i] = expr;
}
}
return p.newExpr(E.Array{
.items = ExprNodeList.fromOwnedSlice(items),
.is_single_line = arr.is_single_line,
}, binding.loc);
},
.b_object => |obj| {
var properties = bun.handleOom(allocator.alloc(G.Property, obj.properties.len));
for (obj.properties, 0..) |prop, i| {
properties[i] = G.Property{
.flags = prop.flags,
.key = prop.key,
// Set kind to .spread if the property has spread flag
.kind = if (prop.flags.contains(.is_spread)) .spread else .normal,
.value = convertBindingToExpr(p, prop.value, allocator),
.initializer = prop.default_value,
};
}
return p.newExpr(E.Object{
.properties = G.Property.List.fromOwnedSlice(properties),
.is_single_line = obj.is_single_line,
}, binding.loc);
},
.b_missing => {
return p.newExpr(E.Missing{}, binding.loc);
},
}
}
};
}
const std = @import("std");
const Allocator = std.mem.Allocator;
const ListManaged = std.array_list.Managed;
const bun = @import("bun");
const logger = bun.logger;
const js_ast = bun.ast;
const B = js_ast.B;
const Binding = js_ast.Binding;
const E = js_ast.E;
const Expr = js_ast.Expr;
const ExprNodeList = js_ast.ExprNodeList;
const S = js_ast.S;
const Stmt = js_ast.Stmt;
const G = js_ast.G;
const Decl = G.Decl;
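
Concretely, per the doc comments in the deleted file above: a REPL line such as `let x = 1; x + 1` (a hypothetical input, not taken from the tests) would hoist `var x;` ahead of the IIFE, emit `x = 1` as an assignment inside it, and rewrite the trailing expression to `return { __proto__: null, value: x + 1 };`, with the arrow marked `async` whenever the source contains top-level `await`.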

View File

@@ -1355,7 +1355,7 @@ fn computeArgumentsForFrameworkRequest(
const relative_path_buf = bun.path_buffer_pool.get();
defer bun.path_buffer_pool.put(relative_path_buf);
var route_name = bun.String.cloneUTF8(dev.relativePath(relative_path_buf, keys[fromOpaqueFileId(.server, route.file_page.unwrap().?).get()]));
try arr.putIndex(global, 0, try route_name.transferToJS(global));
try arr.putIndex(global, 0, route_name.transferToJS(global));
}
n = 1;
while (true) {
@@ -1366,7 +1366,7 @@ fn computeArgumentsForFrameworkRequest(
relative_path_buf,
keys[fromOpaqueFileId(.server, layout).get()],
));
try arr.putIndex(global, @intCast(n), try layout_name.transferToJS(global));
try arr.putIndex(global, @intCast(n), layout_name.transferToJS(global));
n += 1;
}
route = dev.router.routePtr(route.parent.unwrap() orelse break);
@@ -1383,7 +1383,7 @@ fn computeArgumentsForFrameworkRequest(
std.mem.asBytes(&generation),
}) catch |err| bun.handleOom(err);
defer str.deref();
const js = try str.toJS(dev.vm.global);
const js = str.toJS(dev.vm.global);
framework_bundle.cached_client_bundle_url = .create(js, dev.vm.global);
break :str js;
},
@@ -2091,7 +2091,7 @@ fn generateCssJSArray(dev: *DevServer, route_bundle: *RouteBundle) bun.JSError!j
}) catch unreachable;
const str = bun.String.cloneUTF8(path);
defer str.deref();
try arr.putIndex(dev.vm.global, @intCast(i), try str.toJS(dev.vm.global));
try arr.putIndex(dev.vm.global, @intCast(i), str.toJS(dev.vm.global));
}
return arr;
}
@@ -2136,7 +2136,7 @@ fn makeArrayForServerComponentsPatch(dev: *DevServer, global: *jsc.JSGlobalObjec
defer bun.path_buffer_pool.put(relative_path_buf);
const str = bun.String.cloneUTF8(dev.relativePath(relative_path_buf, names[item.get()]));
defer str.deref();
try arr.putIndex(global, @intCast(i), try str.toJS(global));
try arr.putIndex(global, @intCast(i), str.toJS(global));
}
return arr;
}
@@ -2845,7 +2845,7 @@ pub fn finalizeBundle(
if (dev.bundling_failures.count() == 0) {
if (current_bundle.had_reload_event) {
const clear_terminal = !debug.isVisible() and !dev.vm.transpiler.env.hasSetNoClearTerminalOnReload(false);
const clear_terminal = !debug.isVisible();
if (clear_terminal) {
Output.disableBuffering();
Output.resetTerminalAll();

View File

@@ -839,7 +839,7 @@ pub const MatchedParams = struct {
const value_str = bun.String.cloneUTF8(param.value);
defer value_str.deref();
_ = obj.putBunStringOneOrArray(global, &key_str, value_str.toJS(global) catch unreachable) catch unreachable;
_ = obj.putBunStringOneOrArray(global, &key_str, value_str.toJS(global)) catch unreachable;
}
return obj;
}
@@ -1247,7 +1247,7 @@ pub const JSFrameworkRouter = struct {
for (params_out.params.slice()) |param| {
const value = bun.String.cloneUTF8(param.value);
defer value.deref();
obj.put(global, param.key, try value.toJS(global));
obj.put(global, param.key, value.toJS(global));
}
break :params obj;
} else .null,
@@ -1271,8 +1271,8 @@ pub const JSFrameworkRouter = struct {
const route = jsfr.router.routePtr(route_index);
return (try jsc.JSObject.create(.{
.part = try partToJS(global, route.part, allocator),
.page = try jsfr.fileIdToJS(global, route.file_page),
.layout = try jsfr.fileIdToJS(global, route.file_layout),
.page = jsfr.fileIdToJS(global, route.file_page),
.layout = jsfr.fileIdToJS(global, route.file_layout),
// .notFound = jsfr.fileIdToJS(global, route.file_not_found),
.children = brk: {
var len: usize = 0;
@@ -1295,8 +1295,8 @@ pub const JSFrameworkRouter = struct {
const route = jsfr.router.routePtr(route_index);
return (try jsc.JSObject.create(.{
.part = try partToJS(global, route.part, allocator),
.page = try jsfr.fileIdToJS(global, route.file_page),
.layout = try jsfr.fileIdToJS(global, route.file_layout),
.page = jsfr.fileIdToJS(global, route.file_page),
.layout = jsfr.fileIdToJS(global, route.file_layout),
// .notFound = jsfr.fileIdToJS(global, route.file_not_found),
.parent = if (route.parent.unwrap()) |parent|
try routeToJsonInverse(jsfr, global, parent, allocator)
@@ -1341,8 +1341,8 @@ pub const JSFrameworkRouter = struct {
var out = bun.String.init(rendered.items);
const obj = JSValue.createEmptyObject(global, 2);
obj.put(global, "kind", try bun.String.static(@tagName(parsed.kind)).toJS(global));
obj.put(global, "pattern", try out.transferToJS(global));
obj.put(global, "kind", bun.String.static(@tagName(parsed.kind)).toJS(global));
obj.put(global, "pattern", out.transferToJS(global));
return obj;
}
@@ -1352,7 +1352,7 @@ pub const JSFrameworkRouter = struct {
var it = pattern.iterate();
while (it.next()) |part| try part.toStringForInternalUse(rendered.writer());
var str = bun.String.cloneUTF8(rendered.items);
return try str.transferToJS(global);
return str.transferToJS(global);
}
fn partToJS(global: *JSGlobalObject, part: Part, temp_allocator: Allocator) !JSValue {
@@ -1360,7 +1360,7 @@ pub const JSFrameworkRouter = struct {
defer rendered.deinit();
try part.toStringForInternalUse(rendered.writer());
var str = bun.String.cloneUTF8(rendered.items);
return try str.transferToJS(global);
return str.transferToJS(global);
}
pub fn getFileIdForRouter(jsfr: *JSFrameworkRouter, abs_path: []const u8, _: Route.Index, _: Route.FileKind) !OpaqueFileId {
@@ -1377,7 +1377,7 @@ pub const JSFrameworkRouter = struct {
});
}
pub fn fileIdToJS(jsfr: *JSFrameworkRouter, global: *JSGlobalObject, id: OpaqueFileId.Optional) bun.JSError!JSValue {
pub fn fileIdToJS(jsfr: *JSFrameworkRouter, global: *JSGlobalObject, id: OpaqueFileId.Optional) JSValue {
return jsfr.files.items[(id.unwrap() orelse return .null).get()].toJS(global);
}
};

View File

@@ -182,7 +182,7 @@ pub fn buildWithVm(ctx: bun.cli.Command.Context, cwd: []const u8, vm: *VirtualMa
.pending => unreachable,
.fulfilled => |resolved| config: {
bun.assert(resolved.isUndefined());
const default = BakeGetDefaultExportFromModule(vm.global, try config_entry_point_string.toJS(vm.global));
const default = BakeGetDefaultExportFromModule(vm.global, config_entry_point_string.toJS(vm.global));
if (!default.isObject()) {
return global.throwInvalidArguments(
@@ -485,7 +485,7 @@ pub fn buildWithVm(ctx: bun.cli.Command.Context, cwd: []const u8, vm: *VirtualMa
for (router.types, 0..) |router_type, i| {
if (router_type.client_file.unwrap()) |client_file| {
const str = try (try bun.String.createFormat("{s}{s}", .{
const str = (try bun.String.createFormat("{s}{s}", .{
public_path,
pt.outputFile(client_file).dest_path,
})).toJS(global);
@@ -542,7 +542,7 @@ pub fn buildWithVm(ctx: bun.cli.Command.Context, cwd: []const u8, vm: *VirtualMa
bun.assert(output_file.dest_path[0] != '.');
// CSS chunks must be in contiguous order!!
bun.assert(output_file.loader.isCSS());
str.* = try (try bun.String.createFormat("{s}{s}", .{ public_path, output_file.dest_path })).toJS(global);
str.* = (try bun.String.createFormat("{s}{s}", .{ public_path, output_file.dest_path })).toJS(global);
}
// Route URL patterns with parameter placeholders.
@@ -659,10 +659,10 @@ pub fn buildWithVm(ctx: bun.cli.Command.Context, cwd: []const u8, vm: *VirtualMa
// Init the items
var pattern_string = bun.String.cloneUTF8(pattern.slice());
defer pattern_string.deref();
try route_patterns.putIndex(global, @intCast(nav_index), try pattern_string.toJS(global));
try route_patterns.putIndex(global, @intCast(nav_index), pattern_string.toJS(global));
var src_path = bun.String.cloneUTF8(bun.path.relative(cwd, pt.inputFile(main_file_route_index).absPath()));
try route_source_files.putIndex(global, @intCast(nav_index), try src_path.transferToJS(global));
try route_source_files.putIndex(global, @intCast(nav_index), src_path.transferToJS(global));
try route_nested_files.putIndex(global, @intCast(nav_index), file_list);
try route_type_and_flags.putIndex(global, @intCast(nav_index), JSValue.jsNumberFromInt32(@bitCast(TypeAndFlags{
@@ -993,7 +993,7 @@ pub const PerThread = struct {
return try loadModule(
pt.vm,
pt.vm.global,
try pt.module_keys[id.get()].toJS(pt.vm.global),
pt.module_keys[id.get()].toJS(pt.vm.global),
);
}
@@ -1010,7 +1010,7 @@ pub const PerThread = struct {
try pt.all_server_files.putIndex(
pt.vm.global,
@intCast(id.get()),
try pt.module_keys[id.get()].toJS(pt.vm.global),
pt.module_keys[id.get()].toJS(pt.vm.global),
);
}

View File

@@ -13,18 +13,21 @@ pub const Run = struct {
var run: Run = undefined;
pub fn bootStandalone(ctx: Command.Context, entry_path: string, graph_ptr: *bun.StandaloneModuleGraph) !void {
pub fn bootStandalone(ctx: Command.Context, entry_path: string, graph: bun.StandaloneModuleGraph) !void {
jsc.markBinding(@src());
bun.jsc.initialize(false);
bun.analytics.Features.standalone_executable += 1;
const graph_ptr = try bun.default_allocator.create(bun.StandaloneModuleGraph);
graph_ptr.* = graph;
graph_ptr.set();
js_ast.Expr.Data.Store.create();
js_ast.Stmt.Data.Store.create();
const arena = Arena.init();
// Load bunfig.toml unless disabled by compile flags
// Note: config loading with execArgv is handled earlier in cli.zig via loadConfig
if (!ctx.debug.loaded_bunfig and !graph_ptr.flags.disable_autoload_bunfig) {
if (!ctx.debug.loaded_bunfig and !graph.flags.disable_autoload_bunfig) {
try bun.cli.Arguments.loadConfigPath(ctx.allocator, true, "bunfig.toml", ctx, .RunCommand);
}
@@ -84,7 +87,7 @@ pub const Run = struct {
// If .env loading is disabled, only load process env vars
// Otherwise, load all .env files
if (graph_ptr.flags.disable_default_env_files) {
if (graph.flags.disable_default_env_files) {
b.options.env.behavior = .disable;
} else {
b.options.env.behavior = .load_all_without_inlining;
@@ -92,8 +95,8 @@ pub const Run = struct {
// Control loading of tsconfig.json and package.json at runtime
// By default, these are disabled for standalone executables
b.resolver.opts.load_tsconfig_json = !graph_ptr.flags.disable_autoload_tsconfig;
b.resolver.opts.load_package_json = !graph_ptr.flags.disable_autoload_package_json;
b.resolver.opts.load_tsconfig_json = !graph.flags.disable_autoload_tsconfig;
b.resolver.opts.load_package_json = !graph.flags.disable_autoload_package_json;
b.configureDefines() catch {
failWithBuildError(vm);
@@ -283,25 +286,11 @@ pub const Run = struct {
vm.cpu_profiler_config = CPUProfiler.CPUProfilerConfig{
.name = cpu_prof_opts.name,
.dir = cpu_prof_opts.dir,
.md_format = cpu_prof_opts.md_format,
.json_format = cpu_prof_opts.json_format,
};
CPUProfiler.startCPUProfiler(vm.jsc_vm);
bun.analytics.Features.cpu_profile += 1;
}
// Set up heap profiler config if enabled (actual profiling happens on exit)
if (this.ctx.runtime_options.heap_prof.enabled) {
const heap_prof_opts = this.ctx.runtime_options.heap_prof;
vm.heap_profiler_config = HeapProfiler.HeapProfilerConfig{
.name = heap_prof_opts.name,
.dir = heap_prof_opts.dir,
.text_format = heap_prof_opts.text_format,
};
bun.analytics.Features.heap_snapshot += 1;
}
this.addConditionalGlobals();
do_redis_preconnect: {
// This must happen within the API lock, which is why it's not in the "doPreconnect" function
@@ -563,7 +552,6 @@ const VirtualMachine = jsc.VirtualMachine;
const string = []const u8;
const CPUProfiler = @import("./bun.js/bindings/BunCPUProfiler.zig");
const HeapProfiler = @import("./bun.js/bindings/BunHeapProfiler.zig");
const options = @import("./options.zig");
const std = @import("std");
const Command = @import("./cli.zig").Command;

View File

@@ -94,7 +94,7 @@ pub const BuildMessage = struct {
_: *jsc.CallFrame,
) bun.JSError!jsc.JSValue {
var object = jsc.JSValue.createEmptyObject(globalThis, 4);
object.put(globalThis, ZigString.static("name"), try bun.String.static("BuildMessage").toJS(globalThis));
object.put(globalThis, ZigString.static("name"), bun.String.static("BuildMessage").toJS(globalThis));
object.put(globalThis, ZigString.static("position"), this.getPosition(globalThis));
object.put(globalThis, ZigString.static("message"), this.getMessage(globalThis));
object.put(globalThis, ZigString.static("level"), this.getLevel(globalThis));

View File

@@ -2596,9 +2596,6 @@ pub const Formatter = struct {
} else if (value.as(jsc.WebCore.S3Client)) |s3client| {
s3client.writeFormat(ConsoleObject.Formatter, this, writer_, enable_ansi_colors) catch {};
return;
} else if (value.as(jsc.API.Archive)) |archive| {
archive.writeFormat(ConsoleObject.Formatter, this, writer_, enable_ansi_colors) catch {};
return;
} else if (value.as(bun.webcore.FetchHeaders) != null) {
if (try value.get(this.globalThis, "toJSON")) |toJSONFunction| {
this.addForNewLine("Headers ".len);

View File

@@ -316,7 +316,6 @@ pub const TestReporterAgent = struct {
pub const Handle = opaque {
extern "c" fn Bun__TestReporterAgentReportTestFound(agent: *Handle, callFrame: *jsc.CallFrame, testId: c_int, name: *bun.String, item_type: TestType, parentId: c_int) void;
extern "c" fn Bun__TestReporterAgentReportTestFoundWithLocation(agent: *Handle, testId: c_int, name: *bun.String, item_type: TestType, parentId: c_int, sourceURL: *bun.String, line: c_int) void;
extern "c" fn Bun__TestReporterAgentReportTestStart(agent: *Handle, testId: c_int) void;
extern "c" fn Bun__TestReporterAgentReportTestEnd(agent: *Handle, testId: c_int, bunTestStatus: TestStatus, elapsed: f64) void;
@@ -324,10 +323,6 @@ pub const TestReporterAgent = struct {
Bun__TestReporterAgentReportTestFound(this, callFrame, testId, name, item_type, parentId);
}
pub fn reportTestFoundWithLocation(this: *Handle, testId: i32, name: *bun.String, item_type: TestType, parentId: i32, sourceURL: *bun.String, line: i32) void {
Bun__TestReporterAgentReportTestFoundWithLocation(this, testId, name, item_type, parentId, sourceURL, line);
}
pub fn reportTestStart(this: *Handle, testId: c_int) void {
Bun__TestReporterAgentReportTestStart(this, testId);
}
@@ -340,88 +335,8 @@ pub const TestReporterAgent = struct {
if (VirtualMachine.get().debugger) |*debugger| {
debug("enable", .{});
debugger.test_reporter_agent.handle = agent;
// Retroactively report any tests that were already discovered before the debugger connected
retroactivelyReportDiscoveredTests(agent);
}
}
/// When TestReporter.enable is called after test collection has started/finished,
/// we need to retroactively assign test IDs and report discovered tests.
fn retroactivelyReportDiscoveredTests(agent: *Handle) void {
const Jest = jsc.Jest.Jest;
const runner = Jest.runner orelse return;
const active_file = runner.bun_test_root.active_file.get() orelse return;
// Only report if we're in collection or execution phase (tests have been discovered)
switch (active_file.phase) {
.collection, .execution => {},
.done => return,
}
// Get the file path for source location info
const file_path = runner.files.get(active_file.file_id).source.path.text;
var source_url = bun.String.init(file_path);
// Track the maximum ID we assign
var max_id: i32 = 0;
// Recursively report all discovered tests starting from root scope
const root_scope = active_file.collection.root_scope;
retroactivelyReportScope(agent, root_scope, -1, &max_id, &source_url);
debug("retroactively reported {} tests", .{max_id});
}
fn retroactivelyReportScope(agent: *Handle, scope: *bun_test.DescribeScope, parent_id: i32, max_id: *i32, source_url: *bun.String) void {
for (scope.entries.items) |*entry| {
switch (entry.*) {
.describe => |describe| {
// Only report and assign ID if not already assigned
if (describe.base.test_id_for_debugger == 0) {
max_id.* += 1;
const test_id = max_id.*;
// Assign the ID so start/end events will fire during execution
describe.base.test_id_for_debugger = test_id;
var name = bun.String.init(describe.base.name orelse "(unnamed)");
agent.reportTestFoundWithLocation(
test_id,
&name,
.describe,
parent_id,
source_url,
@intCast(describe.base.line_no),
);
// Recursively report children with this describe as parent
retroactivelyReportScope(agent, describe, test_id, max_id, source_url);
} else {
// Already has ID, just recurse with existing ID as parent
retroactivelyReportScope(agent, describe, describe.base.test_id_for_debugger, max_id, source_url);
}
},
.test_callback => |test_entry| {
// Only report and assign ID if not already assigned
if (test_entry.base.test_id_for_debugger == 0) {
max_id.* += 1;
const test_id = max_id.*;
// Assign the ID so start/end events will fire during execution
test_entry.base.test_id_for_debugger = test_id;
var name = bun.String.init(test_entry.base.name orelse "(unnamed)");
agent.reportTestFoundWithLocation(
test_id,
&name,
.@"test",
parent_id,
source_url,
@intCast(test_entry.base.line_no),
);
}
},
}
}
}
const bun_test = jsc.Jest.bun_test;
pub export fn Bun__TestReporterAgentDisable(_: *Handle) void {
if (VirtualMachine.get().debugger) |*debugger| {
debug("disable", .{});

View File

@@ -65,7 +65,6 @@ pub const HardcodedModule = enum {
@"node:trace_events",
@"node:repl",
@"node:inspector",
@"node:inspector/promises",
@"node:http2",
@"node:diagnostics_channel",
@"node:dgram",
@@ -122,7 +121,6 @@ pub const HardcodedModule = enum {
.{ "node:http2", .@"node:http2" },
.{ "node:https", .@"node:https" },
.{ "node:inspector", .@"node:inspector" },
.{ "node:inspector/promises", .@"node:inspector/promises" },
.{ "node:module", .@"node:module" },
.{ "node:net", .@"node:net" },
.{ "node:readline", .@"node:readline" },
@@ -232,7 +230,6 @@ pub const HardcodedModule = enum {
nodeEntry("node:http2"),
nodeEntry("node:https"),
nodeEntry("node:inspector"),
nodeEntry("node:inspector/promises"),
nodeEntry("node:module"),
nodeEntry("node:net"),
nodeEntry("node:os"),
@@ -288,7 +285,6 @@ pub const HardcodedModule = enum {
nodeEntry("http2"),
nodeEntry("https"),
nodeEntry("inspector"),
nodeEntry("inspector/promises"),
nodeEntry("module"),
nodeEntry("net"),
nodeEntry("os"),
@@ -370,6 +366,10 @@ pub const HardcodedModule = enum {
.{ "bun:internal-for-testing", .{ .path = "bun:internal-for-testing" } },
.{ "ffi", .{ .path = "bun:ffi" } },
// inspector/promises is not implemented; it is an alias of inspector
.{ "node:inspector/promises", .{ .path = "node:inspector", .node_builtin = true } },
.{ "inspector/promises", .{ .path = "node:inspector", .node_builtin = true } },
// Thirdparty packages we override
.{ "@vercel/fetch", .{ .path = "@vercel/fetch" } },
.{ "isomorphic-fetch", .{ .path = "isomorphic-fetch" } },
@@ -394,7 +394,12 @@ pub const HardcodedModule = enum {
.{ "vitest", .{ .path = "bun:test" } },
};
const node_aliases = bun.ComptimeStringMap(Alias, common_alias_kvs);
const node_extra_alias_kvs = [_]struct { string, Alias }{
nodeEntry("node:inspector/promises"),
nodeEntry("inspector/promises"),
};
const node_aliases = bun.ComptimeStringMap(Alias, common_alias_kvs ++ node_extra_alias_kvs);
pub const bun_aliases = bun.ComptimeStringMap(Alias, common_alias_kvs ++ bun_extra_alias_kvs);
const bun_test_aliases = bun.ComptimeStringMap(Alias, common_alias_kvs ++ bun_extra_alias_kvs ++ bun_test_extra_alias_kvs);
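
The alias tables above are built by comptime array concatenation, so the per-flavor maps share one base list. A sketch of the same composition with std.StaticStringMap standing in for bun.ComptimeStringMap (keys and values hypothetical):

const std = @import("std");

const KV = struct { []const u8, u8 };
const common = [_]KV{ .{ "fs", 1 }, .{ "path", 2 } };
const extra = [_]KV{ .{ "inspector/promises", 3 } };
const aliases = std.StaticStringMap(u8).initComptime(common ++ extra);

test "composed map resolves the extra alias" {
    try std.testing.expectEqual(@as(?u8, 3), aliases.get("inspector/promises"));
}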

View File

@@ -30,20 +30,45 @@ pub fn resetArena(this: *ModuleLoader, jsc_vm: *VirtualMachine) void {
}
}
pub fn resolveEmbeddedFile(vm: *VirtualMachine, path_buf: *bun.PathBuffer, input_path: []const u8, extname: []const u8) ?[]const u8 {
fn resolveEmbeddedNodeFileViaMemfd(file: *bun.StandaloneModuleGraph.File, path_buffer: *bun.PathBuffer, fd: *i32) ![]const u8 {
var label_buf: [128]u8 = undefined;
const count = struct {
pub var counter = std.atomic.Value(u32).init(0);
pub fn get() u32 {
return counter.fetchAdd(1, .seq_cst);
}
}.get();
const label = std.fmt.bufPrintZ(&label_buf, "node-addon-{d}", .{count}) catch "";
const memfd = try bun.sys.memfd_create(label, .executable).unwrap();
errdefer memfd.close();
fd.* = @intCast(memfd.cast());
errdefer fd.* = -1;
try bun.sys.ftruncate(memfd, @intCast(file.contents.len)).unwrap();
try bun.sys.File.writeAll(.{ .handle = memfd }, file.contents).unwrap();
return try std.fmt.bufPrint(path_buffer, "/proc/self/fd/{d}", .{memfd.cast()});
}
pub fn resolveEmbeddedFile(vm: *VirtualMachine, path_buf: *bun.PathBuffer, linux_memfd: *i32, input_path: []const u8, extname: []const u8) ?[]const u8 {
if (input_path.len == 0) return null;
var graph = vm.standalone_module_graph orelse return null;
const file = graph.find(input_path) orelse return null;
if (comptime Environment.isLinux) {
// TODO: use /proc/fd/12346 instead! Avoid the copy!
// Best-effort: use memfd to avoid hitting the disk
if (resolveEmbeddedNodeFileViaMemfd(file, path_buf, linux_memfd)) |path| {
return path;
} else |_| {
// fall back to temp file
}
}
// atomically write to a tmpfile and then move it to the final destination
const tmpname_buf = bun.path_buffer_pool.get();
defer bun.path_buffer_pool.put(tmpname_buf);
const tmpfilename = bun.fs.FileSystem.tmpname(extname, tmpname_buf, bun.hash(file.name)) catch return null;
const tmpdir: bun.FD = .fromStdDir(bun.fs.FileSystem.instance.tmpdir() catch return null);
// First we open the tmpfile, to avoid any other work in the event of failure.
@@ -1301,14 +1326,14 @@ pub const FetchFlags = enum {
};
/// Support embedded .node files
export fn Bun__resolveEmbeddedNodeFile(vm: *VirtualMachine, in_out_str: *bun.String) bool {
export fn Bun__resolveEmbeddedNodeFile(vm: *VirtualMachine, in_out_str: *bun.String, linux_memfd_fd_to_close: *i32) bool {
if (vm.standalone_module_graph == null) return false;
const input_path = in_out_str.toUTF8(bun.default_allocator);
defer input_path.deinit();
const path_buf = bun.path_buffer_pool.get();
defer bun.path_buffer_pool.put(path_buf);
const result = ModuleLoader.resolveEmbeddedFile(vm, path_buf, input_path.slice(), "node") orelse return false;
const path_buffer = bun.path_buffer_pool.get();
defer bun.path_buffer_pool.put(path_buffer);
const result = ModuleLoader.resolveEmbeddedFile(vm, path_buffer, linux_memfd_fd_to_close, input_path.slice(), "node") orelse return false;
in_out_str.* = bun.String.cloneUTF8(result);
return true;
}
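
For reference, the memfd trick above in isolation: create an anonymous memory-backed file, write the payload into it, and hand callers a /proc/self/fd path. A Linux-only sketch using std.posix rather than Bun's internal wrappers (flags and error handling reduced; a robust version would loop on short writes):

const std = @import("std");

fn memfdPath(path_buf: []u8, name: []const u8, contents: []const u8) ![]const u8 {
    const fd = try std.posix.memfd_create(name, 0);
    errdefer std.posix.close(fd);
    try std.posix.ftruncate(fd, contents.len);
    _ = try std.posix.write(fd, contents); // single write for brevity
    // Any open() of this path in the same process reads the in-memory bytes.
    return std.fmt.bufPrint(path_buf, "/proc/self/fd/{d}", .{fd});
}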

View File

@@ -13,7 +13,7 @@ pub const ResolveMessage = struct {
return globalThis.throw("ResolveMessage is not constructable", .{});
}
pub fn getCode(this: *ResolveMessage, globalObject: *jsc.JSGlobalObject) bun.JSError!jsc.JSValue {
pub fn getCode(this: *ResolveMessage, globalObject: *jsc.JSGlobalObject) jsc.JSValue {
switch (this.msg.metadata) {
.resolve => |resolve| {
const code: []const u8 = brk: {
@@ -153,7 +153,7 @@ pub const ResolveMessage = struct {
_: *jsc.CallFrame,
) bun.JSError!jsc.JSValue {
var object = jsc.JSValue.createEmptyObject(globalThis, 7);
object.put(globalThis, ZigString.static("name"), try bun.String.static("ResolveMessage").toJS(globalThis));
object.put(globalThis, ZigString.static("name"), bun.String.static("ResolveMessage").toJS(globalThis));
object.put(globalThis, ZigString.static("position"), this.getPosition(globalThis));
object.put(globalThis, ZigString.static("message"), this.getMessage(globalThis));
object.put(globalThis, ZigString.static("level"), this.getLevel(globalThis));

View File

@@ -49,7 +49,6 @@ standalone_module_graph: ?*bun.StandaloneModuleGraph = null,
smol: bool = false,
dns_result_order: DNSResolver.Order = .verbatim,
cpu_profiler_config: ?CPUProfilerConfig = null,
heap_profiler_config: ?HeapProfilerConfig = null,
counters: Counters = .{},
hot_reload: bun.cli.Command.HotReload = .none,
@@ -844,15 +843,6 @@ pub fn onExit(this: *VirtualMachine) void {
};
}
// Write heap profile if profiling was enabled - do this after CPU profile but before shutdown
// Grab the config and null it out to make this idempotent
if (this.heap_profiler_config) |config| {
this.heap_profiler_config = null;
HeapProfiler.generateAndWriteProfile(this.jsc_vm, config) catch |err| {
Output.err(err, "Failed to write heap profile", .{});
};
}
this.exit_handler.dispatchOnExit();
this.is_shutting_down = true;
@@ -3725,9 +3715,6 @@ const Allocator = std.mem.Allocator;
const CPUProfiler = @import("./bindings/BunCPUProfiler.zig");
const CPUProfilerConfig = CPUProfiler.CPUProfilerConfig;
const HeapProfiler = @import("./bindings/BunHeapProfiler.zig");
const HeapProfilerConfig = HeapProfiler.HeapProfilerConfig;
const bun = @import("bun");
const Async = bun.Async;
const DotEnv = bun.DotEnv;

View File

@@ -38,7 +38,6 @@ pub const dns = @import("./api/bun/dns.zig");
pub const FFI = @import("./api/ffi.zig").FFI;
pub const HTMLRewriter = @import("./api/html_rewriter.zig");
pub const FileSystemRouter = @import("./api/filesystem_router.zig").FileSystemRouter;
pub const Archive = @import("./api/Archive.zig");
pub const Glob = @import("./api/glob.zig");
pub const H2FrameParser = @import("./api/bun/h2_frame_parser.zig").H2FrameParser;
pub const JSBundler = @import("./api/JSBundler.zig").JSBundler;

View File

@@ -1,35 +0,0 @@
import { define } from "../../codegen/class-definitions";
export default [
define({
name: "Archive",
construct: true,
finalize: true,
configurable: false,
JSType: "0b11101110",
klass: {
write: {
fn: "write",
length: 2,
},
},
proto: {
extract: {
fn: "extract",
length: 2,
},
blob: {
fn: "blob",
length: 0,
},
bytes: {
fn: "bytes",
length: 0,
},
files: {
fn: "files",
length: 0,
},
},
}),
];

File diff suppressed because it is too large

View File

@@ -16,6 +16,7 @@ pub const BunObject = struct {
pub const connect = toJSCallback(host_fn.wrapStaticMethod(api.Listener, "connect", false));
pub const createParsedShellScript = toJSCallback(bun.shell.ParsedShellScript.createParsedShellScript);
pub const createShellInterpreter = toJSCallback(bun.shell.Interpreter.createShellInterpreter);
pub const traceShellScript = toJSCallback(bun.shell.TraceInterpreter.traceShellScript);
pub const deflateSync = toJSCallback(JSZlib.deflateSync);
pub const file = toJSCallback(WebCore.Blob.constructBunFile);
pub const gunzipSync = toJSCallback(JSZlib.gunzipSync);
@@ -48,7 +49,6 @@ pub const BunObject = struct {
// --- Callbacks ---
// --- Lazy property callbacks ---
pub const Archive = toJSLazyPropertyCallback(Bun.getArchiveConstructor);
pub const CryptoHasher = toJSLazyPropertyCallback(Crypto.CryptoHasher.getter);
pub const CSRF = toJSLazyPropertyCallback(Bun.getCSRFObject);
pub const FFI = toJSLazyPropertyCallback(Bun.FFIObject.getter);
@@ -116,7 +116,6 @@ pub const BunObject = struct {
}
// --- Lazy property callbacks ---
@export(&BunObject.Archive, .{ .name = lazyPropertyCallbackName("Archive") });
@export(&BunObject.CryptoHasher, .{ .name = lazyPropertyCallbackName("CryptoHasher") });
@export(&BunObject.CSRF, .{ .name = lazyPropertyCallbackName("CSRF") });
@export(&BunObject.FFI, .{ .name = lazyPropertyCallbackName("FFI") });
@@ -157,6 +156,7 @@ pub const BunObject = struct {
@export(&BunObject.connect, .{ .name = callbackName("connect") });
@export(&BunObject.createParsedShellScript, .{ .name = callbackName("createParsedShellScript") });
@export(&BunObject.createShellInterpreter, .{ .name = callbackName("createShellInterpreter") });
@export(&BunObject.traceShellScript, .{ .name = callbackName("traceShellScript") });
@export(&BunObject.deflateSync, .{ .name = callbackName("deflateSync") });
@export(&BunObject.file, .{ .name = callbackName("file") });
@export(&BunObject.gunzipSync, .{ .name = callbackName("gunzipSync") });
@@ -606,7 +606,7 @@ fn getMain(globalThis: *jsc.JSGlobalObject) callconv(jsc.conv) jsc.JSValue {
}
}
return vm.main_resolved_path.toJS(globalThis) catch .zero;
return vm.main_resolved_path.toJS(globalThis);
}
return ZigString.init(vm.main).toJS(globalThis);
@@ -1103,7 +1103,7 @@ pub export fn Bun__escapeHTML16(globalObject: *jsc.JSGlobalObject, input_value:
assert(len > 0);
const input_slice = ptr[0..len];
const escaped = strings.escapeHTMLForUTF16Input(globalObject.bunVM().allocator, input_slice) catch {
return globalObject.throwValue(ZigString.init("Out of memory").toErrorInstance(globalObject)) catch return .zero;
return globalObject.throwValue(bun.String.static("Out of memory").toJS(globalObject)) catch .zero;
};
return switch (escaped) {
@@ -1121,7 +1121,7 @@ pub export fn Bun__escapeHTML8(globalObject: *jsc.JSGlobalObject, input_value: J
const allocator = if (input_slice.len <= 32) stack_allocator.get() else stack_allocator.fallback_allocator;
const escaped = strings.escapeHTMLForLatin1Input(allocator, input_slice) catch {
return globalObject.throwValue(ZigString.init("Out of memory").toErrorInstance(globalObject)) catch return .zero;
return globalObject.throwValue(bun.String.static("Out of memory").toJS(globalObject)) catch .zero;
};
switch (escaped) {
@@ -1238,7 +1238,7 @@ pub fn mmapFile(globalThis: *jsc.JSGlobalObject, callframe: *jsc.CallFrame) bun.
.result => |map| map,
.err => |err| {
return globalThis.throwValue(try err.toJS(globalThis));
return globalThis.throwValue(err.toJS(globalThis));
},
};
@@ -1273,10 +1273,6 @@ pub fn getYAMLObject(globalThis: *jsc.JSGlobalObject, _: *jsc.JSObject) jsc.JSVa
return YAMLObject.create(globalThis);
}
pub fn getArchiveConstructor(globalThis: *jsc.JSGlobalObject, _: *jsc.JSObject) jsc.JSValue {
return jsc.API.Archive.js.getConstructor(globalThis);
}
pub fn getGlobConstructor(globalThis: *jsc.JSGlobalObject, _: *jsc.JSObject) jsc.JSValue {
return jsc.API.Glob.js.getConstructor(globalThis);
}

View File

@@ -1081,28 +1081,6 @@ pub const JSBundler = struct {
return globalThis.throwInvalidArguments("Expected a config object to be passed to Bun.build", .{});
}
const vm = globalThis.bunVM();
// Detect and prevent calling Bun.build from within a macro during bundling.
// This would cause a deadlock because:
// 1. The bundler thread (singleton) is processing the outer Bun.build
// 2. During parsing, it encounters a macro and evaluates it
// 3. The macro calls Bun.build, which tries to enqueue to the same singleton thread
// 4. The singleton thread is blocked waiting for the macro to complete -> deadlock
if (vm.macro_mode) {
return globalThis.throw(
\\Bun.build cannot be called from within a macro during bundling.
\\
\\This would cause a deadlock because the bundler is waiting for the macro to complete,
\\but the macro's Bun.build call is waiting for the bundler.
\\
\\To bundle code at compile time in a macro, use Bun.spawnSync to invoke the CLI:
\\ const result = Bun.spawnSync(["bun", "build", entrypoint, "--format=esm"]);
,
.{},
);
}
var plugins: ?*Plugin = null;
const config = try Config.fromJS(globalThis, arguments[0], &plugins, bun.default_allocator);
@@ -1110,7 +1088,7 @@ pub const JSBundler = struct {
config,
plugins,
globalThis,
vm.eventLoop(),
globalThis.bunVM().eventLoop(),
bun.default_allocator,
);
}

View File

@@ -22,11 +22,6 @@ pub fn parse(
var arena = bun.ArenaAllocator.init(globalThis.allocator());
const allocator = arena.allocator();
defer arena.deinit();
var ast_memory_allocator = bun.handleOom(allocator.create(ast.ASTMemoryAllocator));
var ast_scope = ast_memory_allocator.enter(allocator);
defer ast_scope.exit();
var log = logger.Log.init(default_allocator);
defer log.deinit();
const input_value = callframe.argument(0);
@@ -37,10 +32,7 @@ pub fn parse(
var input_slice = try input_value.toSlice(globalThis, bun.default_allocator);
defer input_slice.deinit();
const source = &logger.Source.initPathString("input.jsonc", input_slice.slice());
const parse_result = json.parseTSConfig(source, &log, allocator, true) catch |err| {
if (err == error.StackOverflow) {
return globalThis.throwStackOverflow();
}
const parse_result = json.parseTSConfig(source, &log, allocator, true) catch {
return globalThis.throwValue(try log.toJS(globalThis, default_allocator, "Failed to parse JSONC"));
};
@@ -54,7 +46,6 @@ pub fn parse(
}
const bun = @import("bun");
- const ast = bun.ast;
const default_allocator = bun.default_allocator;
const logger = bun.logger;
const json = bun.interchange.json;

View File

@@ -42,7 +42,6 @@ pub const Config = struct {
minify_identifiers: bool = false,
minify_syntax: bool = false,
no_macros: bool = false,
- repl_mode: bool = false,
pub fn fromJS(this: *Config, globalThis: *jsc.JSGlobalObject, object: jsc.JSValue, allocator: std.mem.Allocator) bun.JSError!void {
if (object.isUndefinedOrNull()) {
@@ -246,10 +245,6 @@ pub const Config = struct {
this.dead_code_elimination = flag;
}
- if (try object.getBooleanLoose(globalThis, "replMode")) |flag| {
- this.repl_mode = flag;
- }
if (try object.getTruthy(globalThis, "minify")) |minify| {
if (minify.isBoolean()) {
this.minify_whitespace = minify.toBoolean();
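The two Config hunks above remove the `repl_mode` field and the `replMode` key that populated it. The surrounding fields correspond to the public `Bun.Transpiler` options; a minimal sketch of that surface (note `replMode` is only understood by the side of the diff that keeps these lines):

```ts
// Minimal sketch of the options object Config.fromJS consumes.
const transpiler = new Bun.Transpiler({ loader: "ts", target: "browser" });
console.log(transpiler.transformSync("let x: number = 1;"));
```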
@@ -574,10 +569,7 @@ pub const TransformTask = struct {
}
fn finish(this: *TransformTask, promise: *jsc.JSPromise) bun.JSTerminated!void {
- const value = this.output_code.transferToJS(this.global) catch |e| {
- return promise.reject(this.global, this.global.takeException(e));
- };
- return promise.resolve(this.global, value);
+ return promise.resolve(this.global, this.output_code.transferToJS(this.global));
}
pub fn deinit(this: *TransformTask) void {
@@ -706,8 +698,7 @@ pub fn constructor(globalThis: *jsc.JSGlobalObject, callframe: *jsc.CallFrame) b
transpiler.options.macro_remap = config.macro_map;
}
- // REPL mode disables DCE to preserve expressions like `42`
- transpiler.options.dead_code_elimination = config.dead_code_elimination and !config.repl_mode;
+ transpiler.options.dead_code_elimination = config.dead_code_elimination;
transpiler.options.minify_whitespace = config.minify_whitespace;
// Keep defaults for these
@@ -726,7 +717,6 @@ pub fn constructor(globalThis: *jsc.JSGlobalObject, callframe: *jsc.CallFrame) b
transpiler.options.inlining = config.runtime.inlining;
transpiler.options.hot_module_reloading = config.runtime.hot_module_reloading;
transpiler.options.react_fast_refresh = false;
- transpiler.options.repl_mode = config.repl_mode;
return this;
}
@@ -748,47 +738,9 @@ pub fn deinit(this: *JSTranspiler) void {
bun.destroy(this);
}
- /// Check if code looks like an object literal that would be misinterpreted as a block
- /// Returns true if code starts with { (after whitespace) and doesn't end with ;
- /// This matches Node.js REPL behavior for object literal disambiguation
- fn isLikelyObjectLiteral(code: []const u8) bool {
- // Skip leading whitespace
- var start: usize = 0;
- while (start < code.len and (code[start] == ' ' or code[start] == '\t' or code[start] == '\n' or code[start] == '\r')) {
- start += 1;
- }
- // Check if starts with {
- if (start >= code.len or code[start] != '{') {
- return false;
- }
- // Skip trailing whitespace
- var end: usize = code.len;
- while (end > 0 and (code[end - 1] == ' ' or code[end - 1] == '\t' or code[end - 1] == '\n' or code[end - 1] == '\r')) {
- end -= 1;
- }
- // Check if ends with semicolon - if so, it's likely a block statement
- if (end > 0 and code[end - 1] == ';') {
- return false;
- }
- return true;
- }
fn getParseResult(this: *JSTranspiler, allocator: std.mem.Allocator, code: []const u8, loader: ?Loader, macro_js_ctx: Transpiler.MacroJSValueType) ?Transpiler.ParseResult {
const name = this.config.default_loader.stdinName();
- // In REPL mode, wrap potential object literals in parentheses
- // If code starts with { and doesn't end with ; it might be an object literal
- // that would otherwise be parsed as a block statement
- const processed_code: []const u8 = if (this.config.repl_mode and isLikelyObjectLiteral(code))
- std.fmt.allocPrint(allocator, "({s})", .{code}) catch code
- else
- code;
- const source = &logger.Source.initPathString(name, processed_code);
+ const source = &logger.Source.initPathString(name, code);
const jsx = if (this.config.tsconfig != null)
this.config.tsconfig.?.mergeJSX(this.transpiler.options.jsx)
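The deleted heuristic exists because, in statement position, JavaScript parses a leading `{` as a block: a REPL that evaluates `{ a: 1 }` verbatim gets a labeled statement whose completion value is `1`, not an object. Wrapping in parentheses forces expression position, and a trailing semicolon is taken as a signal that a block was intended. A short demonstration of the ambiguity (using `eval` purely for illustration):

```ts
// In statement position, `{ a: 1 }` is a block containing the labeled
// expression statement `a: 1`, so its completion value is the number 1.
console.log(eval("{ a: 1 }"));   // 1
// Parentheses force expression position, producing the object instead.
console.log(eval("({ a: 1 })")); // { a: 1 }
```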

View File

@@ -22,11 +22,6 @@ pub fn parse(
var arena = bun.ArenaAllocator.init(globalThis.allocator());
const allocator = arena.allocator();
defer arena.deinit();
- var ast_memory_allocator = bun.handleOom(allocator.create(ast.ASTMemoryAllocator));
- var ast_scope = ast_memory_allocator.enter(allocator);
- defer ast_scope.exit();
var log = logger.Log.init(default_allocator);
const arguments = callframe.arguments_old(1).slice();
if (arguments.len == 0 or arguments[0].isEmptyOrUndefinedOrNull()) {
@@ -36,10 +31,7 @@ pub fn parse(
var input_slice = try arguments[0].toSlice(globalThis, bun.default_allocator);
defer input_slice.deinit();
const source = &logger.Source.initPathString("input.toml", input_slice.slice());
- const parse_result = TOML.parse(source, &log, allocator, false) catch |err| {
- if (err == error.StackOverflow) {
- return globalThis.throwStackOverflow();
- }
+ const parse_result = TOML.parse(source, &log, allocator, false) catch {
return globalThis.throwValue(try log.toJS(globalThis, default_allocator, "Failed to parse toml"));
};
@@ -66,7 +58,6 @@ pub fn parse(
}
const bun = @import("bun");
- const ast = bun.ast;
const default_allocator = bun.default_allocator;
const js_printer = bun.js_printer;
const logger = bun.logger;
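As in the JSONC hunk earlier, the dedicated `error.StackOverflow` branch is dropped here and every TOML parse failure is reported through the accumulated log. This parser backs Bun's native TOML imports; a hedged usage sketch (assumes a `config.toml` containing `name = "demo"` next to the script):

```ts
// Hedged sketch: .toml imports go through the parser patched above.
// Assumes ./config.toml exists with a top-level `name` key.
import config from "./config.toml";
console.log(config.name); // "demo"
```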

View File

@@ -351,15 +351,13 @@ pub const All = struct {
},
};
var warning_type_string = bun.String.createAtomIfPossible(@tagName(warning_type));
- // Ignore errors from transferToJS since this is just a warning and shouldn't interrupt execution
- const warning_js = warning_string.transferToJS(globalThis) catch return;
- const warning_type_js = warning_type_string.transferToJS(globalThis) catch return;
+ // these arguments are valid so emitWarning won't throw
globalThis.emitWarning(
- warning_js,
- warning_type_js,
+ warning_string.transferToJS(globalThis),
+ warning_type_string.transferToJS(globalThis),
.js_undefined,
.js_undefined,
- ) catch {};
+ ) catch unreachable;
}
const CountdownOverflowBehavior = enum(u8) {
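The two variants of this warning path differ only in failure posture: one treats `transferToJS` as fallible and bails out silently (a warning should never interrupt execution), the other asserts that the conversions and `emitWarning` cannot fail. Either way the strings land in the Node-compatible warning machinery, whose JS surface looks like this:

```ts
// Node-compatible surface these internals feed into.
process.on("warning", (warning) => {
  console.error(`${warning.name}: ${warning.message}`);
});
process.emitWarning("this codepath is deprecated", "DeprecationWarning");
```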

View File

@@ -118,14 +118,6 @@ pub fn set_repeat(_: *Self, thisValue: JSValue, globalThis: *JSGlobalObject, val
Self.js.repeatSetCached(thisValue, globalThis, value);
}
- pub fn get_idleStart(_: *Self, thisValue: JSValue, _: *JSGlobalObject) JSValue {
- return Self.js.idleStartGetCached(thisValue).?;
- }
- pub fn set_idleStart(_: *Self, thisValue: JSValue, globalThis: *JSGlobalObject, value: JSValue) void {
- Self.js.idleStartSetCached(thisValue, globalThis, value);
- }
pub fn dispose(self: *Self, globalThis: *JSGlobalObject, _: *jsc.CallFrame) bun.JSError!JSValue {
self.internals.cancel(globalThis.bunVM());
return .js_undefined;
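The removed accessors are the cached getter/setter pair behind `_idleStart`, an undocumented bookkeeping field Node exposes on `Timeout` objects. A hedged sketch of where that field is visible from JS (internal API, shown only for illustration):

```ts
// _idleStart is internal and undocumented; it records when the timer
// was scheduled, as a millisecond offset private to the runtime.
const t = setTimeout(() => {}, 1_000);
console.log((t as any)._idleStart);
clearTimeout(t);
```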

Some files were not shown because too many files have changed in this diff.