Mirror of https://github.com/oven-sh/bun, synced 2026-02-02 15:08:46 +00:00

Compare commits: claude/fix ... main (149 commits)
SHA1:
ddefa11070 35f8154319 9d68ec882a 1337f5dba4 56b5be4ba4 6c119d608e a14a89ca95 a5246344fa f648483fe7 01fa61045f
71ce550cfa 8f61adf494 b4b7cc6d78 3feea91087 bb4d5b9af5 adc1a6b05c 8a11a03297 baea21f0c7 209923a65c cd4d98338c
b64edcb490 4feede90f5 fc4624c672 1bfe5c6b37 aded701d1d 7ebfdf97a8 4cd3b241bc cae67a17e2 a394063a7d c9ebb17921
2f510724a9 9a16f4c345 ba426210c2 bd63fb9ef6 9d6ef0af1d d08e4bae09 b59c77a6e7 6130aa8168 a595fe1cca 799907362f
2c0721eabe 02680b69bf 7c50164987 a553fda32b f87fa27fac 4071624edd bfe40e8760 bcaae48a95 6b3403a2b4 70fe76209b
ab3df344b8 4680e89a91 f88f60af5a 232e0df956 9f0e78fc42 043fafeefa ce173b1112 0c3b5e501b 5dc72bc1d8 dfc36a8255
827c7091d9 13f78a7044 d8d8182e8e e8b2455f11 c4f6874960 c63415c9c9 86d4d87beb 3b1c3bfe97 2582e6f98e 85080f7949
2a9980076d 1da41b7f91 136d345752 93d5cc6e56 37c41137f8 dc203e853a 2febdb5b49 b6b3626c14 7f70b01259 08103aa2ff
bb4d150aed 45af3335e6 dbad2857ea 6140eb5faf 497a4d4818 66d8397bd7 170f8f7962 5f470278d1 3a4daa95ac d3f8bec565
62834e1bfe 704252e85f 04f441453d 362839c987 0dda0f6310 5e8a84e5e7 01fac4a63c f8adf01f51 d85d40ea29 c47f84348a
f8a049e9f2 12a45b7cbf 039c89442f c3b4e5568c 3d46ae2fa4 716801e92d 939f5cf7af 496aeb97f9 3b5f2fe756 f833f11afa
b2e5c6c7d1 1344151576 44df912d37 05434add3e 7e9fa4ab08 6f6f76f0c0 8da29af1ae bcbb4fc35d ad4aabf486 5b25a3abdb
12243b9715 5d3f37d7ae 2a483631fb cdcff11221 dfa704cc62 f01467d3dc b268004715 97feb66189 8466f12671 ed75a0e2d1
fdb956f2fe d4e5197208 7d640cccd1 65a9a2a580 393198d190 798b48c898 6104705f5f 11aedbe402 05df51ff84 b72af3d329
f27c6768ce c57d0f73b4 6a27a25e5b 2b86ab0cd3 6e6896510a 5a71ead8a2 a9b5f5cbd1 7333500df8 e6733333f0
@@ -26,7 +26,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
     wget curl git python3 python3-pip ninja-build \
     software-properties-common apt-transport-https \
     ca-certificates gnupg lsb-release unzip \
-    libxml2-dev ruby ruby-dev bison gawk perl make golang ccache \
+    libxml2-dev ruby ruby-dev bison gawk perl make golang ccache qemu-user-static \
     && add-apt-repository ppa:ubuntu-toolchain-r/test \
     && apt-get update \
     && apt-get install -y gcc-13 g++-13 libgcc-13-dev libstdc++-13-dev \
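The `qemu-user-static` package added here is what later lets the pipeline execute cross-architecture or feature-restricted binaries on the build hosts: it ships user-mode QEMU binaries and, on most distros, registers them with binfmt_misc. A minimal illustration of that capability, with hypothetical binary paths rather than commands taken from the CI scripts:

```bash
# Run an aarch64 Bun binary on an x64 host through user-mode QEMU (illustrative paths).
qemu-aarch64-static ./bun-linux-aarch64/bun --version

# Emulate a pre-AVX x64 CPU to confirm a baseline build still starts.
qemu-x86_64-static -cpu Nehalem ./bun-linux-x64-baseline/bun --version
```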
@@ -114,6 +114,8 @@ const buildPlatforms = [
   { os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.22" },
   { os: "windows", arch: "x64", release: "2019" },
   { os: "windows", arch: "x64", baseline: true, release: "2019" },
+  // TODO: Enable when Windows ARM64 CI runners are ready
+  // { os: "windows", arch: "aarch64", release: "2019" },
 ];
 
 /**
@@ -136,6 +138,8 @@ const testPlatforms = [
   { os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.22", tier: "latest" },
   { os: "windows", arch: "x64", release: "2019", tier: "oldest" },
   { os: "windows", arch: "x64", release: "2019", baseline: true, tier: "oldest" },
+  // TODO: Enable when Windows ARM64 CI runners are ready
+  // { os: "windows", arch: "aarch64", release: "2019", tier: "oldest" },
 ];
 
 /**
@@ -533,6 +537,109 @@ function getLinkBunStep(platform, options) {
   };
 }
 
+/**
+ * Returns the artifact triplet for a platform, e.g. "bun-linux-aarch64" or "bun-linux-x64-musl-baseline".
+ * Matches the naming convention in cmake/targets/BuildBun.cmake.
+ * @param {Platform} platform
+ * @returns {string}
+ */
+function getTargetTriplet(platform) {
+  const { os, arch, abi, baseline } = platform;
+  let triplet = `bun-${os}-${arch}`;
+  if (abi === "musl") {
+    triplet += "-musl";
+  }
+  if (baseline) {
+    triplet += "-baseline";
+  }
+  return triplet;
+}
+
+/**
+ * Returns true if a platform needs QEMU-based baseline CPU verification.
+ * x64 baseline builds verify no AVX/AVX2 instructions snuck in.
+ * aarch64 builds verify no LSE/SVE instructions snuck in.
+ * @param {Platform} platform
+ * @returns {boolean}
+ */
+function needsBaselineVerification(platform) {
+  const { os, arch, baseline } = platform;
+  if (os !== "linux") return false;
+  return (arch === "x64" && baseline) || arch === "aarch64";
+}
+
+/**
+ * @param {Platform} platform
+ * @param {PipelineOptions} options
+ * @returns {Step}
+ */
+function getVerifyBaselineStep(platform, options) {
+  const { arch } = platform;
+  const targetKey = getTargetKey(platform);
+  const archArg = arch === "x64" ? "x64" : "aarch64";
+
+  return {
+    key: `${targetKey}-verify-baseline`,
+    label: `${getTargetLabel(platform)} - verify-baseline`,
+    depends_on: [`${targetKey}-build-bun`],
+    agents: getLinkBunAgent(platform, options),
+    retry: getRetry(),
+    cancel_on_build_failing: isMergeQueue(),
+    timeout_in_minutes: 5,
+    command: [
+      `buildkite-agent artifact download '*.zip' . --step ${targetKey}-build-bun`,
+      `unzip -o '${getTargetTriplet(platform)}.zip'`,
+      `unzip -o '${getTargetTriplet(platform)}-profile.zip'`,
+      `chmod +x ${getTargetTriplet(platform)}/bun ${getTargetTriplet(platform)}-profile/bun-profile`,
+      `./scripts/verify-baseline-cpu.sh --arch ${archArg} --binary ${getTargetTriplet(platform)}/bun`,
+      `./scripts/verify-baseline-cpu.sh --arch ${archArg} --binary ${getTargetTriplet(platform)}-profile/bun-profile`,
+    ],
+  };
+}
+
+/**
+ * Returns true if the PR modifies SetupWebKit.cmake (WebKit version changes).
+ * JIT stress tests under QEMU should run when WebKit is updated to catch
+ * JIT-generated code that uses unsupported CPU instructions.
+ * @param {PipelineOptions} options
+ * @returns {boolean}
+ */
+function hasWebKitChanges(options) {
+  const { changedFiles = [] } = options;
+  return changedFiles.some(file => file.includes("SetupWebKit.cmake"));
+}
+
+/**
+ * Returns a step that runs JSC JIT stress tests under QEMU.
+ * This verifies that JIT-compiled code doesn't use CPU instructions
+ * beyond the baseline target (no AVX on x64, no LSE on aarch64).
+ * @param {Platform} platform
+ * @param {PipelineOptions} options
+ * @returns {Step}
+ */
+function getJitStressTestStep(platform, options) {
+  const { arch } = platform;
+  const targetKey = getTargetKey(platform);
+  const archArg = arch === "x64" ? "x64" : "aarch64";
+
+  return {
+    key: `${targetKey}-jit-stress-qemu`,
+    label: `${getTargetLabel(platform)} - jit-stress-qemu`,
+    depends_on: [`${targetKey}-build-bun`],
+    agents: getLinkBunAgent(platform, options),
+    retry: getRetry(),
+    cancel_on_build_failing: isMergeQueue(),
+    // JIT stress tests are slow under QEMU emulation
+    timeout_in_minutes: 30,
+    command: [
+      `buildkite-agent artifact download '*.zip' . --step ${targetKey}-build-bun`,
+      `unzip -o '${getTargetTriplet(platform)}.zip'`,
+      `chmod +x ${getTargetTriplet(platform)}/bun`,
+      `./scripts/verify-jit-stress-qemu.sh --arch ${archArg} --binary ${getTargetTriplet(platform)}/bun`,
+    ],
+  };
+}
+
 /**
  * @param {Platform} platform
  * @param {PipelineOptions} options
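Neither `scripts/verify-baseline-cpu.sh` nor `scripts/verify-jit-stress-qemu.sh` appears in this diff, so their exact contents are unknown here. As a rough sketch of the QEMU-based idea the comments describe (run the binary on a CPU model that lacks the optional extensions and treat an illegal-instruction crash as failure), assuming user-mode QEMU is available on the agent:

```bash
#!/usr/bin/env bash
# Hypothetical sketch only; NOT the repository's verify-baseline-cpu.sh.
set -euo pipefail

arch="$1"    # x64 | aarch64
binary="$2"

if [ "$arch" = "x64" ]; then
  # Nehalem supports SSE4.2 but no AVX/AVX2, so any AVX usage dies with SIGILL.
  qemu-x86_64-static -cpu Nehalem "$binary" --version
else
  # Cortex-A53 is ARMv8.0: no LSE atomics and no SVE.
  qemu-aarch64-static -cpu cortex-a53 "$binary" --version
fi
echo "ok: $binary executed on a baseline $arch CPU model"
```

A check like this only covers the code paths the command actually executes, which is presumably why the pipeline also verifies the profile binary and, when WebKit changes, runs a longer JIT stress suite under QEMU.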
@@ -770,6 +877,7 @@ function getBenchmarkStep() {
  * @property {Platform[]} [buildPlatforms]
  * @property {Platform[]} [testPlatforms]
  * @property {string[]} [testFiles]
+ * @property {string[]} [changedFiles]
  */
 
 /**
@@ -1122,6 +1230,14 @@ async function getPipeline(options = {}) {
       steps.push(getBuildZigStep(target, options));
       steps.push(getLinkBunStep(target, options));
 
+      if (needsBaselineVerification(target)) {
+        steps.push(getVerifyBaselineStep(target, options));
+        // Run JIT stress tests under QEMU when WebKit is updated
+        if (hasWebKitChanges(options)) {
+          steps.push(getJitStressTestStep(target, options));
+        }
+      }
+
       return getStepWithDependsOn(
         {
           key: getTargetKey(target),
@@ -1219,6 +1335,7 @@ async function main() {
       console.log(`- PR is only docs, skipping tests!`);
       return;
     }
+    options.changedFiles = allFiles;
   }
 
   startGroup("Generating pipeline...");
@@ -36,16 +36,20 @@ function Log-Debug {
     }
 }
 
+# Detect system architecture
+$script:IsARM64 = [System.Runtime.InteropServices.RuntimeInformation]::OSArchitecture -eq [System.Runtime.InteropServices.Architecture]::Arm64
+$script:VsArch = if ($script:IsARM64) { "arm64" } else { "amd64" }
+
 # Load Visual Studio environment if not already loaded
 function Ensure-VSEnvironment {
     if ($null -eq $env:VSINSTALLDIR) {
-        Log-Info "Loading Visual Studio environment..."
+        Log-Info "Loading Visual Studio environment for $script:VsArch..."
 
         $vswhere = "C:\Program Files (x86)\Microsoft Visual Studio\Installer\vswhere.exe"
         if (!(Test-Path $vswhere)) {
            throw "Command not found: vswhere (did you install Visual Studio?)"
        }
 
        $vsDir = & $vswhere -prerelease -latest -property installationPath
        if ($null -eq $vsDir) {
            $vsDir = Get-ChildItem -Path "C:\Program Files\Microsoft Visual Studio\2022" -Directory -ErrorAction SilentlyContinue
@@ -54,20 +58,20 @@ function Ensure-VSEnvironment {
            }
            $vsDir = $vsDir.FullName
        }
 
        Push-Location $vsDir
        try {
            $vsShell = Join-Path -Path $vsDir -ChildPath "Common7\Tools\Launch-VsDevShell.ps1"
-            . $vsShell -Arch amd64 -HostArch amd64
+            . $vsShell -Arch $script:VsArch -HostArch $script:VsArch
        } finally {
            Pop-Location
        }
 
        Log-Success "Visual Studio environment loaded"
    }
 
    if ($env:VSCMD_ARG_TGT_ARCH -eq "x86") {
-        throw "Visual Studio environment is targeting 32 bit, but only 64 bit is supported."
+        throw "Visual Studio environment is targeting 32 bit x86, but only 64-bit architectures (x64/arm64) are supported."
    }
 }
 
@@ -186,8 +190,10 @@ function Install-KeyLocker {
    }
 
    # Download MSI installer
-    $msiUrl = "https://bun-ci-assets.bun.sh/Keylockertools-windows-x64.msi"
-    $msiPath = Join-Path $env:TEMP "Keylockertools-windows-x64.msi"
+    # Note: KeyLocker tools currently only available for x64, but works on ARM64 via emulation
+    $msiArch = "x64"
+    $msiUrl = "https://bun-ci-assets.bun.sh/Keylockertools-windows-${msiArch}.msi"
+    $msiPath = Join-Path $env:TEMP "Keylockertools-windows-${msiArch}.msi"
 
    Log-Info "Downloading MSI from: $msiUrl"
    Log-Info "Downloading to: $msiPath"
@@ -219,6 +219,9 @@ function create_release() {
    bun-windows-x64-profile.zip
    bun-windows-x64-baseline.zip
    bun-windows-x64-baseline-profile.zip
+    # TODO: Enable when Windows ARM64 CI runners are ready
+    # bun-windows-aarch64.zip
+    # bun-windows-aarch64-profile.zip
  )
 
 function upload_artifact() {
@@ -6,8 +6,7 @@ To do that:
 - git fetch upstream
 - git merge upstream main
 - Fix the merge conflicts
 - cd ../../ (back to bun)
-- make jsc-build (this will take about 7 minutes)
-- bun build.ts debug
 - While it compiles, in another task review the JSC commits between the last version of Webkit and the new version. Write up a summary of the webkit changes in a file called "webkit-changes.md"
+- bun run build:local (build a build of Bun with the new Webkit, make sure it compiles)
 - After making sure it compiles, run some code to make sure things work. something like ./build/debug-local/bun-debug --print '42' should be all you need
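Read as shell commands, the build-and-verify part of this checklist corresponds to roughly the following (a sketch; remote names and the exact conflict-resolution steps depend on the local setup):

```bash
# Hypothetical transcript of the upgrade flow described above.
cd vendor/WebKit
git fetch upstream
git merge upstream/main      # fix any merge conflicts by hand
cd ../..                     # back to the bun checkout
bun run build:local          # build Bun against the new WebKit
./build/debug-local/bun-debug --print '42'   # quick smoke test
```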
@@ -21,3 +20,7 @@ To do that:
 - commit + push (without adding the webkit-changes.md file)
 - create PR titled "Upgrade Webkit to the <commit-sha>", paste your webkit-changes.md into the PR description
 - delete the webkit-changes.md file
+
+Things to check for a successful upgrade:
+- Did JSType in vendor/WebKit/Source/JavaScriptCore have any recent changes? Do the enum values align with what's present in src/bun.js/bindings/JSType.zig?
+- Were there any changes to the webcore code generator? If there are C++ compilation errors, check for differences in some of the generated code in vendor/WebKit/source/WebCore/bindings/scripts/test/JS/
2 .github/workflows/update-cares.yml vendored
@@ -88,7 +88,7 @@ jobs:
           commit-message: "deps: update c-ares to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
           title: "deps: update c-ares to ${{ steps.check-version.outputs.tag }}"
           delete-branch: true
-          branch: deps/update-cares-${{ github.run_number }}
+          branch: deps/update-cares
           body: |
             ## What does this PR do?
 
2 .github/workflows/update-hdrhistogram.yml vendored
@@ -91,7 +91,7 @@ jobs:
           commit-message: "deps: update hdrhistogram to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
           title: "deps: update hdrhistogram to ${{ steps.check-version.outputs.tag }}"
           delete-branch: true
-          branch: deps/update-hdrhistogram-${{ github.run_number }}
+          branch: deps/update-hdrhistogram
           body: |
             ## What does this PR do?
 
2 .github/workflows/update-highway.yml vendored
@@ -107,7 +107,7 @@ jobs:
           commit-message: "deps: update highway to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
           title: "deps: update highway to ${{ steps.check-version.outputs.tag }}"
           delete-branch: true
-          branch: deps/update-highway-${{ github.run_number }}
+          branch: deps/update-highway
           body: |
             ## What does this PR do?
 
2 .github/workflows/update-libarchive.yml vendored
@@ -88,7 +88,7 @@ jobs:
           commit-message: "deps: update libarchive to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
           title: "deps: update libarchive to ${{ steps.check-version.outputs.tag }}"
           delete-branch: true
-          branch: deps/update-libarchive-${{ github.run_number }}
+          branch: deps/update-libarchive
           body: |
             ## What does this PR do?
 
2 .github/workflows/update-libdeflate.yml vendored
@@ -88,7 +88,7 @@ jobs:
           commit-message: "deps: update libdeflate to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
           title: "deps: update libdeflate to ${{ steps.check-version.outputs.tag }}"
           delete-branch: true
-          branch: deps/update-libdeflate-${{ github.run_number }}
+          branch: deps/update-libdeflate
           body: |
             ## What does this PR do?
 
2 .github/workflows/update-lolhtml.yml vendored
@@ -100,7 +100,7 @@ jobs:
           commit-message: "deps: update lolhtml to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
           title: "deps: update lolhtml to ${{ steps.check-version.outputs.tag }}"
           delete-branch: true
-          branch: deps/update-lolhtml-${{ github.run_number }}
+          branch: deps/update-lolhtml
           body: |
             ## What does this PR do?
 
2 .github/workflows/update-lshpack.yml vendored
@@ -105,7 +105,7 @@ jobs:
           commit-message: "deps: update lshpack to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
           title: "deps: update lshpack to ${{ steps.check-version.outputs.tag }}"
           delete-branch: true
-          branch: deps/update-lshpack-${{ github.run_number }}
+          branch: deps/update-lshpack
           body: |
             ## What does this PR do?
 
2 .github/workflows/update-root-certs.yml vendored
@@ -74,7 +74,7 @@ jobs:
             ```
             ${{ env.changed_files }}
             ```
-          branch: certs/update-root-certs-${{ github.run_number }}
+          branch: certs/update-root-certs
           base: main
           delete-branch: true
           labels:
2 .github/workflows/update-sqlite3.yml vendored
@@ -83,7 +83,7 @@ jobs:
           commit-message: "deps: update sqlite to ${{ steps.check-version.outputs.latest }}"
           title: "deps: update sqlite to ${{ steps.check-version.outputs.latest }}"
           delete-branch: true
-          branch: deps/update-sqlite-${{ steps.check-version.outputs.latest }}
+          branch: deps/update-sqlite
           body: |
             ## What does this PR do?
 
2 .github/workflows/update-vendor.yml vendored
@@ -68,7 +68,7 @@ jobs:
           commit-message: "deps: update ${{ matrix.package }} to ${{ steps.check-version.outputs.latest }} (${{ steps.check-version.outputs.latest }})"
           title: "deps: update ${{ matrix.package }} to ${{ steps.check-version.outputs.latest }}"
           delete-branch: true
-          branch: deps/update-${{ matrix.package }}-${{ github.run_number }}
+          branch: deps/update-${{ matrix.package }}
           body: |
             ## What does this PR do?
 
2 .github/workflows/update-zstd.yml vendored
@@ -88,7 +88,7 @@ jobs:
           commit-message: "deps: update zstd to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
           title: "deps: update zstd to ${{ steps.check-version.outputs.tag }}"
           delete-branch: true
-          branch: deps/update-zstd-${{ github.run_number }}
+          branch: deps/update-zstd
           body: |
             ## What does this PR do?
 
1 .gitignore vendored
@@ -1,4 +1,5 @@
+.claude/settings.local.json
 .direnv
 .DS_Store
 .env
 .envrc
23 CLAUDE.md
@@ -10,6 +10,8 @@ This is the Bun repository - an all-in-one JavaScript runtime & toolkit designed
 - **Run tests with your debug build**: `bun bd test <test-file>`
 - **CRITICAL**: Never use `bun test` directly - it won't include your changes
 - **Run any command with debug build**: `bun bd <command>`
+- **Run with JavaScript exception scope verification**: `BUN_JSC_validateExceptionChecks=1
+  BUN_JSC_dumpSimulatedThrows=1 bun bd <command>`
 
 Tip: Bun is already installed and in $PATH. The `bd` subcommand is a package.json script.
 
@@ -209,3 +211,24 @@ Built-in JavaScript modules use special syntax and are organized as:
 12. **Branch names must start with `claude/`** - This is a requirement for the CI to work.
 
 **ONLY** push up changes after running `bun bd test <file>` and ensuring your tests pass.
+
+## Debugging CI Failures
+
+Use `scripts/buildkite-failures.ts` to fetch and analyze CI build failures:
+
+```bash
+# View failures for current branch
+bun run scripts/buildkite-failures.ts
+
+# View failures for a specific build number
+bun run scripts/buildkite-failures.ts 35051
+
+# View failures for a GitHub PR
+bun run scripts/buildkite-failures.ts #26173
+bun run scripts/buildkite-failures.ts https://github.com/oven-sh/bun/pull/26173
+
+# Wait for build to complete (polls every 10s until pass/fail)
+bun run scripts/buildkite-failures.ts --wait
+```
+
+The script fetches logs from BuildKite's public API and saves complete logs to `/tmp/bun-build-{number}-{platform}-{step}.log`. It displays a summary of errors and the file path for each failed job. Use `--wait` to poll continuously until the build completes or fails.
@@ -259,18 +259,13 @@ $ git clone https://github.com/oven-sh/WebKit vendor/WebKit
 # Check out the commit hash specified in `set(WEBKIT_VERSION <commit_hash>)` in cmake/tools/SetupWebKit.cmake
 $ git -C vendor/WebKit checkout <commit_hash>
 
-# Make a debug build of JSC. This will output build artifacts in ./vendor/WebKit/WebKitBuild/Debug
-# Optionally, you can use `bun run jsc:build` for a release build
-$ bun run jsc:build:debug && rm vendor/WebKit/WebKitBuild/Debug/JavaScriptCore/DerivedSources/inspector/InspectorProtocolObjects.h
-
-# After an initial run of `make jsc-debug`, you can rebuild JSC with:
-$ cmake --build vendor/WebKit/WebKitBuild/Debug --target jsc && rm vendor/WebKit/WebKitBuild/Debug/JavaScriptCore/DerivedSources/inspector/InspectorProtocolObjects.h
-
-# Build bun with the local JSC build
+# Build bun with the local JSC build — this automatically configures and builds JSC
 $ bun run build:local
 ```
 
-Using `bun run build:local` will build Bun in the `./build/debug-local` directory (instead of `./build/debug`), you'll have to change a couple of places to use this new directory:
+`bun run build:local` handles everything: configuring JSC, building JSC, and building Bun. On subsequent runs, JSC will incrementally rebuild if any WebKit sources changed. `ninja -Cbuild/debug-local` also works after the first build, and will build Bun+JSC.
+
+The build output goes to `./build/debug-local` (instead of `./build/debug`), so you'll need to update a couple of places:
 
 - The first line in [`src/js/builtins.d.ts`](/src/js/builtins.d.ts)
 - The `CompilationDatabase` line in [`.clangd` config](/.clangd) should be `CompilationDatabase: build/debug-local`
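For reference, the simplified workflow that the rewritten paragraph describes ends up looking roughly like this (a sketch; it assumes the vendor/WebKit checkout from the steps above is already in place):

```bash
bun run build:local            # configures JSC, builds JSC, then builds Bun into ./build/debug-local
ninja -C build/debug-local     # incremental Bun + JSC rebuilds after the first build
./build/debug-local/bun-debug --print '42'
```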
@@ -281,7 +276,7 @@ Note that the WebKit folder, including build artifacts, is 8GB+ in size.
 
 If you are using a JSC debug build and using VScode, make sure to run the `C/C++: Select a Configuration` command to configure intellisense to find the debug headers.
 
-Note that if you change make changes to our [WebKit fork](https://github.com/oven-sh/WebKit), you will also have to change [`SetupWebKit.cmake`](/cmake/tools/SetupWebKit.cmake) to point to the commit hash.
+Note that if you make changes to our [WebKit fork](https://github.com/oven-sh/WebKit), you will also have to change [`SetupWebKit.cmake`](/cmake/tools/SetupWebKit.cmake) to point to the commit hash.
 
 ## Troubleshooting
 
@@ -36,6 +36,7 @@ Bun statically links these libraries:
 | [`libbase64`](https://github.com/aklomp/base64/blob/master/LICENSE) | BSD 2-Clause |
 | [`libuv`](https://github.com/libuv/libuv) (on Windows) | MIT |
 | [`libdeflate`](https://github.com/ebiggers/libdeflate) | MIT |
+| [`uucode`](https://github.com/jacobsandlund/uucode) | MIT |
 | A fork of [`uWebsockets`](https://github.com/jarred-sumner/uwebsockets) | Apache 2.0 licensed |
 | Parts of [Tigerbeetle's IO code](https://github.com/tigerbeetle/tigerbeetle/blob/532c8b70b9142c17e07737ab6d3da68d7500cbca/src/io/windows.zig#L1) | Apache 2.0 licensed |
 
207 bench/bun.lock
@@ -18,13 +18,18 @@
|
||||
"fast-glob": "3.3.1",
|
||||
"fastify": "^5.0.0",
|
||||
"fdir": "^6.1.0",
|
||||
"mitata": "^1.0.25",
|
||||
"react": "^18.3.1",
|
||||
"react-dom": "^18.3.1",
|
||||
"marked": "^17.0.1",
|
||||
"mitata": "1.0.20",
|
||||
"react": "^19",
|
||||
"react-dom": "^19",
|
||||
"react-markdown": "^9.0.3",
|
||||
"remark": "^15.0.1",
|
||||
"remark-html": "^16.0.1",
|
||||
"string-width": "7.1.0",
|
||||
"strip-ansi": "^7.1.0",
|
||||
"tar": "^7.4.3",
|
||||
"tinycolor2": "^1.6.0",
|
||||
"wrap-ansi": "^9.0.0",
|
||||
"zx": "^7.2.3",
|
||||
},
|
||||
"devDependencies": {
|
||||
@@ -149,18 +154,36 @@
|
||||
|
||||
"@swc/core-win32-x64-msvc": ["@swc/core-win32-x64-msvc@1.3.35", "", { "os": "win32", "cpu": "x64" }, "sha512-/RvphT4WfuGfIK84Ha0dovdPrKB1bW/mc+dtdmhv2E3EGkNc5FoueNwYmXWRimxnU7X0X7IkcRhyKB4G5DeAmg=="],
|
||||
|
||||
"@types/debug": ["@types/debug@4.1.12", "", { "dependencies": { "@types/ms": "*" } }, "sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ=="],
|
||||
|
||||
"@types/estree": ["@types/estree@1.0.8", "", {}, "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w=="],
|
||||
|
||||
"@types/estree-jsx": ["@types/estree-jsx@1.0.5", "", { "dependencies": { "@types/estree": "*" } }, "sha512-52CcUVNFyfb1A2ALocQw/Dd1BQFNmSdkuC3BkZ6iqhdMfQz7JWOFRuJFloOzjk+6WijU56m9oKXFAXc7o3Towg=="],
|
||||
|
||||
"@types/fs-extra": ["@types/fs-extra@11.0.4", "", { "dependencies": { "@types/jsonfile": "*", "@types/node": "*" } }, "sha512-yTbItCNreRooED33qjunPthRcSjERP1r4MqCZc7wv0u2sUkzTFp45tgUfS5+r7FrZPdmCCNflLhVSP/o+SemsQ=="],
|
||||
|
||||
"@types/hast": ["@types/hast@3.0.4", "", { "dependencies": { "@types/unist": "*" } }, "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ=="],
|
||||
|
||||
"@types/jsonfile": ["@types/jsonfile@6.1.4", "", { "dependencies": { "@types/node": "*" } }, "sha512-D5qGUYwjvnNNextdU59/+fI+spnwtTFmyQP0h+PfIOSkNfpU6AOICUOkm4i0OnSk+NyjdPJrxCDro0sJsWlRpQ=="],
|
||||
|
||||
"@types/mdast": ["@types/mdast@4.0.4", "", { "dependencies": { "@types/unist": "*" } }, "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA=="],
|
||||
|
||||
"@types/minimist": ["@types/minimist@1.2.5", "", {}, "sha512-hov8bUuiLiyFPGyFPE1lwWhmzYbirOXQNNo40+y3zow8aFVTeyn3VWL0VFFfdNddA8S4Vf0Tc062rzyNr7Paag=="],
|
||||
|
||||
"@types/ms": ["@types/ms@2.1.0", "", {}, "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA=="],
|
||||
|
||||
"@types/node": ["@types/node@18.19.8", "", { "dependencies": { "undici-types": "~5.26.4" } }, "sha512-g1pZtPhsvGVTwmeVoexWZLTQaOvXwoSq//pTL0DHeNzUDrFnir4fgETdhjhIxjVnN+hKOuh98+E1eMLnUXstFg=="],
|
||||
|
||||
"@types/ps-tree": ["@types/ps-tree@1.1.6", "", {}, "sha512-PtrlVaOaI44/3pl3cvnlK+GxOM3re2526TJvPvh7W+keHIXdV4TE0ylpPBAcvFQCbGitaTXwL9u+RF7qtVeazQ=="],
|
||||
|
||||
"@types/react": ["@types/react@19.2.10", "", { "dependencies": { "csstype": "^3.2.2" } }, "sha512-WPigyYuGhgZ/cTPRXB2EwUw+XvsRA3GqHlsP4qteqrnnjDrApbS7MxcGr/hke5iUoeB7E/gQtrs9I37zAJ0Vjw=="],
|
||||
|
||||
"@types/unist": ["@types/unist@3.0.3", "", {}, "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q=="],
|
||||
|
||||
"@types/which": ["@types/which@3.0.3", "", {}, "sha512-2C1+XoY0huExTbs8MQv1DuS5FS86+SEjdM9F/+GS61gg5Hqbtj8ZiDSx8MfWcyei907fIPbfPGCOrNUTnVHY1g=="],
|
||||
|
||||
"@ungap/structured-clone": ["@ungap/structured-clone@1.3.0", "", {}, "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g=="],
|
||||
|
||||
"abstract-logging": ["abstract-logging@2.0.1", "", {}, "sha512-2BjRTZxTPvheOvGbBslFSYOUkr+SjPtOnrLP33f+VIWLzezQpZcqVg7ja3L4dBXmzzgwT+a029jRx5PCi3JuiA=="],
|
||||
|
||||
"ajv": ["ajv@8.17.1", "", { "dependencies": { "fast-deep-equal": "^3.1.3", "fast-uri": "^3.0.1", "json-schema-traverse": "^1.0.0", "require-from-string": "^2.0.2" } }, "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g=="],
|
||||
@@ -169,12 +192,14 @@
|
||||
|
||||
"ansi-regex": ["ansi-regex@6.0.1", "", {}, "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA=="],
|
||||
|
||||
"ansi-styles": ["ansi-styles@3.2.1", "", { "dependencies": { "color-convert": "^1.9.0" } }, "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA=="],
|
||||
"ansi-styles": ["ansi-styles@6.2.3", "https://artifactory.infra.ant.dev:443/artifactory/api/npm/npm-all/ansi-styles/-/ansi-styles-6.2.3.tgz", {}, "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg=="],
|
||||
|
||||
"atomic-sleep": ["atomic-sleep@1.0.0", "", {}, "sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ=="],
|
||||
|
||||
"avvio": ["avvio@9.1.0", "", { "dependencies": { "@fastify/error": "^4.0.0", "fastq": "^1.17.1" } }, "sha512-fYASnYi600CsH/j9EQov7lECAniYiBFiiAtBNuZYLA2leLe9qOvZzqYHFjtIj6gD2VMoMLP14834LFWvr4IfDw=="],
|
||||
|
||||
"bail": ["bail@2.0.2", "", {}, "sha512-0xO6mYd7JB2YesxDKplafRpsiOzPt9V02ddPCLbY1xYGPOX24NTyN50qnUxgCPcSoYMhKpAuBTjQoRZCAkUDRw=="],
|
||||
|
||||
"benchmark": ["benchmark@2.1.4", "", { "dependencies": { "lodash": "^4.17.4", "platform": "^1.3.3" } }, "sha512-l9MlfN4M1K/H2fbhfMy3B7vJd6AGKJVQn2h6Sg/Yx+KckoUA7ewS5Vv6TjSq18ooE1kS9hhAlQRH3AkXIh/aOQ=="],
|
||||
|
||||
"braces": ["braces@3.0.2", "", { "dependencies": { "fill-range": "^7.0.1" } }, "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A=="],
|
||||
@@ -183,8 +208,18 @@
|
||||
|
||||
"caniuse-lite": ["caniuse-lite@1.0.30001456", "", {}, "sha512-XFHJY5dUgmpMV25UqaD4kVq2LsiaU5rS8fb0f17pCoXQiQslzmFgnfOxfvo1bTpTqf7dwG/N/05CnLCnOEKmzA=="],
|
||||
|
||||
"ccount": ["ccount@2.0.1", "", {}, "sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg=="],
|
||||
|
||||
"chalk": ["chalk@5.3.0", "", {}, "sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w=="],
|
||||
|
||||
"character-entities": ["character-entities@2.0.2", "", {}, "sha512-shx7oQ0Awen/BRIdkjkvz54PnEEI/EjwXDSIZp86/KKdbafHh1Df/RYGBhn4hbe2+uKC9FnT5UCEdyPz3ai9hQ=="],
|
||||
|
||||
"character-entities-html4": ["character-entities-html4@2.1.0", "", {}, "sha512-1v7fgQRj6hnSwFpq1Eu0ynr/CDEw0rXo2B61qXrLNdHZmPKgb7fqS1a2JwF0rISo9q77jDI8VMEHoApn8qDoZA=="],
|
||||
|
||||
"character-entities-legacy": ["character-entities-legacy@3.0.0", "", {}, "sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ=="],
|
||||
|
||||
"character-reference-invalid": ["character-reference-invalid@2.0.1", "", {}, "sha512-iBZ4F4wRbyORVsu0jPV7gXkOsGYjGHPmAyv+HiHG8gi5PtC9KI2j1+v8/tlibRvjoWX027ypmG/n0HtO5t7unw=="],
|
||||
|
||||
"chownr": ["chownr@3.0.0", "", {}, "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g=="],
|
||||
|
||||
"color": ["color@4.2.3", "", { "dependencies": { "color-convert": "^2.0.1", "color-string": "^1.9.0" } }, "sha512-1rXeuUUiGGrykh+CeBdu5Ie7OJwinCgQY0bc7GCRxy5xVHy+moaqkpL/jqQq0MtQOeYcrqEz4abc5f0KtU7W4A=="],
|
||||
@@ -195,18 +230,26 @@
|
||||
|
||||
"color-string": ["color-string@1.9.1", "", { "dependencies": { "color-name": "^1.0.0", "simple-swizzle": "^0.2.2" } }, "sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg=="],
|
||||
|
||||
"comma-separated-tokens": ["comma-separated-tokens@2.0.3", "", {}, "sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg=="],
|
||||
|
||||
"convert-source-map": ["convert-source-map@1.9.0", "", {}, "sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A=="],
|
||||
|
||||
"cookie": ["cookie@1.0.2", "", {}, "sha512-9Kr/j4O16ISv8zBBhJoi4bXOYNTkFLOqSL3UDB0njXxCXNezjeyVrJyGOWtgfs/q2km1gwBcfH8q1yEGoMYunA=="],
|
||||
|
||||
"cross-spawn": ["cross-spawn@7.0.3", "", { "dependencies": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", "which": "^2.0.1" } }, "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w=="],
|
||||
|
||||
"csstype": ["csstype@3.2.3", "", {}, "sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ=="],
|
||||
|
||||
"data-uri-to-buffer": ["data-uri-to-buffer@4.0.1", "", {}, "sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A=="],
|
||||
|
||||
"debug": ["debug@4.3.4", "", { "dependencies": { "ms": "2.1.2" } }, "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ=="],
|
||||
|
||||
"decode-named-character-reference": ["decode-named-character-reference@1.3.0", "", { "dependencies": { "character-entities": "^2.0.0" } }, "sha512-GtpQYB283KrPp6nRw50q3U9/VfOutZOe103qlN7BPP6Ad27xYnOIWv4lPzo8HCAL+mMZofJ9KEy30fq6MfaK6Q=="],
|
||||
|
||||
"dequal": ["dequal@2.0.3", "", {}, "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA=="],
|
||||
|
||||
"devlop": ["devlop@1.1.0", "", { "dependencies": { "dequal": "^2.0.0" } }, "sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA=="],
|
||||
|
||||
"dir-glob": ["dir-glob@3.0.1", "", { "dependencies": { "path-type": "^4.0.0" } }, "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA=="],
|
||||
|
||||
"duplexer": ["duplexer@0.1.2", "", {}, "sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg=="],
|
||||
@@ -261,12 +304,16 @@
|
||||
|
||||
"escape-string-regexp": ["escape-string-regexp@1.0.5", "", {}, "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg=="],
|
||||
|
||||
"estree-util-is-identifier-name": ["estree-util-is-identifier-name@3.0.0", "", {}, "sha512-hFtqIDZTIUZ9BXLb8y4pYGyk6+wekIivNVTcmvk8NoOh+VeRn5y6cEHzbURrWbfp1fIqdVipilzj+lfaadNZmg=="],
|
||||
|
||||
"event-stream": ["event-stream@3.3.4", "", { "dependencies": { "duplexer": "~0.1.1", "from": "~0", "map-stream": "~0.1.0", "pause-stream": "0.0.11", "split": "0.3", "stream-combiner": "~0.0.4", "through": "~2.3.1" } }, "sha512-QHpkERcGsR0T7Qm3HNJSyXKEEj8AHNxkY3PK8TS2KJvQ7NiSHe3DDpwVKKtoYprL/AreyzFBeIkBIWChAqn60g=="],
|
||||
|
||||
"eventemitter3": ["eventemitter3@5.0.0", "", {}, "sha512-riuVbElZZNXLeLEoprfNYoDSwTBRR44X3mnhdI1YcnENpWTCsTTVZ2zFuqQcpoyqPQIUXdiPEU0ECAq0KQRaHg=="],
|
||||
|
||||
"execa": ["execa@8.0.1", "", { "dependencies": { "cross-spawn": "^7.0.3", "get-stream": "^8.0.1", "human-signals": "^5.0.0", "is-stream": "^3.0.0", "merge-stream": "^2.0.0", "npm-run-path": "^5.1.0", "onetime": "^6.0.0", "signal-exit": "^4.1.0", "strip-final-newline": "^3.0.0" } }, "sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg=="],
|
||||
|
||||
"extend": ["extend@3.0.2", "", {}, "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g=="],
|
||||
|
||||
"fast-decode-uri-component": ["fast-decode-uri-component@1.0.1", "", {}, "sha512-WKgKWg5eUxvRZGwW8FvfbaH7AXSh2cL+3j5fMGzUMCxWBJ3dV3a7Wz8y2f/uQ0e3B6WmodD3oS54jTQ9HVTIIg=="],
|
||||
|
||||
"fast-deep-equal": ["fast-deep-equal@3.1.3", "", {}, "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="],
|
||||
@@ -317,20 +364,44 @@
|
||||
|
||||
"has-flag": ["has-flag@3.0.0", "", {}, "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw=="],
|
||||
|
||||
"hast-util-sanitize": ["hast-util-sanitize@5.0.2", "", { "dependencies": { "@types/hast": "^3.0.0", "@ungap/structured-clone": "^1.0.0", "unist-util-position": "^5.0.0" } }, "sha512-3yTWghByc50aGS7JlGhk61SPenfE/p1oaFeNwkOOyrscaOkMGrcW9+Cy/QAIOBpZxP1yqDIzFMR0+Np0i0+usg=="],
|
||||
|
||||
"hast-util-to-html": ["hast-util-to-html@9.0.5", "", { "dependencies": { "@types/hast": "^3.0.0", "@types/unist": "^3.0.0", "ccount": "^2.0.0", "comma-separated-tokens": "^2.0.0", "hast-util-whitespace": "^3.0.0", "html-void-elements": "^3.0.0", "mdast-util-to-hast": "^13.0.0", "property-information": "^7.0.0", "space-separated-tokens": "^2.0.0", "stringify-entities": "^4.0.0", "zwitch": "^2.0.4" } }, "sha512-OguPdidb+fbHQSU4Q4ZiLKnzWo8Wwsf5bZfbvu7//a9oTYoqD/fWpe96NuHkoS9h0ccGOTe0C4NGXdtS0iObOw=="],
|
||||
|
||||
"hast-util-to-jsx-runtime": ["hast-util-to-jsx-runtime@2.3.6", "", { "dependencies": { "@types/estree": "^1.0.0", "@types/hast": "^3.0.0", "@types/unist": "^3.0.0", "comma-separated-tokens": "^2.0.0", "devlop": "^1.0.0", "estree-util-is-identifier-name": "^3.0.0", "hast-util-whitespace": "^3.0.0", "mdast-util-mdx-expression": "^2.0.0", "mdast-util-mdx-jsx": "^3.0.0", "mdast-util-mdxjs-esm": "^2.0.0", "property-information": "^7.0.0", "space-separated-tokens": "^2.0.0", "style-to-js": "^1.0.0", "unist-util-position": "^5.0.0", "vfile-message": "^4.0.0" } }, "sha512-zl6s8LwNyo1P9uw+XJGvZtdFF1GdAkOg8ujOw+4Pyb76874fLps4ueHXDhXWdk6YHQ6OgUtinliG7RsYvCbbBg=="],
|
||||
|
||||
"hast-util-whitespace": ["hast-util-whitespace@3.0.0", "", { "dependencies": { "@types/hast": "^3.0.0" } }, "sha512-88JUN06ipLwsnv+dVn+OIYOvAuvBMy/Qoi6O7mQHxdPXpjy+Cd6xRkWwux7DKO+4sYILtLBRIKgsdpS2gQc7qw=="],
|
||||
|
||||
"html-url-attributes": ["html-url-attributes@3.0.1", "", {}, "sha512-ol6UPyBWqsrO6EJySPz2O7ZSr856WDrEzM5zMqp+FJJLGMW35cLYmmZnl0vztAZxRUoNZJFTCohfjuIJ8I4QBQ=="],
|
||||
|
||||
"html-void-elements": ["html-void-elements@3.0.0", "", {}, "sha512-bEqo66MRXsUGxWHV5IP0PUiAWwoEjba4VCzg0LjFJBpchPaTfyfCKTG6bc5F8ucKec3q5y6qOdGyYTSBEvhCrg=="],
|
||||
|
||||
"human-signals": ["human-signals@5.0.0", "", {}, "sha512-AXcZb6vzzrFAUE61HnN4mpLqd/cSIwNQjtNWR0euPm6y0iqx3G4gOXaIDdtdDwZmhwe82LA6+zinmW4UBWVePQ=="],
|
||||
|
||||
"ignore": ["ignore@5.3.0", "", {}, "sha512-g7dmpshy+gD7mh88OC9NwSGTKoc3kyLAZQRU1mt53Aw/vnvfXnbC+F/7F7QoYVKbV+KNvJx8wArewKy1vXMtlg=="],
|
||||
|
||||
"inline-style-parser": ["inline-style-parser@0.2.7", "", {}, "sha512-Nb2ctOyNR8DqQoR0OwRG95uNWIC0C1lCgf5Naz5H6Ji72KZ8OcFZLz2P5sNgwlyoJ8Yif11oMuYs5pBQa86csA=="],
|
||||
|
||||
"ipaddr.js": ["ipaddr.js@2.2.0", "", {}, "sha512-Ag3wB2o37wslZS19hZqorUnrnzSkpOVy+IiiDEiTqNubEYpYuHWIf6K4psgN2ZWKExS4xhVCrRVfb/wfW8fWJA=="],
|
||||
|
||||
"is-alphabetical": ["is-alphabetical@2.0.1", "", {}, "sha512-FWyyY60MeTNyeSRpkM2Iry0G9hpr7/9kD40mD/cGQEuilcZYS4okz8SN2Q6rLCJ8gbCt6fN+rC+6tMGS99LaxQ=="],
|
||||
|
||||
"is-alphanumerical": ["is-alphanumerical@2.0.1", "", { "dependencies": { "is-alphabetical": "^2.0.0", "is-decimal": "^2.0.0" } }, "sha512-hmbYhX/9MUMF5uh7tOXyK/n0ZvWpad5caBA17GsC6vyuCqaWliRG5K1qS9inmUhEMaOBIW7/whAnSwveW/LtZw=="],
|
||||
|
||||
"is-arrayish": ["is-arrayish@0.3.2", "", {}, "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ=="],
|
||||
|
||||
"is-decimal": ["is-decimal@2.0.1", "", {}, "sha512-AAB9hiomQs5DXWcRB1rqsxGUstbRroFOPPVAomNk/3XHR5JyEZChOyTWe2oayKnsSsr/kcGqF+z6yuH6HHpN0A=="],
|
||||
|
||||
"is-extglob": ["is-extglob@2.1.1", "", {}, "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ=="],
|
||||
|
||||
"is-glob": ["is-glob@4.0.3", "", { "dependencies": { "is-extglob": "^2.1.1" } }, "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg=="],
|
||||
|
||||
"is-hexadecimal": ["is-hexadecimal@2.0.1", "", {}, "sha512-DgZQp241c8oO6cA1SbTEWiXeoxV42vlcJxgH+B3hi1AiqqKruZR3ZGF8In3fj4+/y/7rHvlOZLZtgJ/4ttYGZg=="],
|
||||
|
||||
"is-number": ["is-number@7.0.0", "", {}, "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng=="],
|
||||
|
||||
"is-plain-obj": ["is-plain-obj@4.1.0", "", {}, "sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg=="],
|
||||
|
||||
"is-stream": ["is-stream@3.0.0", "", {}, "sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA=="],
|
||||
|
||||
"isexe": ["isexe@2.0.0", "", {}, "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw=="],
|
||||
@@ -351,16 +422,76 @@
|
||||
|
||||
"lodash": ["lodash@4.17.21", "", {}, "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="],
|
||||
|
||||
"loose-envify": ["loose-envify@1.4.0", "", { "dependencies": { "js-tokens": "^3.0.0 || ^4.0.0" }, "bin": { "loose-envify": "cli.js" } }, "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q=="],
|
||||
"longest-streak": ["longest-streak@3.1.0", "", {}, "sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g=="],
|
||||
|
||||
"lru-cache": ["lru-cache@5.1.1", "", { "dependencies": { "yallist": "^3.0.2" } }, "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w=="],
|
||||
|
||||
"map-stream": ["map-stream@0.1.0", "", {}, "sha512-CkYQrPYZfWnu/DAmVCpTSX/xHpKZ80eKh2lAkyA6AJTef6bW+6JpbQZN5rofum7da+SyN1bi5ctTm+lTfcCW3g=="],
|
||||
|
||||
"marked": ["marked@17.0.1", "", { "bin": { "marked": "bin/marked.js" } }, "sha512-boeBdiS0ghpWcSwoNm/jJBwdpFaMnZWRzjA6SkUMYb40SVaN1x7mmfGKp0jvexGcx+7y2La5zRZsYFZI6Qpypg=="],
|
||||
|
||||
"mdast-util-from-markdown": ["mdast-util-from-markdown@2.0.2", "", { "dependencies": { "@types/mdast": "^4.0.0", "@types/unist": "^3.0.0", "decode-named-character-reference": "^1.0.0", "devlop": "^1.0.0", "mdast-util-to-string": "^4.0.0", "micromark": "^4.0.0", "micromark-util-decode-numeric-character-reference": "^2.0.0", "micromark-util-decode-string": "^2.0.0", "micromark-util-normalize-identifier": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0", "unist-util-stringify-position": "^4.0.0" } }, "sha512-uZhTV/8NBuw0WHkPTrCqDOl0zVe1BIng5ZtHoDk49ME1qqcjYmmLmOf0gELgcRMxN4w2iuIeVso5/6QymSrgmA=="],
|
||||
|
||||
"mdast-util-mdx-expression": ["mdast-util-mdx-expression@2.0.1", "", { "dependencies": { "@types/estree-jsx": "^1.0.0", "@types/hast": "^3.0.0", "@types/mdast": "^4.0.0", "devlop": "^1.0.0", "mdast-util-from-markdown": "^2.0.0", "mdast-util-to-markdown": "^2.0.0" } }, "sha512-J6f+9hUp+ldTZqKRSg7Vw5V6MqjATc+3E4gf3CFNcuZNWD8XdyI6zQ8GqH7f8169MM6P7hMBRDVGnn7oHB9kXQ=="],
|
||||
|
||||
"mdast-util-mdx-jsx": ["mdast-util-mdx-jsx@3.2.0", "", { "dependencies": { "@types/estree-jsx": "^1.0.0", "@types/hast": "^3.0.0", "@types/mdast": "^4.0.0", "@types/unist": "^3.0.0", "ccount": "^2.0.0", "devlop": "^1.1.0", "mdast-util-from-markdown": "^2.0.0", "mdast-util-to-markdown": "^2.0.0", "parse-entities": "^4.0.0", "stringify-entities": "^4.0.0", "unist-util-stringify-position": "^4.0.0", "vfile-message": "^4.0.0" } }, "sha512-lj/z8v0r6ZtsN/cGNNtemmmfoLAFZnjMbNyLzBafjzikOM+glrjNHPlf6lQDOTccj9n5b0PPihEBbhneMyGs1Q=="],
|
||||
|
||||
"mdast-util-mdxjs-esm": ["mdast-util-mdxjs-esm@2.0.1", "", { "dependencies": { "@types/estree-jsx": "^1.0.0", "@types/hast": "^3.0.0", "@types/mdast": "^4.0.0", "devlop": "^1.0.0", "mdast-util-from-markdown": "^2.0.0", "mdast-util-to-markdown": "^2.0.0" } }, "sha512-EcmOpxsZ96CvlP03NghtH1EsLtr0n9Tm4lPUJUBccV9RwUOneqSycg19n5HGzCf+10LozMRSObtVr3ee1WoHtg=="],
|
||||
|
||||
"mdast-util-phrasing": ["mdast-util-phrasing@4.1.0", "", { "dependencies": { "@types/mdast": "^4.0.0", "unist-util-is": "^6.0.0" } }, "sha512-TqICwyvJJpBwvGAMZjj4J2n0X8QWp21b9l0o7eXyVJ25YNWYbJDVIyD1bZXE6WtV6RmKJVYmQAKWa0zWOABz2w=="],
|
||||
|
||||
"mdast-util-to-hast": ["mdast-util-to-hast@13.2.1", "", { "dependencies": { "@types/hast": "^3.0.0", "@types/mdast": "^4.0.0", "@ungap/structured-clone": "^1.0.0", "devlop": "^1.0.0", "micromark-util-sanitize-uri": "^2.0.0", "trim-lines": "^3.0.0", "unist-util-position": "^5.0.0", "unist-util-visit": "^5.0.0", "vfile": "^6.0.0" } }, "sha512-cctsq2wp5vTsLIcaymblUriiTcZd0CwWtCbLvrOzYCDZoWyMNV8sZ7krj09FSnsiJi3WVsHLM4k6Dq/yaPyCXA=="],
|
||||
|
||||
"mdast-util-to-markdown": ["mdast-util-to-markdown@2.1.2", "", { "dependencies": { "@types/mdast": "^4.0.0", "@types/unist": "^3.0.0", "longest-streak": "^3.0.0", "mdast-util-phrasing": "^4.0.0", "mdast-util-to-string": "^4.0.0", "micromark-util-classify-character": "^2.0.0", "micromark-util-decode-string": "^2.0.0", "unist-util-visit": "^5.0.0", "zwitch": "^2.0.0" } }, "sha512-xj68wMTvGXVOKonmog6LwyJKrYXZPvlwabaryTjLh9LuvovB/KAH+kvi8Gjj+7rJjsFi23nkUxRQv1KqSroMqA=="],
|
||||
|
||||
"mdast-util-to-string": ["mdast-util-to-string@4.0.0", "", { "dependencies": { "@types/mdast": "^4.0.0" } }, "sha512-0H44vDimn51F0YwvxSJSm0eCDOJTRlmN0R1yBh4HLj9wiV1Dn0QoXGbvFAWj2hSItVTlCmBF1hqKlIyUBVFLPg=="],
|
||||
|
||||
"merge-stream": ["merge-stream@2.0.0", "", {}, "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w=="],
|
||||
|
||||
"merge2": ["merge2@1.4.1", "", {}, "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg=="],
|
||||
|
||||
"micromark": ["micromark@4.0.2", "", { "dependencies": { "@types/debug": "^4.0.0", "debug": "^4.0.0", "decode-named-character-reference": "^1.0.0", "devlop": "^1.0.0", "micromark-core-commonmark": "^2.0.0", "micromark-factory-space": "^2.0.0", "micromark-util-character": "^2.0.0", "micromark-util-chunked": "^2.0.0", "micromark-util-combine-extensions": "^2.0.0", "micromark-util-decode-numeric-character-reference": "^2.0.0", "micromark-util-encode": "^2.0.0", "micromark-util-normalize-identifier": "^2.0.0", "micromark-util-resolve-all": "^2.0.0", "micromark-util-sanitize-uri": "^2.0.0", "micromark-util-subtokenize": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-zpe98Q6kvavpCr1NPVSCMebCKfD7CA2NqZ+rykeNhONIJBpc1tFKt9hucLGwha3jNTNI8lHpctWJWoimVF4PfA=="],
|
||||
|
||||
"micromark-core-commonmark": ["micromark-core-commonmark@2.0.3", "", { "dependencies": { "decode-named-character-reference": "^1.0.0", "devlop": "^1.0.0", "micromark-factory-destination": "^2.0.0", "micromark-factory-label": "^2.0.0", "micromark-factory-space": "^2.0.0", "micromark-factory-title": "^2.0.0", "micromark-factory-whitespace": "^2.0.0", "micromark-util-character": "^2.0.0", "micromark-util-chunked": "^2.0.0", "micromark-util-classify-character": "^2.0.0", "micromark-util-html-tag-name": "^2.0.0", "micromark-util-normalize-identifier": "^2.0.0", "micromark-util-resolve-all": "^2.0.0", "micromark-util-subtokenize": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-RDBrHEMSxVFLg6xvnXmb1Ayr2WzLAWjeSATAoxwKYJV94TeNavgoIdA0a9ytzDSVzBy2YKFK+emCPOEibLeCrg=="],
|
||||
|
||||
"micromark-factory-destination": ["micromark-factory-destination@2.0.1", "", { "dependencies": { "micromark-util-character": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-Xe6rDdJlkmbFRExpTOmRj9N3MaWmbAgdpSrBQvCFqhezUn4AHqJHbaEnfbVYYiexVSs//tqOdY/DxhjdCiJnIA=="],
|
||||
|
||||
"micromark-factory-label": ["micromark-factory-label@2.0.1", "", { "dependencies": { "devlop": "^1.0.0", "micromark-util-character": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-VFMekyQExqIW7xIChcXn4ok29YE3rnuyveW3wZQWWqF4Nv9Wk5rgJ99KzPvHjkmPXF93FXIbBp6YdW3t71/7Vg=="],
|
||||
|
||||
"micromark-factory-space": ["micromark-factory-space@2.0.1", "", { "dependencies": { "micromark-util-character": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-zRkxjtBxxLd2Sc0d+fbnEunsTj46SWXgXciZmHq0kDYGnck/ZSGj9/wULTV95uoeYiK5hRXP2mJ98Uo4cq/LQg=="],
|
||||
|
||||
"micromark-factory-title": ["micromark-factory-title@2.0.1", "", { "dependencies": { "micromark-factory-space": "^2.0.0", "micromark-util-character": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-5bZ+3CjhAd9eChYTHsjy6TGxpOFSKgKKJPJxr293jTbfry2KDoWkhBb6TcPVB4NmzaPhMs1Frm9AZH7OD4Cjzw=="],
|
||||
|
||||
"micromark-factory-whitespace": ["micromark-factory-whitespace@2.0.1", "", { "dependencies": { "micromark-factory-space": "^2.0.0", "micromark-util-character": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-Ob0nuZ3PKt/n0hORHyvoD9uZhr+Za8sFoP+OnMcnWK5lngSzALgQYKMr9RJVOWLqQYuyn6ulqGWSXdwf6F80lQ=="],
|
||||
|
||||
"micromark-util-character": ["micromark-util-character@2.1.1", "", { "dependencies": { "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q=="],
|
||||
|
||||
"micromark-util-chunked": ["micromark-util-chunked@2.0.1", "", { "dependencies": { "micromark-util-symbol": "^2.0.0" } }, "sha512-QUNFEOPELfmvv+4xiNg2sRYeS/P84pTW0TCgP5zc9FpXetHY0ab7SxKyAQCNCc1eK0459uoLI1y5oO5Vc1dbhA=="],
|
||||
|
||||
"micromark-util-classify-character": ["micromark-util-classify-character@2.0.1", "", { "dependencies": { "micromark-util-character": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-K0kHzM6afW/MbeWYWLjoHQv1sgg2Q9EccHEDzSkxiP/EaagNzCm7T/WMKZ3rjMbvIpvBiZgwR3dKMygtA4mG1Q=="],
|
||||
|
||||
"micromark-util-combine-extensions": ["micromark-util-combine-extensions@2.0.1", "", { "dependencies": { "micromark-util-chunked": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-OnAnH8Ujmy59JcyZw8JSbK9cGpdVY44NKgSM7E9Eh7DiLS2E9RNQf0dONaGDzEG9yjEl5hcqeIsj4hfRkLH/Bg=="],
|
||||
|
||||
"micromark-util-decode-numeric-character-reference": ["micromark-util-decode-numeric-character-reference@2.0.2", "", { "dependencies": { "micromark-util-symbol": "^2.0.0" } }, "sha512-ccUbYk6CwVdkmCQMyr64dXz42EfHGkPQlBj5p7YVGzq8I7CtjXZJrubAYezf7Rp+bjPseiROqe7G6foFd+lEuw=="],
|
||||
|
||||
"micromark-util-decode-string": ["micromark-util-decode-string@2.0.1", "", { "dependencies": { "decode-named-character-reference": "^1.0.0", "micromark-util-character": "^2.0.0", "micromark-util-decode-numeric-character-reference": "^2.0.0", "micromark-util-symbol": "^2.0.0" } }, "sha512-nDV/77Fj6eH1ynwscYTOsbK7rR//Uj0bZXBwJZRfaLEJ1iGBR6kIfNmlNqaqJf649EP0F3NWNdeJi03elllNUQ=="],
|
||||
|
||||
"micromark-util-encode": ["micromark-util-encode@2.0.1", "", {}, "sha512-c3cVx2y4KqUnwopcO9b/SCdo2O67LwJJ/UyqGfbigahfegL9myoEFoDYZgkT7f36T0bLrM9hZTAaAyH+PCAXjw=="],
|
||||
|
||||
"micromark-util-html-tag-name": ["micromark-util-html-tag-name@2.0.1", "", {}, "sha512-2cNEiYDhCWKI+Gs9T0Tiysk136SnR13hhO8yW6BGNyhOC4qYFnwF1nKfD3HFAIXA5c45RrIG1ub11GiXeYd1xA=="],
|
||||
|
||||
"micromark-util-normalize-identifier": ["micromark-util-normalize-identifier@2.0.1", "", { "dependencies": { "micromark-util-symbol": "^2.0.0" } }, "sha512-sxPqmo70LyARJs0w2UclACPUUEqltCkJ6PhKdMIDuJ3gSf/Q+/GIe3WKl0Ijb/GyH9lOpUkRAO2wp0GVkLvS9Q=="],
|
||||
|
||||
"micromark-util-resolve-all": ["micromark-util-resolve-all@2.0.1", "", { "dependencies": { "micromark-util-types": "^2.0.0" } }, "sha512-VdQyxFWFT2/FGJgwQnJYbe1jjQoNTS4RjglmSjTUlpUMa95Htx9NHeYW4rGDJzbjvCsl9eLjMQwGeElsqmzcHg=="],
|
||||
|
||||
"micromark-util-sanitize-uri": ["micromark-util-sanitize-uri@2.0.1", "", { "dependencies": { "micromark-util-character": "^2.0.0", "micromark-util-encode": "^2.0.0", "micromark-util-symbol": "^2.0.0" } }, "sha512-9N9IomZ/YuGGZZmQec1MbgxtlgougxTodVwDzzEouPKo3qFWvymFHWcnDi2vzV1ff6kas9ucW+o3yzJK9YB1AQ=="],
|
||||
|
||||
"micromark-util-subtokenize": ["micromark-util-subtokenize@2.1.0", "", { "dependencies": { "devlop": "^1.0.0", "micromark-util-chunked": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-XQLu552iSctvnEcgXw6+Sx75GflAPNED1qx7eBJ+wydBb2KCbRZe+NwvIEEMM83uml1+2WSXpBAcp9IUCgCYWA=="],
|
||||
|
||||
"micromark-util-symbol": ["micromark-util-symbol@2.0.1", "", {}, "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q=="],
|
||||
|
||||
"micromark-util-types": ["micromark-util-types@2.0.2", "", {}, "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA=="],
|
||||
|
||||
"micromatch": ["micromatch@4.0.5", "", { "dependencies": { "braces": "^3.0.2", "picomatch": "^2.3.1" } }, "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA=="],
|
||||
|
||||
"mimic-fn": ["mimic-fn@4.0.0", "", {}, "sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw=="],
|
||||
@@ -371,7 +502,7 @@
|
||||
|
||||
"minizlib": ["minizlib@3.1.0", "", { "dependencies": { "minipass": "^7.1.2" } }, "sha512-KZxYo1BUkWD2TVFLr0MQoM8vUUigWD3LlD83a/75BqC+4qE0Hb1Vo5v1FgcfaNXvfXzr+5EhQ6ing/CaBijTlw=="],
|
||||
|
||||
"mitata": ["mitata@1.0.25", "", {}, "sha512-0v5qZtVW5vwj9FDvYfraR31BMDcRLkhSFWPTLaxx/Z3/EvScfVtAAWtMI2ArIbBcwh7P86dXh0lQWKiXQPlwYA=="],
|
||||
"mitata": ["mitata@1.0.20", "", {}, "sha512-oHWYGX5bi4wGT/1zrhiZAEzqTV14Vq6/PUTW8WK0b3YHBBQcZz2QFm+InHhjnD0I7B6CMtwdGt2K0938r7YTdQ=="],
|
||||
|
||||
"ms": ["ms@2.1.2", "", {}, "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="],
|
||||
|
||||
@@ -387,6 +518,8 @@
|
||||
|
||||
"onetime": ["onetime@6.0.0", "", { "dependencies": { "mimic-fn": "^4.0.0" } }, "sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ=="],
|
||||
|
||||
"parse-entities": ["parse-entities@4.0.2", "", { "dependencies": { "@types/unist": "^2.0.0", "character-entities-legacy": "^3.0.0", "character-reference-invalid": "^2.0.0", "decode-named-character-reference": "^1.0.0", "is-alphanumerical": "^2.0.0", "is-decimal": "^2.0.0", "is-hexadecimal": "^2.0.0" } }, "sha512-GG2AQYWoLgL877gQIKeRPGO1xF9+eG1ujIb5soS5gPvLQ1y2o8FL90w2QWNdf9I361Mpp7726c+lj3U0qK1uGw=="],
|
||||
|
||||
"path-key": ["path-key@3.1.1", "", {}, "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q=="],
|
||||
|
||||
"path-type": ["path-type@4.0.0", "", {}, "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw=="],
|
||||
@@ -407,18 +540,32 @@
|
||||
|
||||
"process-warning": ["process-warning@5.0.0", "", {}, "sha512-a39t9ApHNx2L4+HBnQKqxxHNs1r7KF+Intd8Q/g1bUh6q0WIp9voPXJ/x0j+ZL45KF1pJd9+q2jLIRMfvEshkA=="],
|
||||
|
||||
"property-information": ["property-information@7.1.0", "", {}, "sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ=="],
|
||||
|
||||
"ps-tree": ["ps-tree@1.2.0", "", { "dependencies": { "event-stream": "=3.3.4" }, "bin": { "ps-tree": "./bin/ps-tree.js" } }, "sha512-0VnamPPYHl4uaU/nSFeZZpR21QAWRz+sRv4iW9+v/GS/J5U5iZB5BNN6J0RMoOvdx2gWM2+ZFMIm58q24e4UYA=="],
|
||||
|
||||
"queue-microtask": ["queue-microtask@1.2.3", "", {}, "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A=="],
|
||||
|
||||
"quick-format-unescaped": ["quick-format-unescaped@4.0.4", "", {}, "sha512-tYC1Q1hgyRuHgloV/YXs2w15unPVh8qfu/qCTfhTYamaw7fyhumKa2yGpdSo87vY32rIclj+4fWYQXUMs9EHvg=="],
|
||||
|
||||
"react": ["react@18.3.1", "", { "dependencies": { "loose-envify": "^1.1.0" } }, "sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ=="],
|
||||
"react": ["react@19.2.4", "", {}, "sha512-9nfp2hYpCwOjAN+8TZFGhtWEwgvWHXqESH8qT89AT/lWklpLON22Lc8pEtnpsZz7VmawabSU0gCjnj8aC0euHQ=="],
|
||||
|
||||
"react-dom": ["react-dom@18.3.1", "", { "dependencies": { "loose-envify": "^1.1.0", "scheduler": "^0.23.2" }, "peerDependencies": { "react": "^18.3.1" } }, "sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw=="],
|
||||
"react-dom": ["react-dom@19.2.4", "", { "dependencies": { "scheduler": "^0.27.0" }, "peerDependencies": { "react": "^19.2.4" } }, "sha512-AXJdLo8kgMbimY95O2aKQqsz2iWi9jMgKJhRBAxECE4IFxfcazB2LmzloIoibJI3C12IlY20+KFaLv+71bUJeQ=="],
|
||||
|
||||
"react-markdown": ["react-markdown@9.1.0", "", { "dependencies": { "@types/hast": "^3.0.0", "@types/mdast": "^4.0.0", "devlop": "^1.0.0", "hast-util-to-jsx-runtime": "^2.0.0", "html-url-attributes": "^3.0.0", "mdast-util-to-hast": "^13.0.0", "remark-parse": "^11.0.0", "remark-rehype": "^11.0.0", "unified": "^11.0.0", "unist-util-visit": "^5.0.0", "vfile": "^6.0.0" }, "peerDependencies": { "@types/react": ">=18", "react": ">=18" } }, "sha512-xaijuJB0kzGiUdG7nc2MOMDUDBWPyGAjZtUrow9XxUeua8IqeP+VlIfAZ3bphpcLTnSZXz6z9jcVC/TCwbfgdw=="],
|
||||
|
||||
"real-require": ["real-require@0.2.0", "", {}, "sha512-57frrGM/OCTLqLOAh0mhVA9VBMHd+9U7Zb2THMGdBUoZVOtGbJzjxsYGDJ3A9AYYCP4hn6y1TVbaOfzWtm5GFg=="],
|
||||
|
||||
"remark": ["remark@15.0.1", "", { "dependencies": { "@types/mdast": "^4.0.0", "remark-parse": "^11.0.0", "remark-stringify": "^11.0.0", "unified": "^11.0.0" } }, "sha512-Eht5w30ruCXgFmxVUSlNWQ9iiimq07URKeFS3hNc8cUWy1llX4KDWfyEDZRycMc+znsN9Ux5/tJ/BFdgdOwA3A=="],
|
||||
|
||||
"remark-html": ["remark-html@16.0.1", "", { "dependencies": { "@types/mdast": "^4.0.0", "hast-util-sanitize": "^5.0.0", "hast-util-to-html": "^9.0.0", "mdast-util-to-hast": "^13.0.0", "unified": "^11.0.0" } }, "sha512-B9JqA5i0qZe0Nsf49q3OXyGvyXuZFDzAP2iOFLEumymuYJITVpiH1IgsTEwTpdptDmZlMDMWeDmSawdaJIGCXQ=="],
|
||||
|
||||
"remark-parse": ["remark-parse@11.0.0", "", { "dependencies": { "@types/mdast": "^4.0.0", "mdast-util-from-markdown": "^2.0.0", "micromark-util-types": "^2.0.0", "unified": "^11.0.0" } }, "sha512-FCxlKLNGknS5ba/1lmpYijMUzX2esxW5xQqjWxw2eHFfS2MSdaHVINFmhjo+qN1WhZhNimq0dZATN9pH0IDrpA=="],
|
||||
|
||||
"remark-rehype": ["remark-rehype@11.1.2", "", { "dependencies": { "@types/hast": "^3.0.0", "@types/mdast": "^4.0.0", "mdast-util-to-hast": "^13.0.0", "unified": "^11.0.0", "vfile": "^6.0.0" } }, "sha512-Dh7l57ianaEoIpzbp0PC9UKAdCSVklD8E5Rpw7ETfbTl3FqcOOgq5q2LVDhgGCkaBv7p24JXikPdvhhmHvKMsw=="],
|
||||
|
||||
"remark-stringify": ["remark-stringify@11.0.0", "", { "dependencies": { "@types/mdast": "^4.0.0", "mdast-util-to-markdown": "^2.0.0", "unified": "^11.0.0" } }, "sha512-1OSmLd3awB/t8qdoEOMazZkNsfVTeY4fTsgzcQFdXNq8ToTN4ZGwrMnlda4K6smTFKD+GRV6O48i6Z4iKgPPpw=="],
|
||||
|
||||
"require-from-string": ["require-from-string@2.0.2", "", {}, "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw=="],
|
||||
|
||||
"ret": ["ret@0.5.0", "", {}, "sha512-I1XxrZSQ+oErkRR4jYbAyEEu2I0avBvvMM5JN+6EBprOGRCs63ENqZ3vjavq8fBw2+62G5LF5XelKwuJpcvcxw=="],
|
||||
@@ -433,7 +580,7 @@
|
||||
|
||||
"safe-stable-stringify": ["safe-stable-stringify@2.5.0", "", {}, "sha512-b3rppTKm9T+PsVCBEOUR46GWI7fdOs00VKZ1+9c1EWDaDMvjQc6tUwuFyIprgGgTcWoVHSKrU8H31ZHA2e0RHA=="],
|
||||
|
||||
"scheduler": ["scheduler@0.23.2", "", { "dependencies": { "loose-envify": "^1.1.0" } }, "sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ=="],
|
||||
"scheduler": ["scheduler@0.27.0", "", {}, "sha512-eNv+WrVbKu1f3vbYJT/xtiF5syA5HPIMtf9IgY/nKg0sWqzAUEvqY/xm7OcZc/qafLx/iO9FgOmeSAp4v5ti/Q=="],
|
||||
|
||||
"secure-json-parse": ["secure-json-parse@4.0.0", "", {}, "sha512-dxtLJO6sc35jWidmLxo7ij+Eg48PM/kleBsxpC8QJE0qJICe+KawkDQmvCMZUr9u7WKVHgMW6vy3fQ7zMiFZMA=="],
|
||||
|
||||
@@ -453,6 +600,8 @@
|
||||
|
||||
"sonic-boom": ["sonic-boom@4.2.0", "", { "dependencies": { "atomic-sleep": "^1.0.0" } }, "sha512-INb7TM37/mAcsGmc9hyyI6+QR3rR1zVRu36B0NeGXKnOOLiZOfER5SA+N7X7k3yUYRzLWafduTDvJAfDswwEww=="],
|
||||
|
||||
"space-separated-tokens": ["space-separated-tokens@2.0.2", "", {}, "sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q=="],
|
||||
|
||||
"split": ["split@0.3.3", "", { "dependencies": { "through": "2" } }, "sha512-wD2AeVmxXRBoX44wAycgjVpMhvbwdI2aZjCkvfNcH1YqHQvJVa1duWc73OyVGJUc05fhFaTZeQ/PYsrmyH0JVA=="],
|
||||
|
||||
"split2": ["split2@4.2.0", "", {}, "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg=="],
|
||||
@@ -461,10 +610,16 @@
|
||||
|
||||
"string-width": ["string-width@7.1.0", "", { "dependencies": { "emoji-regex": "^10.3.0", "get-east-asian-width": "^1.0.0", "strip-ansi": "^7.1.0" } }, "sha512-SEIJCWiX7Kg4c129n48aDRwLbFb2LJmXXFrWBG4NGaRtMQ3myKPKbwrD1BKqQn74oCoNMBVrfDEr5M9YxCsrkw=="],
|
||||
|
||||
"stringify-entities": ["stringify-entities@4.0.4", "", { "dependencies": { "character-entities-html4": "^2.0.0", "character-entities-legacy": "^3.0.0" } }, "sha512-IwfBptatlO+QCJUo19AqvrPNqlVMpW9YEL2LIVY+Rpv2qsjCGxaDLNRgeGsQWJhfItebuJhsGSLjaBbNSQ+ieg=="],
|
||||
|
||||
"strip-ansi": ["strip-ansi@7.1.0", "", { "dependencies": { "ansi-regex": "^6.0.1" } }, "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ=="],
|
||||
|
||||
"strip-final-newline": ["strip-final-newline@3.0.0", "", {}, "sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw=="],
|
||||
|
||||
"style-to-js": ["style-to-js@1.1.21", "", { "dependencies": { "style-to-object": "1.0.14" } }, "sha512-RjQetxJrrUJLQPHbLku6U/ocGtzyjbJMP9lCNK7Ag0CNh690nSH8woqWH9u16nMjYBAok+i7JO1NP2pOy8IsPQ=="],
|
||||
|
||||
"style-to-object": ["style-to-object@1.0.14", "", { "dependencies": { "inline-style-parser": "0.2.7" } }, "sha512-LIN7rULI0jBscWQYaSswptyderlarFkjQ+t79nzty8tcIAceVomEVlLzH5VP4Cmsv6MtKhs7qaAiwlcp+Mgaxw=="],
|
||||
|
||||
"supports-color": ["supports-color@5.5.0", "", { "dependencies": { "has-flag": "^3.0.0" } }, "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow=="],
|
||||
|
||||
"tar": ["tar@7.5.2", "", { "dependencies": { "@isaacs/fs-minipass": "^4.0.0", "chownr": "^3.0.0", "minipass": "^7.1.2", "minizlib": "^3.1.0", "yallist": "^5.0.0" } }, "sha512-7NyxrTE4Anh8km8iEy7o0QYPs+0JKBTj5ZaqHg6B39erLg0qYXN3BijtShwbsNSvQ+LN75+KV+C4QR/f6Gwnpg=="],
|
||||
@@ -481,30 +636,52 @@
|
||||
|
||||
"toad-cache": ["toad-cache@3.7.0", "", {}, "sha512-/m8M+2BJUpoJdgAHoG+baCwBT+tf2VraSfkBgl0Y00qIWt41DJ8R5B8nsEw0I58YwF5IZH6z24/2TobDKnqSWw=="],
|
||||
|
||||
"trim-lines": ["trim-lines@3.0.1", "", {}, "sha512-kRj8B+YHZCc9kQYdWfJB2/oUl9rA99qbowYYBtr4ui4mZyAQ2JpvVBd/6U2YloATfqBhBTSMhTpgBHtU0Mf3Rg=="],
|
||||
|
||||
"trough": ["trough@2.2.0", "", {}, "sha512-tmMpK00BjZiUyVyvrBK7knerNgmgvcV/KLVyuma/SC+TQN167GrMRciANTz09+k3zW8L8t60jWO1GpfkZdjTaw=="],
|
||||
|
||||
"undici-types": ["undici-types@5.26.5", "", {}, "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="],
|
||||
|
||||
"unified": ["unified@11.0.5", "", { "dependencies": { "@types/unist": "^3.0.0", "bail": "^2.0.0", "devlop": "^1.0.0", "extend": "^3.0.0", "is-plain-obj": "^4.0.0", "trough": "^2.0.0", "vfile": "^6.0.0" } }, "sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA=="],
|
||||
|
||||
"unist-util-is": ["unist-util-is@6.0.1", "", { "dependencies": { "@types/unist": "^3.0.0" } }, "sha512-LsiILbtBETkDz8I9p1dQ0uyRUWuaQzd/cuEeS1hoRSyW5E5XGmTzlwY1OrNzzakGowI9Dr/I8HVaw4hTtnxy8g=="],
|
||||
|
||||
"unist-util-position": ["unist-util-position@5.0.0", "", { "dependencies": { "@types/unist": "^3.0.0" } }, "sha512-fucsC7HjXvkB5R3kTCO7kUjRdrS0BJt3M/FPxmHMBOm8JQi2BsHAHFsy27E0EolP8rp0NzXsJ+jNPyDWvOJZPA=="],
|
||||
|
||||
"unist-util-stringify-position": ["unist-util-stringify-position@4.0.0", "", { "dependencies": { "@types/unist": "^3.0.0" } }, "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ=="],
|
||||
|
||||
"unist-util-visit": ["unist-util-visit@5.1.0", "", { "dependencies": { "@types/unist": "^3.0.0", "unist-util-is": "^6.0.0", "unist-util-visit-parents": "^6.0.0" } }, "sha512-m+vIdyeCOpdr/QeQCu2EzxX/ohgS8KbnPDgFni4dQsfSCtpz8UqDyY5GjRru8PDKuYn7Fq19j1CQ+nJSsGKOzg=="],
|
||||
|
||||
"unist-util-visit-parents": ["unist-util-visit-parents@6.0.2", "", { "dependencies": { "@types/unist": "^3.0.0", "unist-util-is": "^6.0.0" } }, "sha512-goh1s1TBrqSqukSc8wrjwWhL0hiJxgA8m4kFxGlQ+8FYQ3C/m11FcTs4YYem7V664AhHVvgoQLk890Ssdsr2IQ=="],
|
||||
|
||||
"universalify": ["universalify@2.0.1", "", {}, "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw=="],
|
||||
|
||||
"update-browserslist-db": ["update-browserslist-db@1.0.10", "", { "dependencies": { "escalade": "^3.1.1", "picocolors": "^1.0.0" }, "peerDependencies": { "browserslist": ">= 4.21.0" }, "bin": { "browserslist-lint": "cli.js" } }, "sha512-OztqDenkfFkbSG+tRxBeAnCVPckDBcvibKd35yDONx6OU8N7sqgwc7rCbkJ/WcYtVRZ4ba68d6byhC21GFh7sQ=="],
|
||||
|
||||
"vfile": ["vfile@6.0.3", "", { "dependencies": { "@types/unist": "^3.0.0", "vfile-message": "^4.0.0" } }, "sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q=="],
|
||||
|
||||
"vfile-message": ["vfile-message@4.0.3", "", { "dependencies": { "@types/unist": "^3.0.0", "unist-util-stringify-position": "^4.0.0" } }, "sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw=="],
|
||||
|
||||
"web-streams-polyfill": ["web-streams-polyfill@3.3.2", "", {}, "sha512-3pRGuxRF5gpuZc0W+EpwQRmCD7gRqcDOMt688KmdlDAgAyaB1XlN0zq2njfDNm44XVdIouE7pZ6GzbdyH47uIQ=="],
|
||||
|
||||
"webpod": ["webpod@0.0.2", "", { "bin": { "webpod": "dist/index.js" } }, "sha512-cSwwQIeg8v4i3p4ajHhwgR7N6VyxAf+KYSSsY6Pd3aETE+xEU4vbitz7qQkB0I321xnhDdgtxuiSfk5r/FVtjg=="],
|
||||
|
||||
"which": ["which@3.0.1", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "bin/which.js" } }, "sha512-XA1b62dzQzLfaEOSQFTCOd5KFf/1VSzZo7/7TUjnya6u0vGGKzU96UQBZTAThCb2j4/xjBAyii1OhRLJEivHvg=="],
|
||||
|
||||
"wrap-ansi": ["wrap-ansi@9.0.2", "https://artifactory.infra.ant.dev:443/artifactory/api/npm/npm-all/wrap-ansi/-/wrap-ansi-9.0.2.tgz", { "dependencies": { "ansi-styles": "^6.2.1", "string-width": "^7.0.0", "strip-ansi": "^7.1.0" } }, "sha512-42AtmgqjV+X1VpdOfyTGOYRi0/zsoLqtXQckTmqTeybT+BDIbM/Guxo7x3pE2vtpr1ok6xRqM9OpBe+Jyoqyww=="],
|
||||
|
||||
"yallist": ["yallist@5.0.0", "", {}, "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw=="],
|
||||
|
||||
"yaml": ["yaml@2.3.4", "", {}, "sha512-8aAvwVUSHpfEqTQ4w/KMlf3HcRdt50E5ODIQJBw1fQ5RL34xabzxtUlzTXVqc4rkZsPbvrXKWnABCD7kWSmocA=="],
|
||||
|
||||
"zwitch": ["zwitch@2.0.4", "", {}, "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A=="],
|
||||
|
||||
"zx": ["zx@7.2.3", "", { "dependencies": { "@types/fs-extra": "^11.0.1", "@types/minimist": "^1.2.2", "@types/node": "^18.16.3", "@types/ps-tree": "^1.1.2", "@types/which": "^3.0.0", "chalk": "^5.2.0", "fs-extra": "^11.1.1", "fx": "*", "globby": "^13.1.4", "minimist": "^1.2.8", "node-fetch": "3.3.1", "ps-tree": "^1.2.0", "webpod": "^0", "which": "^3.0.0", "yaml": "^2.2.2" }, "bin": { "zx": "build/cli.js" } }, "sha512-QODu38nLlYXg/B/Gw7ZKiZrvPkEsjPN3LQ5JFXM7h0JvwhEdPNNl+4Ao1y4+o3CLNiDUNcwzQYZ4/Ko7kKzCMA=="],
|
||||
|
||||
"@babel/generator/@jridgewell/gen-mapping": ["@jridgewell/gen-mapping@0.3.2", "", { "dependencies": { "@jridgewell/set-array": "^1.0.1", "@jridgewell/sourcemap-codec": "^1.4.10", "@jridgewell/trace-mapping": "^0.3.9" } }, "sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A=="],
|
||||
|
||||
"@babel/highlight/chalk": ["chalk@2.4.2", "", { "dependencies": { "ansi-styles": "^3.2.1", "escape-string-regexp": "^1.0.5", "supports-color": "^5.3.0" } }, "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ=="],
|
||||
|
||||
"ansi-styles/color-convert": ["color-convert@1.9.3", "", { "dependencies": { "color-name": "1.1.3" } }, "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg=="],
|
||||
|
||||
"avvio/fastq": ["fastq@1.19.1", "", { "dependencies": { "reusify": "^1.0.4" } }, "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ=="],
|
||||
|
||||
"cross-spawn/which": ["which@2.0.2", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "./bin/node-which" } }, "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA=="],
|
||||
@@ -517,6 +694,12 @@
|
||||
|
||||
"npm-run-path/path-key": ["path-key@4.0.0", "", {}, "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ=="],
|
||||
|
||||
"ansi-styles/color-convert/color-name": ["color-name@1.1.3", "", {}, "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw=="],
|
||||
"parse-entities/@types/unist": ["@types/unist@2.0.11", "", {}, "sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA=="],
|
||||
|
||||
"@babel/highlight/chalk/ansi-styles": ["ansi-styles@3.2.1", "", { "dependencies": { "color-convert": "^1.9.0" } }, "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA=="],
|
||||
|
||||
"@babel/highlight/chalk/ansi-styles/color-convert": ["color-convert@1.9.3", "", { "dependencies": { "color-name": "1.1.3" } }, "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg=="],
|
||||
|
||||
"@babel/highlight/chalk/ansi-styles/color-convert/color-name": ["color-name@1.1.3", "", {}, "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw=="],
|
||||
}
|
||||
}
|
||||
|
||||
bench/json5/bun.lock | 15 (new file)
@@ -0,0 +1,15 @@
{
  "lockfileVersion": 1,
  "configVersion": 1,
  "workspaces": {
    "": {
      "name": "json5-benchmark",
      "dependencies": {
        "json5": "^2.2.3",
      },
    },
  },
  "packages": {
    "json5": ["json5@2.2.3", "", { "bin": { "json5": "lib/cli.js" } }, "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg=="],
  }
}
bench/json5/json5.mjs | 88 (new file)
@@ -0,0 +1,88 @@
|
||||
import JSON5 from "json5";
|
||||
import { bench, group, run } from "../runner.mjs";
|
||||
|
||||
const isBun = typeof Bun !== "undefined" && Bun.JSON5;
|
||||
|
||||
function sizeLabel(n) {
|
||||
if (n >= 1024 * 1024) return `${(n / 1024 / 1024).toFixed(1)}MB`;
|
||||
if (n >= 1024) return `${(n / 1024).toFixed(0)}KB`;
|
||||
return `${n}B`;
|
||||
}
|
||||
|
||||
// -- parse inputs --
|
||||
|
||||
const smallJson5 = `{
|
||||
// User profile
|
||||
name: "John Doe",
|
||||
age: 30,
|
||||
email: 'john@example.com',
|
||||
active: true,
|
||||
}`;
|
||||
|
||||
function generateLargeJson5(count) {
|
||||
const lines = ["{\n // Auto-generated dataset\n items: [\n"];
|
||||
for (let i = 0; i < count; i++) {
|
||||
lines.push(` {
|
||||
id: ${i},
|
||||
name: 'item_${i}',
|
||||
value: ${(Math.random() * 1000).toFixed(2)},
|
||||
hex: 0x${i.toString(16).toUpperCase()},
|
||||
active: ${i % 2 === 0},
|
||||
tags: ['tag_${i % 10}', 'category_${i % 5}',],
|
||||
// entry ${i}
|
||||
},\n`);
|
||||
}
|
||||
lines.push(" ],\n total: " + count + ",\n status: 'complete',\n}\n");
|
||||
return lines.join("");
|
||||
}
|
||||
|
||||
const largeJson5 = generateLargeJson5(6500);
|
||||
|
||||
// -- stringify inputs --
|
||||
|
||||
const smallObject = {
|
||||
name: "John Doe",
|
||||
age: 30,
|
||||
email: "john@example.com",
|
||||
active: true,
|
||||
};
|
||||
|
||||
const largeObject = {
|
||||
items: Array.from({ length: 10000 }, (_, i) => ({
|
||||
id: i,
|
||||
name: `item_${i}`,
|
||||
value: +(Math.random() * 1000).toFixed(2),
|
||||
active: i % 2 === 0,
|
||||
tags: [`tag_${i % 10}`, `category_${i % 5}`],
|
||||
})),
|
||||
total: 10000,
|
||||
status: "complete",
|
||||
};
|
||||
|
||||
const stringify = isBun ? Bun.JSON5.stringify : JSON5.stringify;
|
||||
|
||||
// -- parse benchmarks --
|
||||
|
||||
group(`parse small (${sizeLabel(smallJson5.length)})`, () => {
|
||||
if (isBun) bench("Bun.JSON5.parse", () => Bun.JSON5.parse(smallJson5));
|
||||
bench("json5.parse", () => JSON5.parse(smallJson5));
|
||||
});
|
||||
|
||||
group(`parse large (${sizeLabel(largeJson5.length)})`, () => {
|
||||
if (isBun) bench("Bun.JSON5.parse", () => Bun.JSON5.parse(largeJson5));
|
||||
bench("json5.parse", () => JSON5.parse(largeJson5));
|
||||
});
|
||||
|
||||
// -- stringify benchmarks --
|
||||
|
||||
group(`stringify small (${sizeLabel(stringify(smallObject).length)})`, () => {
|
||||
if (isBun) bench("Bun.JSON5.stringify", () => Bun.JSON5.stringify(smallObject));
|
||||
bench("json5.stringify", () => JSON5.stringify(smallObject));
|
||||
});
|
||||
|
||||
group(`stringify large (${sizeLabel(stringify(largeObject).length)})`, () => {
|
||||
if (isBun) bench("Bun.JSON5.stringify", () => Bun.JSON5.stringify(largeObject));
|
||||
bench("json5.stringify", () => JSON5.stringify(largeObject));
|
||||
});
|
||||
|
||||
await run();
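The file above guards every Bun-specific bench behind the `Bun.JSON5` check; a minimal sketch of the call shape it probes, falling back to the npm json5 package when that global is absent (the sample source string is illustrative):

import JSON5 from "json5";

const source = "{ unquoted: 'value', hex: 0x1A, trailing: [1, 2,], }";

// Pick whichever implementation is available; both are assumed to expose parse/stringify.
const impl = typeof Bun !== "undefined" && Bun.JSON5 ? Bun.JSON5 : JSON5;
const parsed = impl.parse(source);
const text = impl.stringify(parsed);

console.log(parsed.hex, text);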
bench/json5/package.json | 7 (new file)
@@ -0,0 +1,7 @@
{
  "name": "json5-benchmark",
  "version": "1.0.0",
  "dependencies": {
    "json5": "^2.2.3"
  }
}
@@ -14,13 +14,18 @@
    "fast-glob": "3.3.1",
    "fastify": "^5.0.0",
    "fdir": "^6.1.0",
    "mitata": "^1.0.25",
    "react": "^18.3.1",
    "react-dom": "^18.3.1",
    "marked": "^17.0.1",
    "mitata": "1.0.20",
    "react": "^19",
    "react-dom": "^19",
    "react-markdown": "^9.0.3",
    "remark": "^15.0.1",
    "remark-html": "^16.0.1",
    "string-width": "7.1.0",
    "strip-ansi": "^7.1.0",
    "tar": "^7.4.3",
    "tinycolor2": "^1.6.0",
    "wrap-ansi": "^9.0.0",
    "zx": "^7.2.3"
  },
  "scripts": {
@@ -14,3 +14,4 @@ export function run(opts = {}) {

export const bench = Mitata.bench;
export const group = Mitata.group;
export const summary = Mitata.summary;
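A minimal sketch of how the new `summary` export is consumed by the added benchmark files; the relative import path mirrors the snippets' layout and the two workloads are placeholders:

import { bench, run, summary } from "../runner.mjs";

const workloadA = () => JSON.stringify({ hello: "world" });
const workloadB = () => JSON.parse('{"hello":"world"}');

summary(() => {
  bench("implementation A", workloadA);
  bench("implementation B", workloadB);
});

await run();

Wrapping related `bench` calls in `summary` makes mitata report them relative to one another, which is how the markdown and wrap-ansi benchmarks compare the Bun built-ins against their npm counterparts.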
bench/snippets/buffer-from-array.mjs | 38 (new file)
@@ -0,0 +1,38 @@
// @runtime bun,node
import { Buffer } from "node:buffer";
import { bench, group, run } from "../runner.mjs";

// Small arrays (common case)
const int32Array8 = [1, 2, 3, 4, 5, 6, 7, 8];
const doubleArray8 = [1.5, 2.5, 3.5, 4.5, 5.5, 6.5, 7.5, 8.5];

// Medium arrays
const int32Array64 = Array.from({ length: 64 }, (_, i) => i % 256);
const doubleArray64 = Array.from({ length: 64 }, (_, i) => i + 0.5);

// Large arrays
const int32Array1024 = Array.from({ length: 1024 }, (_, i) => i % 256);

// Array-like objects (fallback path)
const arrayLike8 = { 0: 1, 1: 2, 2: 3, 3: 4, 4: 5, 5: 6, 6: 7, 7: 8, length: 8 };

// Empty array
const emptyArray = [];

group("Buffer.from(array) - Int32 arrays", () => {
  bench("Buffer.from(int32[8])", () => Buffer.from(int32Array8));
  bench("Buffer.from(int32[64])", () => Buffer.from(int32Array64));
  bench("Buffer.from(int32[1024])", () => Buffer.from(int32Array1024));
});

group("Buffer.from(array) - Double arrays", () => {
  bench("Buffer.from(double[8])", () => Buffer.from(doubleArray8));
  bench("Buffer.from(double[64])", () => Buffer.from(doubleArray64));
});

group("Buffer.from(array) - Edge cases", () => {
  bench("Buffer.from([])", () => Buffer.from(emptyArray));
  bench("Buffer.from(arrayLike[8])", () => Buffer.from(arrayLike8));
});

await run();
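The edge-case group contrasts a real Array with a plain array-like object; a small sketch of the two input shapes (the fast-path/fallback framing comes from the benchmark's own comments):

import { Buffer } from "node:buffer";

// A real Array is a candidate for the optimized bulk-copy path.
const fromArray = Buffer.from([1, 2, 3]);

// An object with only indexed properties and a length is treated as array-like,
// the "fallback path" the benchmark refers to.
const fromArrayLike = Buffer.from({ 0: 1, 1: 2, 2: 3, length: 3 });

console.log(fromArray.equals(fromArrayLike)); // true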
bench/snippets/markdown-react.mjs | 92 (new file)
@@ -0,0 +1,92 @@
|
||||
import React from "react";
|
||||
import { renderToString } from "react-dom/server";
|
||||
import ReactMarkdown from "react-markdown";
|
||||
|
||||
const markdown = `# Project README
|
||||
|
||||
## Introduction
|
||||
|
||||
This is a medium-sized markdown document that includes **bold text**, *italic text*,
|
||||
and \`inline code\`. It also has [links](https://example.com) and various formatting.
|
||||
|
||||
## Features
|
||||
|
||||
- Feature one with **bold**
|
||||
- Feature two with *emphasis*
|
||||
- Feature three with \`code\`
|
||||
- Feature four with [a link](https://example.com)
|
||||
|
||||
## Code Example
|
||||
|
||||
\`\`\`javascript
|
||||
function hello() {
|
||||
console.log("Hello, world!");
|
||||
return 42;
|
||||
}
|
||||
|
||||
const result = hello();
|
||||
\`\`\`
|
||||
|
||||
## Table
|
||||
|
||||
| Name | Value | Description |
|
||||
|------|-------|-------------|
|
||||
| foo | 1 | First item |
|
||||
| bar | 2 | Second item |
|
||||
| baz | 3 | Third item |
|
||||
|
||||
## Blockquote
|
||||
|
||||
> This is a blockquote with **bold** and *italic* text.
|
||||
> It spans multiple lines and contains a [link](https://example.com).
|
||||
|
||||
---
|
||||
|
||||
### Nested Lists
|
||||
|
||||
1. First ordered item
|
||||
- Nested unordered
|
||||
- Another nested
|
||||
2. Second ordered item
|
||||
1. Nested ordered
|
||||
2. Another nested
|
||||
3. Third ordered item
|
||||
|
||||
Some final paragraph with ~~strikethrough~~ text and more **formatting**.
|
||||
`;
|
||||
|
||||
// Verify outputs are roughly the same
|
||||
const bunHtml = renderToString(Bun.markdown.react(markdown));
|
||||
const reactMarkdownHtml = renderToString(React.createElement(ReactMarkdown, { children: markdown }));
|
||||
|
||||
console.log("=== Bun.markdown.react output ===");
|
||||
console.log(bunHtml.slice(0, 500));
|
||||
console.log(`... (${bunHtml.length} chars total)\n`);
|
||||
|
||||
console.log("=== react-markdown output ===");
|
||||
console.log(reactMarkdownHtml.slice(0, 500));
|
||||
console.log(`... (${reactMarkdownHtml.length} chars total)\n`);
|
||||
|
||||
const server = Bun.serve({
|
||||
port: 0,
|
||||
routes: {
|
||||
"/bun-markdown": () => {
|
||||
return new Response(renderToString(Bun.markdown.react(markdown)), {
|
||||
headers: { "Content-Type": "text/html" },
|
||||
});
|
||||
},
|
||||
"/react-markdown": () => {
|
||||
return new Response(renderToString(React.createElement(ReactMarkdown, { children: markdown })), {
|
||||
headers: { "Content-Type": "text/html" },
|
||||
});
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
console.log(`Server listening on ${server.url}`);
|
||||
console.log(` ${server.url}bun-markdown`);
|
||||
console.log(` ${server.url}react-markdown`);
|
||||
console.log();
|
||||
console.log("Run:");
|
||||
console.log(` oha -c 20 -z 5s ${server.url}bun-markdown`);
|
||||
console.log(` oha -c 20 -z 5s ${server.url}react-markdown`);
|
||||
bench/snippets/markdown.mjs | 159 (new file)
@@ -0,0 +1,159 @@
|
||||
import { marked } from "marked";
|
||||
import { remark } from "remark";
|
||||
import remarkHtml from "remark-html";
|
||||
import { bench, run, summary } from "../runner.mjs";
|
||||
|
||||
const remarkProcessor = remark().use(remarkHtml);
|
||||
|
||||
const small = `# Hello World
|
||||
|
||||
This is a **bold** and *italic* paragraph with a [link](https://example.com).
|
||||
|
||||
- Item 1
|
||||
- Item 2
|
||||
- Item 3
|
||||
`;
|
||||
|
||||
const medium = `# Project README
|
||||
|
||||
## Introduction
|
||||
|
||||
This is a medium-sized markdown document that includes **bold text**, *italic text*,
|
||||
and \`inline code\`. It also has [links](https://example.com) and various formatting.
|
||||
|
||||
## Features
|
||||
|
||||
- Feature one with **bold**
|
||||
- Feature two with *emphasis*
|
||||
- Feature three with \`code\`
|
||||
- Feature four with [a link](https://example.com)
|
||||
|
||||
## Code Example
|
||||
|
||||
\`\`\`javascript
|
||||
function hello() {
|
||||
console.log("Hello, world!");
|
||||
return 42;
|
||||
}
|
||||
|
||||
const result = hello();
|
||||
\`\`\`
|
||||
|
||||
## Table
|
||||
|
||||
| Name | Value | Description |
|
||||
|------|-------|-------------|
|
||||
| foo | 1 | First item |
|
||||
| bar | 2 | Second item |
|
||||
| baz | 3 | Third item |
|
||||
|
||||
## Blockquote
|
||||
|
||||
> This is a blockquote with **bold** and *italic* text.
|
||||
> It spans multiple lines and contains a [link](https://example.com).
|
||||
|
||||
---
|
||||
|
||||
### Nested Lists
|
||||
|
||||
1. First ordered item
|
||||
- Nested unordered
|
||||
- Another nested
|
||||
2. Second ordered item
|
||||
1. Nested ordered
|
||||
2. Another nested
|
||||
3. Third ordered item
|
||||
|
||||
Some final paragraph with ~~strikethrough~~ text and more **formatting**.
|
||||
`;
|
||||
|
||||
const large = medium.repeat(20);
|
||||
|
||||
const renderCallbacks = {
|
||||
heading: (children, { level }) => `<h${level}>${children}</h${level}>`,
|
||||
paragraph: children => `<p>${children}</p>`,
|
||||
strong: children => `<strong>${children}</strong>`,
|
||||
emphasis: children => `<em>${children}</em>`,
|
||||
codespan: children => `<code>${children}</code>`,
|
||||
code: (children, { language }) =>
|
||||
language
|
||||
? `<pre><code class="language-${language}">${children}</code></pre>`
|
||||
: `<pre><code>${children}</code></pre>`,
|
||||
link: (children, { href, title }) =>
|
||||
title ? `<a href="${href}" title="${title}">${children}</a>` : `<a href="${href}">${children}</a>`,
|
||||
image: (children, { src, title }) =>
|
||||
title ? `<img src="${src}" alt="${children}" title="${title}" />` : `<img src="${src}" alt="${children}" />`,
|
||||
list: (children, { ordered, start }) => (ordered ? `<ol start="${start}">${children}</ol>` : `<ul>${children}</ul>`),
|
||||
listItem: children => `<li>${children}</li>`,
|
||||
blockquote: children => `<blockquote>${children}</blockquote>`,
|
||||
hr: () => `<hr />`,
|
||||
strikethrough: children => `<del>${children}</del>`,
|
||||
table: children => `<table>${children}</table>`,
|
||||
thead: children => `<thead>${children}</thead>`,
|
||||
tbody: children => `<tbody>${children}</tbody>`,
|
||||
tr: children => `<tr>${children}</tr>`,
|
||||
th: children => `<th>${children}</th>`,
|
||||
td: children => `<td>${children}</td>`,
|
||||
};
|
||||
|
||||
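// Assumed contract, inferred from the callback table above: Bun.markdown.render(source, callbacks)
// invokes one callback per node type with the already-rendered children string plus
// node-specific fields such as level, language, href/title, and ordered/start.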
summary(() => {
|
||||
if (typeof Bun !== "undefined" && Bun.markdown) {
|
||||
bench(`small (${small.length} chars) - Bun.markdown.html`, () => {
|
||||
return Bun.markdown.html(small);
|
||||
});
|
||||
|
||||
bench(`small (${small.length} chars) - Bun.markdown.render`, () => {
|
||||
return Bun.markdown.render(small, renderCallbacks);
|
||||
});
|
||||
}
|
||||
|
||||
bench(`small (${small.length} chars) - marked`, () => {
|
||||
return marked(small);
|
||||
});
|
||||
|
||||
bench(`small (${small.length} chars) - remark`, () => {
|
||||
return remarkProcessor.processSync(small).toString();
|
||||
});
|
||||
});
|
||||
|
||||
summary(() => {
|
||||
if (typeof Bun !== "undefined" && Bun.markdown) {
|
||||
bench(`medium (${medium.length} chars) - Bun.markdown.html`, () => {
|
||||
return Bun.markdown.html(medium);
|
||||
});
|
||||
|
||||
bench(`medium (${medium.length} chars) - Bun.markdown.render`, () => {
|
||||
return Bun.markdown.render(medium, renderCallbacks);
|
||||
});
|
||||
}
|
||||
|
||||
bench(`medium (${medium.length} chars) - marked`, () => {
|
||||
return marked(medium);
|
||||
});
|
||||
|
||||
bench(`medium (${medium.length} chars) - remark`, () => {
|
||||
return remarkProcessor.processSync(medium).toString();
|
||||
});
|
||||
});
|
||||
|
||||
summary(() => {
|
||||
if (typeof Bun !== "undefined" && Bun.markdown) {
|
||||
bench(`large (${large.length} chars) - Bun.markdown.html`, () => {
|
||||
return Bun.markdown.html(large);
|
||||
});
|
||||
|
||||
bench(`large (${large.length} chars) - Bun.markdown.render`, () => {
|
||||
return Bun.markdown.render(large, renderCallbacks);
|
||||
});
|
||||
}
|
||||
|
||||
bench(`large (${large.length} chars) - marked`, () => {
|
||||
return marked(large);
|
||||
});
|
||||
|
||||
bench(`large (${large.length} chars) - remark`, () => {
|
||||
return remarkProcessor.processSync(large).toString();
|
||||
});
|
||||
});
|
||||
|
||||
await run();
|
||||
bench/snippets/spread-set.mjs | 20 (new file)
@@ -0,0 +1,20 @@
// Benchmark for [...set] optimization (WebKit#56539)
// https://github.com/WebKit/WebKit/pull/56539
import { bench, run } from "../runner.mjs";

const intSet10 = new Set([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]);
const intSet100 = new Set(Array.from({ length: 100 }, (_, i) => i));
const strSet10 = new Set(Array.from({ length: 10 }, (_, i) => `key-${i}`));
const strSet100 = new Set(Array.from({ length: 100 }, (_, i) => `key-${i}`));

const objSet10 = new Set(Array.from({ length: 10 }, (_, i) => ({ id: i })));
const objSet100 = new Set(Array.from({ length: 100 }, (_, i) => ({ id: i })));

bench("[...set] - integers (10)", () => [...intSet10]);
bench("[...set] - integers (100)", () => [...intSet100]);
bench("[...set] - strings (10)", () => [...strSet10]);
bench("[...set] - strings (100)", () => [...strSet100]);
bench("[...set] - objects (10)", () => [...objSet10]);
bench("[...set] - objects (100)", () => [...objSet100]);

await run();
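For context, the referenced WebKit change adds a fast path for spreading a Set; the result must stay equivalent to explicit iteration. A sketch of that equivalence:

const set = new Set(Array.from({ length: 100 }, (_, i) => i));

// What the benchmark times:
const viaSpread = [...set];

// The explicit iteration it must match element for element:
const viaLoop = [];
for (const value of set) viaLoop.push(value);

console.log(viaSpread.length === viaLoop.length); // true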
bench/snippets/wrap-ansi.js | 103 (new file)
@@ -0,0 +1,103 @@
|
||||
import wrapAnsi from "wrap-ansi";
|
||||
import { bench, run, summary } from "../runner.mjs";
|
||||
|
||||
// Test fixtures
|
||||
const shortText = "The quick brown fox jumped over the lazy dog.";
|
||||
const mediumText = "The quick brown fox jumped over the lazy dog and then ran away with the unicorn. ".repeat(10);
|
||||
const longText = "The quick brown fox jumped over the lazy dog and then ran away with the unicorn. ".repeat(100);
|
||||
|
||||
// ANSI colored text
|
||||
const red = s => `\u001B[31m${s}\u001B[39m`;
|
||||
const green = s => `\u001B[32m${s}\u001B[39m`;
|
||||
const blue = s => `\u001B[34m${s}\u001B[39m`;
|
||||
|
||||
const coloredShort = `The quick ${red("brown fox")} jumped over the ${green("lazy dog")}.`;
|
||||
const coloredMedium =
|
||||
`The quick ${red("brown fox jumped over")} the ${green("lazy dog and then ran away")} with the ${blue("unicorn")}. `.repeat(
|
||||
10,
|
||||
);
|
||||
const coloredLong =
|
||||
`The quick ${red("brown fox jumped over")} the ${green("lazy dog and then ran away")} with the ${blue("unicorn")}. `.repeat(
|
||||
100,
|
||||
);
|
||||
|
||||
// Full-width characters (Japanese)
|
||||
const japaneseText = "日本語のテキストを折り返すテストです。全角文字は幅2としてカウントされます。".repeat(5);
|
||||
|
||||
// Emoji text
|
||||
const emojiText = "Hello 👋 World 🌍! Let's test 🧪 some emoji 😀 wrapping 📦!".repeat(5);
|
||||
|
||||
// Hyperlink text
|
||||
const hyperlinkText = "Check out \u001B]8;;https://bun.sh\u0007Bun\u001B]8;;\u0007, it's fast! ".repeat(10);
|
||||
|
||||
// Options
|
||||
const hardOpts = { hard: true };
|
||||
const noTrimOpts = { trim: false };
|
||||
|
||||
// Basic text benchmarks
|
||||
summary(() => {
|
||||
bench("Short text (45 chars) - npm", () => wrapAnsi(shortText, 20));
|
||||
bench("Short text (45 chars) - Bun", () => Bun.wrapAnsi(shortText, 20));
|
||||
});
|
||||
|
||||
summary(() => {
|
||||
bench("Medium text (810 chars) - npm", () => wrapAnsi(mediumText, 40));
|
||||
bench("Medium text (810 chars) - Bun", () => Bun.wrapAnsi(mediumText, 40));
|
||||
});
|
||||
|
||||
summary(() => {
|
||||
bench("Long text (8100 chars) - npm", () => wrapAnsi(longText, 80));
|
||||
bench("Long text (8100 chars) - Bun", () => Bun.wrapAnsi(longText, 80));
|
||||
});
|
||||
|
||||
// ANSI colored text benchmarks
|
||||
summary(() => {
|
||||
bench("Colored short - npm", () => wrapAnsi(coloredShort, 20));
|
||||
bench("Colored short - Bun", () => Bun.wrapAnsi(coloredShort, 20));
|
||||
});
|
||||
|
||||
summary(() => {
|
||||
bench("Colored medium - npm", () => wrapAnsi(coloredMedium, 40));
|
||||
bench("Colored medium - Bun", () => Bun.wrapAnsi(coloredMedium, 40));
|
||||
});
|
||||
|
||||
summary(() => {
|
||||
bench("Colored long - npm", () => wrapAnsi(coloredLong, 80));
|
||||
bench("Colored long - Bun", () => Bun.wrapAnsi(coloredLong, 80));
|
||||
});
|
||||
|
||||
// Hard wrap benchmarks
|
||||
summary(() => {
|
||||
bench("Hard wrap long - npm", () => wrapAnsi(longText, 80, hardOpts));
|
||||
bench("Hard wrap long - Bun", () => Bun.wrapAnsi(longText, 80, hardOpts));
|
||||
});
|
||||
|
||||
summary(() => {
|
||||
bench("Hard wrap colored - npm", () => wrapAnsi(coloredLong, 80, hardOpts));
|
||||
bench("Hard wrap colored - Bun", () => Bun.wrapAnsi(coloredLong, 80, hardOpts));
|
||||
});
|
||||
|
||||
// Unicode benchmarks
|
||||
summary(() => {
|
||||
bench("Japanese (full-width) - npm", () => wrapAnsi(japaneseText, 40));
|
||||
bench("Japanese (full-width) - Bun", () => Bun.wrapAnsi(japaneseText, 40));
|
||||
});
|
||||
|
||||
summary(() => {
|
||||
bench("Emoji text - npm", () => wrapAnsi(emojiText, 30));
|
||||
bench("Emoji text - Bun", () => Bun.wrapAnsi(emojiText, 30));
|
||||
});
|
||||
|
||||
// Hyperlink benchmarks
|
||||
summary(() => {
|
||||
bench("Hyperlink (OSC 8) - npm", () => wrapAnsi(hyperlinkText, 40));
|
||||
bench("Hyperlink (OSC 8) - Bun", () => Bun.wrapAnsi(hyperlinkText, 40));
|
||||
});
|
||||
|
||||
// No trim option
|
||||
summary(() => {
|
||||
bench("No trim long - npm", () => wrapAnsi(longText, 80, noTrimOpts));
|
||||
bench("No trim long - Bun", () => Bun.wrapAnsi(longText, 80, noTrimOpts));
|
||||
});
|
||||
|
||||
await run();
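These benchmarks assume Bun.wrapAnsi accepts the same (text, columns, options) arguments as the npm wrap-ansi package, including the hard and trim options used above; a minimal sketch of the pairing being measured:

import wrapAnsi from "wrap-ansi";

const text = "The quick \u001B[31mbrown fox\u001B[39m jumped over the lazy dog.";
const viaNpm = wrapAnsi(text, 20, { hard: true });
// Fall back to the npm result when not running under Bun.
const viaBun = typeof Bun !== "undefined" && Bun.wrapAnsi ? Bun.wrapAnsi(text, 20, { hard: true }) : viaNpm;

console.log(viaNpm);
console.log(viaBun);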
build.zig | 150
@@ -34,6 +34,7 @@ const BunBuildOptions = struct {
|
||||
enable_asan: bool,
|
||||
enable_fuzzilli: bool,
|
||||
enable_valgrind: bool,
|
||||
enable_tinycc: bool,
|
||||
use_mimalloc: bool,
|
||||
tracy_callstack_depth: u16,
|
||||
reported_nodejs_version: Version,
|
||||
@@ -84,6 +85,7 @@ const BunBuildOptions = struct {
|
||||
opts.addOption(bool, "enable_asan", this.enable_asan);
|
||||
opts.addOption(bool, "enable_fuzzilli", this.enable_fuzzilli);
|
||||
opts.addOption(bool, "enable_valgrind", this.enable_valgrind);
|
||||
opts.addOption(bool, "enable_tinycc", this.enable_tinycc);
|
||||
opts.addOption(bool, "use_mimalloc", this.use_mimalloc);
|
||||
opts.addOption([]const u8, "reported_nodejs_version", b.fmt("{f}", .{this.reported_nodejs_version}));
|
||||
opts.addOption(bool, "zig_self_hosted_backend", this.no_llvm);
|
||||
@@ -259,6 +261,7 @@ pub fn build(b: *Build) !void {
|
||||
.enable_asan = b.option(bool, "enable_asan", "Enable asan") orelse false,
|
||||
.enable_fuzzilli = b.option(bool, "enable_fuzzilli", "Enable fuzzilli instrumentation") orelse false,
|
||||
.enable_valgrind = b.option(bool, "enable_valgrind", "Enable valgrind") orelse false,
|
||||
.enable_tinycc = b.option(bool, "enable_tinycc", "Enable TinyCC for FFI JIT compilation") orelse true,
|
||||
.use_mimalloc = b.option(bool, "use_mimalloc", "Use mimalloc as default allocator") orelse false,
|
||||
.llvm_codegen_threads = b.option(u32, "llvm_codegen_threads", "Number of threads to use for LLVM codegen") orelse 1,
|
||||
};
|
||||
@@ -342,6 +345,7 @@ pub fn build(b: *Build) !void {
|
||||
const step = b.step("check-debug", "Check for semantic analysis errors on some platforms");
|
||||
addMultiCheck(b, step, build_options, &.{
|
||||
.{ .os = .windows, .arch = .x86_64 },
|
||||
.{ .os = .windows, .arch = .aarch64 },
|
||||
.{ .os = .mac, .arch = .aarch64 },
|
||||
.{ .os = .linux, .arch = .x86_64 },
|
||||
}, &.{.Debug});
|
||||
@@ -352,6 +356,7 @@ pub fn build(b: *Build) !void {
|
||||
const step = b.step("check-all", "Check for semantic analysis errors on all supported platforms");
|
||||
addMultiCheck(b, step, build_options, &.{
|
||||
.{ .os = .windows, .arch = .x86_64 },
|
||||
.{ .os = .windows, .arch = .aarch64 },
|
||||
.{ .os = .mac, .arch = .x86_64 },
|
||||
.{ .os = .mac, .arch = .aarch64 },
|
||||
.{ .os = .linux, .arch = .x86_64 },
|
||||
@@ -366,6 +371,7 @@ pub fn build(b: *Build) !void {
|
||||
const step = b.step("check-all-debug", "Check for semantic analysis errors on all supported platforms in debug mode");
|
||||
addMultiCheck(b, step, build_options, &.{
|
||||
.{ .os = .windows, .arch = .x86_64 },
|
||||
.{ .os = .windows, .arch = .aarch64 },
|
||||
.{ .os = .mac, .arch = .x86_64 },
|
||||
.{ .os = .mac, .arch = .aarch64 },
|
||||
.{ .os = .linux, .arch = .x86_64 },
|
||||
@@ -380,12 +386,14 @@ pub fn build(b: *Build) !void {
|
||||
const step = b.step("check-windows", "Check for semantic analysis errors on Windows");
|
||||
addMultiCheck(b, step, build_options, &.{
|
||||
.{ .os = .windows, .arch = .x86_64 },
|
||||
.{ .os = .windows, .arch = .aarch64 },
|
||||
}, &.{ .Debug, .ReleaseFast });
|
||||
}
|
||||
{
|
||||
const step = b.step("check-windows-debug", "Check for semantic analysis errors on Windows");
|
||||
addMultiCheck(b, step, build_options, &.{
|
||||
.{ .os = .windows, .arch = .x86_64 },
|
||||
.{ .os = .windows, .arch = .aarch64 },
|
||||
}, &.{.Debug});
|
||||
}
|
||||
{
|
||||
@@ -422,6 +430,7 @@ pub fn build(b: *Build) !void {
|
||||
const step = b.step("translate-c", "Copy generated translated-c-headers.zig to zig-out");
|
||||
for ([_]TargetDescription{
|
||||
.{ .os = .windows, .arch = .x86_64 },
|
||||
.{ .os = .windows, .arch = .aarch64 },
|
||||
.{ .os = .mac, .arch = .x86_64 },
|
||||
.{ .os = .mac, .arch = .aarch64 },
|
||||
.{ .os = .linux, .arch = .x86_64 },
|
||||
@@ -450,6 +459,146 @@ pub fn build(b: *Build) !void {
|
||||
// const run = b.addRunArtifact(exe);
|
||||
// step.dependOn(&run.step);
|
||||
}
|
||||
|
||||
// zig build generate-grapheme-tables
|
||||
// Regenerates src/string/immutable/grapheme_tables.zig from the vendored uucode.
|
||||
// Run this when updating src/deps/uucode. Normal builds use the committed file.
|
||||
{
|
||||
const step = b.step("generate-grapheme-tables", "Regenerate grapheme property tables from vendored uucode");
|
||||
|
||||
// --- Phase 1: Build uucode tables (separate module graph, no tables dependency) ---
|
||||
const bt_config_mod = b.createModule(.{
|
||||
.root_source_file = b.path("src/deps/uucode/src/config.zig"),
|
||||
.target = b.graph.host,
|
||||
});
|
||||
const bt_types_mod = b.createModule(.{
|
||||
.root_source_file = b.path("src/deps/uucode/src/types.zig"),
|
||||
.target = b.graph.host,
|
||||
});
|
||||
bt_types_mod.addImport("config.zig", bt_config_mod);
|
||||
bt_config_mod.addImport("types.zig", bt_types_mod);
|
||||
|
||||
const bt_config_x_mod = b.createModule(.{
|
||||
.root_source_file = b.path("src/deps/uucode/src/x/config.x.zig"),
|
||||
.target = b.graph.host,
|
||||
});
|
||||
const bt_types_x_mod = b.createModule(.{
|
||||
.root_source_file = b.path("src/deps/uucode/src/x/types.x.zig"),
|
||||
.target = b.graph.host,
|
||||
});
|
||||
bt_types_x_mod.addImport("config.x.zig", bt_config_x_mod);
|
||||
bt_config_x_mod.addImport("types.x.zig", bt_types_x_mod);
|
||||
bt_config_x_mod.addImport("types.zig", bt_types_mod);
|
||||
bt_config_x_mod.addImport("config.zig", bt_config_mod);
|
||||
|
||||
const bt_build_config_mod = b.createModule(.{
|
||||
.root_source_file = b.path("src/unicode/uucode/uucode_config.zig"),
|
||||
.target = b.graph.host,
|
||||
});
|
||||
bt_build_config_mod.addImport("types.zig", bt_types_mod);
|
||||
bt_build_config_mod.addImport("config.zig", bt_config_mod);
|
||||
bt_build_config_mod.addImport("types.x.zig", bt_types_x_mod);
|
||||
bt_build_config_mod.addImport("config.x.zig", bt_config_x_mod);
|
||||
|
||||
const build_tables_mod = b.createModule(.{
|
||||
.root_source_file = b.path("src/deps/uucode/src/build/tables.zig"),
|
||||
.target = b.graph.host,
|
||||
.optimize = .Debug,
|
||||
});
|
||||
build_tables_mod.addImport("config.zig", bt_config_mod);
|
||||
build_tables_mod.addImport("build_config", bt_build_config_mod);
|
||||
build_tables_mod.addImport("types.zig", bt_types_mod);
|
||||
|
||||
const build_tables_exe = b.addExecutable(.{
|
||||
.name = "uucode_build_tables",
|
||||
.root_module = build_tables_mod,
|
||||
.use_llvm = true,
|
||||
});
|
||||
const run_build_tables = b.addRunArtifact(build_tables_exe);
|
||||
run_build_tables.setCwd(b.path("src/deps/uucode"));
|
||||
const tables_path = run_build_tables.addOutputFileArg("tables.zig");
|
||||
|
||||
// --- Phase 2: Build grapheme-gen with full uucode (separate module graph) ---
|
||||
const rt_config_mod = b.createModule(.{
|
||||
.root_source_file = b.path("src/deps/uucode/src/config.zig"),
|
||||
.target = b.graph.host,
|
||||
});
|
||||
const rt_types_mod = b.createModule(.{
|
||||
.root_source_file = b.path("src/deps/uucode/src/types.zig"),
|
||||
.target = b.graph.host,
|
||||
});
|
||||
rt_types_mod.addImport("config.zig", rt_config_mod);
|
||||
rt_config_mod.addImport("types.zig", rt_types_mod);
|
||||
|
||||
const rt_config_x_mod = b.createModule(.{
|
||||
.root_source_file = b.path("src/deps/uucode/src/x/config.x.zig"),
|
||||
.target = b.graph.host,
|
||||
});
|
||||
const rt_types_x_mod = b.createModule(.{
|
||||
.root_source_file = b.path("src/deps/uucode/src/x/types.x.zig"),
|
||||
.target = b.graph.host,
|
||||
});
|
||||
rt_types_x_mod.addImport("config.x.zig", rt_config_x_mod);
|
||||
rt_config_x_mod.addImport("types.x.zig", rt_types_x_mod);
|
||||
rt_config_x_mod.addImport("types.zig", rt_types_mod);
|
||||
rt_config_x_mod.addImport("config.zig", rt_config_mod);
|
||||
|
||||
const rt_build_config_mod = b.createModule(.{
|
||||
.root_source_file = b.path("src/unicode/uucode/uucode_config.zig"),
|
||||
.target = b.graph.host,
|
||||
});
|
||||
rt_build_config_mod.addImport("types.zig", rt_types_mod);
|
||||
rt_build_config_mod.addImport("config.zig", rt_config_mod);
|
||||
rt_build_config_mod.addImport("types.x.zig", rt_types_x_mod);
|
||||
rt_build_config_mod.addImport("config.x.zig", rt_config_x_mod);
|
||||
|
||||
const rt_tables_mod = b.createModule(.{
|
||||
.root_source_file = tables_path,
|
||||
.target = b.graph.host,
|
||||
});
|
||||
rt_tables_mod.addImport("types.zig", rt_types_mod);
|
||||
rt_tables_mod.addImport("types.x.zig", rt_types_x_mod);
|
||||
rt_tables_mod.addImport("config.zig", rt_config_mod);
|
||||
rt_tables_mod.addImport("build_config", rt_build_config_mod);
|
||||
|
||||
const rt_get_mod = b.createModule(.{
|
||||
.root_source_file = b.path("src/deps/uucode/src/get.zig"),
|
||||
.target = b.graph.host,
|
||||
});
|
||||
rt_get_mod.addImport("types.zig", rt_types_mod);
|
||||
rt_get_mod.addImport("tables", rt_tables_mod);
|
||||
rt_types_mod.addImport("get.zig", rt_get_mod);
|
||||
|
||||
const uucode_mod = b.createModule(.{
|
||||
.root_source_file = b.path("src/deps/uucode/src/root.zig"),
|
||||
.target = b.graph.host,
|
||||
});
|
||||
uucode_mod.addImport("types.zig", rt_types_mod);
|
||||
uucode_mod.addImport("config.zig", rt_config_mod);
|
||||
uucode_mod.addImport("types.x.zig", rt_types_x_mod);
|
||||
uucode_mod.addImport("tables", rt_tables_mod);
|
||||
uucode_mod.addImport("get.zig", rt_get_mod);
|
||||
|
||||
// grapheme_gen executable
|
||||
const gen_exe = b.addExecutable(.{
|
||||
.name = "grapheme-gen",
|
||||
.root_module = b.createModule(.{
|
||||
.root_source_file = b.path("src/unicode/uucode/grapheme_gen.zig"),
|
||||
.target = b.graph.host,
|
||||
.optimize = .Debug,
|
||||
.imports = &.{
|
||||
.{ .name = "uucode", .module = uucode_mod },
|
||||
},
|
||||
}),
|
||||
.use_llvm = true,
|
||||
});
|
||||
|
||||
const run_gen = b.addRunArtifact(gen_exe);
|
||||
const gen_output = run_gen.captureStdOut();
|
||||
|
||||
const install = b.addInstallFile(gen_output, "../src/string/immutable/grapheme_tables.zig");
|
||||
step.dependOn(&install.step);
|
||||
}
|
||||
}
|
||||
|
||||
const TargetDescription = struct {
|
||||
@@ -493,6 +642,7 @@ fn addMultiCheck(
|
||||
.no_llvm = root_build_options.no_llvm,
|
||||
.enable_asan = root_build_options.enable_asan,
|
||||
.enable_valgrind = root_build_options.enable_valgrind,
|
||||
.enable_tinycc = root_build_options.enable_tinycc,
|
||||
.enable_fuzzilli = root_build_options.enable_fuzzilli,
|
||||
.use_mimalloc = root_build_options.use_mimalloc,
|
||||
.override_no_export_cpp_apis = root_build_options.override_no_export_cpp_apis,
|
||||
|
||||
@@ -21,6 +21,10 @@ endforeach()
|
||||
if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm|ARM|arm64|ARM64|aarch64|AARCH64")
|
||||
if(APPLE)
|
||||
register_compiler_flags(-mcpu=apple-m1)
|
||||
elseif(WIN32)
|
||||
# Windows ARM64: use /clang: prefix for clang-cl, skip for MSVC cl.exe subprojects
|
||||
# These flags are only understood by clang-cl, not MSVC cl.exe
|
||||
register_compiler_flags(/clang:-march=armv8-a+crc /clang:-mtune=ampere1)
|
||||
else()
|
||||
register_compiler_flags(-march=armv8-a+crc -mtune=ampere1)
|
||||
endif()
|
||||
@@ -242,10 +246,17 @@ if(UNIX)
|
||||
)
|
||||
endif()
|
||||
|
||||
register_compiler_flags(
|
||||
DESCRIPTION "Set C/C++ error limit"
|
||||
-ferror-limit=${ERROR_LIMIT}
|
||||
)
|
||||
if(WIN32)
|
||||
register_compiler_flags(
|
||||
DESCRIPTION "Set C/C++ error limit"
|
||||
/clang:-ferror-limit=${ERROR_LIMIT}
|
||||
)
|
||||
else()
|
||||
register_compiler_flags(
|
||||
DESCRIPTION "Set C/C++ error limit"
|
||||
-ferror-limit=${ERROR_LIMIT}
|
||||
)
|
||||
endif()
|
||||
|
||||
# --- LTO ---
|
||||
if(ENABLE_LTO)
|
||||
|
||||
@@ -106,9 +106,9 @@ else()
|
||||
endif()
|
||||
|
||||
if(CMAKE_HOST_SYSTEM_PROCESSOR MATCHES "arm64|ARM64|aarch64|AARCH64")
|
||||
set(HOST_OS "aarch64")
|
||||
set(HOST_ARCH "aarch64")
|
||||
elseif(CMAKE_HOST_SYSTEM_PROCESSOR MATCHES "x86_64|X86_64|x64|X64|amd64|AMD64")
|
||||
set(HOST_OS "x64")
|
||||
set(HOST_ARCH "x64")
|
||||
else()
|
||||
unsupported(CMAKE_HOST_SYSTEM_PROCESSOR)
|
||||
endif()
|
||||
@@ -433,6 +433,33 @@ function(register_command)
|
||||
list(APPEND CMD_EFFECTIVE_DEPENDS ${CMD_EXECUTABLE})
|
||||
endif()
|
||||
|
||||
# SKIP_CODEGEN: Skip commands that use BUN_EXECUTABLE if all outputs exist
|
||||
# This is used for Windows ARM64 builds where x64 bun crashes under emulation
|
||||
if(SKIP_CODEGEN AND CMD_EXECUTABLE STREQUAL "${BUN_EXECUTABLE}")
|
||||
set(ALL_OUTPUTS_EXIST TRUE)
|
||||
foreach(output ${CMD_OUTPUTS})
|
||||
if(NOT EXISTS ${output})
|
||||
set(ALL_OUTPUTS_EXIST FALSE)
|
||||
break()
|
||||
endif()
|
||||
endforeach()
|
||||
if(ALL_OUTPUTS_EXIST AND CMD_OUTPUTS)
|
||||
message(STATUS "SKIP_CODEGEN: Skipping ${CMD_TARGET} (outputs exist)")
|
||||
if(CMD_TARGET)
|
||||
add_custom_target(${CMD_TARGET})
|
||||
endif()
|
||||
return()
|
||||
elseif(NOT CMD_OUTPUTS)
|
||||
message(STATUS "SKIP_CODEGEN: Skipping ${CMD_TARGET} (no outputs)")
|
||||
if(CMD_TARGET)
|
||||
add_custom_target(${CMD_TARGET})
|
||||
endif()
|
||||
return()
|
||||
else()
|
||||
message(FATAL_ERROR "SKIP_CODEGEN: Cannot skip ${CMD_TARGET} - missing outputs. Run codegen on x64 first.")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
foreach(target ${CMD_TARGETS})
|
||||
if(target MATCHES "/|\\\\")
|
||||
message(FATAL_ERROR "register_command: TARGETS contains \"${target}\", if it's a path add it to SOURCES instead")
|
||||
@@ -650,6 +677,7 @@ function(register_bun_install)
|
||||
${NPM_CWD}
|
||||
COMMAND
|
||||
${BUN_EXECUTABLE}
|
||||
${BUN_FLAGS}
|
||||
install
|
||||
--frozen-lockfile
|
||||
SOURCES
|
||||
@@ -757,7 +785,7 @@ function(register_cmake_command)
|
||||
set(MAKE_EFFECTIVE_ARGS -B${MAKE_BUILD_PATH} ${CMAKE_ARGS})
|
||||
|
||||
set(setFlags GENERATOR BUILD_TYPE)
|
||||
set(appendFlags C_FLAGS CXX_FLAGS LINKER_FLAGS)
|
||||
set(appendFlags C_FLAGS CXX_FLAGS LINKER_FLAGS STATIC_LINKER_FLAGS EXE_LINKER_FLAGS SHARED_LINKER_FLAGS MODULE_LINKER_FLAGS)
|
||||
set(specialFlags POSITION_INDEPENDENT_CODE)
|
||||
set(flags ${setFlags} ${appendFlags} ${specialFlags})
|
||||
|
||||
@@ -803,6 +831,14 @@ function(register_cmake_command)
|
||||
list(APPEND MAKE_EFFECTIVE_ARGS "-DCMAKE_${flag}=${MAKE_${flag}}")
|
||||
endforeach()
|
||||
|
||||
# Workaround for a CMake 4.1.0 bug: force the correct machine type for Windows ARM64
|
||||
# Use toolchain file and set CMP0197 policy to prevent duplicate /machine: flags
|
||||
if(WIN32 AND CMAKE_SYSTEM_PROCESSOR MATCHES "ARM64|aarch64|AARCH64")
|
||||
list(APPEND MAKE_EFFECTIVE_ARGS "-DCMAKE_TOOLCHAIN_FILE=${CWD}/cmake/toolchains/windows-aarch64.cmake")
|
||||
list(APPEND MAKE_EFFECTIVE_ARGS "-DCMAKE_POLICY_DEFAULT_CMP0197=NEW")
|
||||
list(APPEND MAKE_EFFECTIVE_ARGS "-DCMAKE_PROJECT_INCLUDE=${CWD}/cmake/arm64-static-lib-fix.cmake")
|
||||
endif()
|
||||
|
||||
if(DEFINED FRESH)
|
||||
list(APPEND MAKE_EFFECTIVE_ARGS --fresh)
|
||||
endif()
|
||||
|
||||
@@ -4,6 +4,7 @@ endif()
|
||||
|
||||
optionx(BUN_LINK_ONLY BOOL "If only the linking step should be built" DEFAULT OFF)
|
||||
optionx(BUN_CPP_ONLY BOOL "If only the C++ part of Bun should be built" DEFAULT OFF)
|
||||
optionx(SKIP_CODEGEN BOOL "Skip JavaScript codegen (for Windows ARM64 debug)" DEFAULT OFF)
|
||||
|
||||
optionx(BUILDKITE BOOL "If Buildkite is enabled" DEFAULT OFF)
|
||||
optionx(GITHUB_ACTIONS BOOL "If GitHub Actions is enabled" DEFAULT OFF)
|
||||
@@ -49,7 +50,7 @@ else()
|
||||
message(FATAL_ERROR "Unsupported operating system: ${CMAKE_SYSTEM_NAME}")
|
||||
endif()
|
||||
|
||||
if(CMAKE_SYSTEM_PROCESSOR MATCHES "aarch64|arm64|arm")
|
||||
if(CMAKE_SYSTEM_PROCESSOR MATCHES "aarch64|arm64|ARM64")
|
||||
setx(ARCH "aarch64")
|
||||
elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "amd64|x86_64|x64|AMD64")
|
||||
setx(ARCH "x64")
|
||||
@@ -57,6 +58,18 @@ else()
|
||||
message(FATAL_ERROR "Unsupported architecture: ${CMAKE_SYSTEM_PROCESSOR}")
|
||||
endif()
|
||||
|
||||
# CMake 4.0+ policy CMP0197 controls how MSVC machine type flags are handled
|
||||
# Setting to NEW prevents duplicate /machine: flags being added to linker commands
|
||||
if(WIN32 AND ARCH STREQUAL "aarch64")
|
||||
set(CMAKE_POLICY_DEFAULT_CMP0197 NEW)
|
||||
set(CMAKE_MSVC_CMP0197 NEW)
|
||||
# Set linker flags for exe/shared linking
|
||||
set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} /machine:ARM64")
|
||||
set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} /machine:ARM64")
|
||||
set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} /machine:ARM64")
|
||||
set(CMAKE_STATIC_LINKER_FLAGS "${CMAKE_STATIC_LINKER_FLAGS} /machine:ARM64")
|
||||
endif()
|
||||
|
||||
# Windows Code Signing Option
|
||||
if(WIN32)
|
||||
optionx(ENABLE_WINDOWS_CODESIGNING BOOL "Enable Windows code signing with DigiCert KeyLocker" DEFAULT OFF)
|
||||
@@ -199,6 +212,16 @@ optionx(USE_WEBKIT_ICU BOOL "Use the ICU libraries from WebKit" DEFAULT ${DEFAUL
|
||||
|
||||
optionx(ERROR_LIMIT STRING "Maximum number of errors to show when compiling C++ code" DEFAULT "100")
|
||||
|
||||
# TinyCC is used for FFI JIT compilation
|
||||
# Disable on Windows ARM64 where it's not yet supported
|
||||
if(WIN32 AND ARCH STREQUAL "aarch64")
|
||||
set(DEFAULT_ENABLE_TINYCC OFF)
|
||||
else()
|
||||
set(DEFAULT_ENABLE_TINYCC ON)
|
||||
endif()
|
||||
|
||||
optionx(ENABLE_TINYCC BOOL "Enable TinyCC for FFI JIT compilation" DEFAULT ${DEFAULT_ENABLE_TINYCC})
|
||||
|
||||
# This is not an `option` because setting this variable to OFF is experimental
|
||||
# and unsupported. This replaces the `use_mimalloc` variable previously in
|
||||
# bun.zig, and enables C++ code to also be aware of the option.
|
||||
|
||||
@@ -13,10 +13,7 @@
|
||||
},
|
||||
{
|
||||
"output": "JavaScriptSources.txt",
|
||||
"paths": [
|
||||
"src/js/**/*.{js,ts}",
|
||||
"src/install/PackageManager/scanner-entry.ts"
|
||||
]
|
||||
"paths": ["src/js/**/*.{js,ts}", "src/install/PackageManager/scanner-entry.ts"]
|
||||
},
|
||||
{
|
||||
"output": "JavaScriptCodegenSources.txt",
|
||||
|
||||
cmake/arm64-static-lib-fix.cmake | 8 (new file)
@@ -0,0 +1,8 @@
# This file is included after project() via CMAKE_PROJECT_INCLUDE
# It fixes the static library creation command to use ARM64 machine type

if(WIN32 AND CMAKE_SYSTEM_PROCESSOR STREQUAL "aarch64")
  # Override the static library creation commands to avoid spurious /machine:x64 flags
  set(CMAKE_C_CREATE_STATIC_LIBRARY "<CMAKE_AR> /nologo /machine:ARM64 /out:<TARGET> <OBJECTS>" CACHE STRING "" FORCE)
  set(CMAKE_CXX_CREATE_STATIC_LIBRARY "<CMAKE_AR> /nologo /machine:ARM64 /out:<TARGET> <OBJECTS>" CACHE STRING "" FORCE)
endif()
@@ -21,7 +21,12 @@ if(NOT DEFINED CMAKE_HOST_SYSTEM_PROCESSOR)
|
||||
endif()
|
||||
|
||||
if(CMAKE_HOST_SYSTEM_PROCESSOR MATCHES "arm64|ARM64|aarch64|AARCH64")
|
||||
set(ZIG_ARCH "aarch64")
|
||||
# Windows ARM64 can run x86_64 via emulation, and no native ARM64 Zig build exists yet
|
||||
if(CMAKE_HOST_WIN32)
|
||||
set(ZIG_ARCH "x86_64")
|
||||
else()
|
||||
set(ZIG_ARCH "aarch64")
|
||||
endif()
|
||||
elseif(CMAKE_HOST_SYSTEM_PROCESSOR MATCHES "amd64|AMD64|x86_64|X86_64|x64|X64")
|
||||
set(ZIG_ARCH "x86_64")
|
||||
else()
|
||||
|
||||
cmake/scripts/llvm-lib-arm64.bat | 34 (new file)
@@ -0,0 +1,34 @@
|
||||
@echo off
|
||||
setlocal enabledelayedexpansion
|
||||
|
||||
REM Wrapper for llvm-lib that strips conflicting /machine:x64 flag for ARM64 builds
|
||||
REM This is a workaround for a CMake 4.1.0 bug
|
||||
|
||||
REM Find llvm-lib.exe - check LLVM_LIB env var, then PATH, then known locations
|
||||
if defined LLVM_LIB (
|
||||
set "LLVM_LIB_EXE=!LLVM_LIB!"
|
||||
) else (
|
||||
where llvm-lib.exe >nul 2>&1
|
||||
if !ERRORLEVEL! equ 0 (
|
||||
for /f "delims=" %%i in ('where llvm-lib.exe') do set "LLVM_LIB_EXE=%%i"
|
||||
) else if exist "C:\Program Files\LLVM\bin\llvm-lib.exe" (
|
||||
set "LLVM_LIB_EXE=C:\Program Files\LLVM\bin\llvm-lib.exe"
|
||||
) else (
|
||||
echo Error: Cannot find llvm-lib.exe. Set LLVM_LIB environment variable or add LLVM to PATH.
|
||||
exit /b 1
|
||||
)
|
||||
)
|
||||
|
||||
set "ARGS="
|
||||
|
||||
for %%a in (%*) do (
|
||||
set "ARG=%%a"
|
||||
if /i "!ARG!"=="/machine:x64" (
|
||||
REM Skip this argument
|
||||
) else (
|
||||
set "ARGS=!ARGS! %%a"
|
||||
)
|
||||
)
|
||||
|
||||
"!LLVM_LIB_EXE!" %ARGS%
|
||||
exit /b %ERRORLEVEL%
|
||||
cmake/scripts/llvm-lib-arm64.ps1 | 18 (new file)
@@ -0,0 +1,18 @@
# Wrapper for llvm-lib that strips the conflicting /machine:x64 flag for ARM64 builds.
# This is a workaround for a CMake 4.1.0 bug where both /machine:ARM64 and /machine:x64 are added.

# Find llvm-lib.exe - check the LLVM_LIB env var, then PATH, then known locations
if ($env:LLVM_LIB) {
  $llvmLib = $env:LLVM_LIB
} elseif (Get-Command llvm-lib.exe -ErrorAction SilentlyContinue) {
  $llvmLib = (Get-Command llvm-lib.exe).Source
} elseif (Test-Path "C:\Program Files\LLVM\bin\llvm-lib.exe") {
  $llvmLib = "C:\Program Files\LLVM\bin\llvm-lib.exe"
} else {
  Write-Error "Cannot find llvm-lib.exe. Set LLVM_LIB environment variable or add LLVM to PATH."
  exit 1
}

$filteredArgs = $args | Where-Object { $_ -ne "/machine:x64" }
& $llvmLib @filteredArgs
exit $LASTEXITCODE
cmake/scripts/llvm-lib-wrapper.bat | 34 (new file)
@@ -0,0 +1,34 @@
|
||||
@echo off
|
||||
setlocal enabledelayedexpansion
|
||||
|
||||
REM Wrapper for llvm-lib that strips conflicting /machine:x64 flag for ARM64 builds
|
||||
REM This is a workaround for a CMake 4.1.0 bug
|
||||
|
||||
REM Find llvm-lib.exe - check LLVM_LIB env var, then PATH, then known locations
|
||||
if defined LLVM_LIB (
|
||||
set "LLVM_LIB_EXE=!LLVM_LIB!"
|
||||
) else (
|
||||
where llvm-lib.exe >nul 2>&1
|
||||
if !ERRORLEVEL! equ 0 (
|
||||
for /f "delims=" %%i in ('where llvm-lib.exe') do set "LLVM_LIB_EXE=%%i"
|
||||
) else if exist "C:\Program Files\LLVM\bin\llvm-lib.exe" (
|
||||
set "LLVM_LIB_EXE=C:\Program Files\LLVM\bin\llvm-lib.exe"
|
||||
) else (
|
||||
echo Error: Cannot find llvm-lib.exe. Set LLVM_LIB environment variable or add LLVM to PATH.
|
||||
exit /b 1
|
||||
)
|
||||
)
|
||||
|
||||
set NEWARGS=
|
||||
|
||||
for %%a in (%*) do (
|
||||
set "ARG=%%a"
|
||||
if /i "!ARG!"=="/machine:x64" (
|
||||
REM Skip /machine:x64 argument
|
||||
) else (
|
||||
set "NEWARGS=!NEWARGS! %%a"
|
||||
)
|
||||
)
|
||||
|
||||
"!LLVM_LIB_EXE!" %NEWARGS%
|
||||
exit /b %ERRORLEVEL%
|
||||
@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
oven-sh/boringssl
COMMIT
f1ffd9e83d4f5c28a9c70d73f9a4e6fcf310062f
4f4f5ef8ebc6e23cbf393428f0ab1b526773f7ac
)

register_cmake_command(

@@ -57,13 +61,17 @@ set(BUN_DEPENDENCIES
LolHtml
Lshpack
Mimalloc
TinyCC
Zlib
LibArchive # must be loaded after zlib
HdrHistogram # must be loaded after zlib
Zstd
)

# TinyCC is optional - disabled on Windows ARM64 where it's not supported
if(ENABLE_TINYCC)
list(APPEND BUN_DEPENDENCIES TinyCC)
endif()

include(CloneZstd)

# --- Codegen ---
@@ -185,7 +189,7 @@ register_command(
CWD
${BUN_NODE_FALLBACKS_SOURCE}
COMMAND
${BUN_EXECUTABLE} run build-fallbacks
${BUN_EXECUTABLE} ${BUN_FLAGS} run build-fallbacks
${BUN_NODE_FALLBACKS_OUTPUT}
${BUN_NODE_FALLBACKS_SOURCES}
SOURCES
@@ -206,7 +210,7 @@ register_command(
CWD
${BUN_NODE_FALLBACKS_SOURCE}
COMMAND
${BUN_EXECUTABLE} build
${BUN_EXECUTABLE} ${BUN_FLAGS} build
${BUN_NODE_FALLBACKS_SOURCE}/node_modules/react-refresh/cjs/react-refresh-runtime.development.js
--outfile=${BUN_REACT_REFRESH_OUTPUT}
--target=browser
@@ -243,6 +247,7 @@ register_command(
"Generating ErrorCode.{zig,h}"
COMMAND
${BUN_EXECUTABLE}
${BUN_FLAGS}
run
${BUN_ERROR_CODE_SCRIPT}
${CODEGEN_PATH}
@@ -278,6 +283,7 @@ register_command(
"Generating ZigGeneratedClasses.{zig,cpp,h}"
COMMAND
${BUN_EXECUTABLE}
${BUN_FLAGS}
run
${BUN_ZIG_GENERATED_CLASSES_SCRIPT}
${BUN_ZIG_GENERATED_CLASSES_SOURCES}
@@ -328,6 +334,7 @@ register_command(
"Generating C++ --> Zig bindings"
COMMAND
${BUN_EXECUTABLE}
${BUN_FLAGS}
${CWD}/src/codegen/cppbind.ts
${CWD}/src
${CODEGEN_PATH}
@@ -345,6 +352,7 @@ register_command(
"Generating CI info"
COMMAND
${BUN_EXECUTABLE}
${BUN_FLAGS}
${CWD}/src/codegen/ci_info.ts
${CODEGEN_PATH}/ci_info.zig
SOURCES
@@ -353,24 +361,35 @@ register_command(
${BUN_CI_INFO_OUTPUTS}
)

register_command(
TARGET
bun-js-modules
COMMENT
"Generating JavaScript modules"
COMMAND
${BUN_EXECUTABLE}
run
if(SKIP_CODEGEN)
# Skip JavaScript codegen - useful for Windows ARM64 debug builds where bun crashes
message(STATUS "SKIP_CODEGEN is ON - skipping bun-js-modules codegen")
foreach(output ${BUN_JAVASCRIPT_OUTPUTS})
if(NOT EXISTS ${output})
message(FATAL_ERROR "SKIP_CODEGEN is ON but ${output} does not exist. Run codegen manually first.")
endif()
endforeach()
else()
register_command(
TARGET
bun-js-modules
COMMENT
"Generating JavaScript modules"
COMMAND
${BUN_EXECUTABLE}
${BUN_FLAGS}
run
${BUN_JAVASCRIPT_CODEGEN_SCRIPT}
--debug=${DEBUG}
${BUILD_PATH}
SOURCES
${BUN_JAVASCRIPT_SOURCES}
${BUN_JAVASCRIPT_CODEGEN_SOURCES}
${BUN_JAVASCRIPT_CODEGEN_SCRIPT}
--debug=${DEBUG}
${BUILD_PATH}
SOURCES
${BUN_JAVASCRIPT_SOURCES}
${BUN_JAVASCRIPT_CODEGEN_SOURCES}
${BUN_JAVASCRIPT_CODEGEN_SCRIPT}
OUTPUTS
${BUN_JAVASCRIPT_OUTPUTS}
)
OUTPUTS
${BUN_JAVASCRIPT_OUTPUTS}
)
endif()

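As a hedged sketch of how this escape hatch is meant to be used (the build directory is illustrative, not taken from this diff): run a normal configure/build once so the JavaScript codegen outputs exist, then reconfigure with the flag to reuse them.

```bash
# Assumes a prior full build already produced the bun-js-modules outputs.
cmake -B build/windows-arm64-debug -DSKIP_CODEGEN=ON .
cmake --build build/windows-arm64-debug
```
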
set(BUN_BAKE_RUNTIME_CODEGEN_SCRIPT ${CWD}/src/codegen/bake-codegen.ts)

@@ -392,6 +411,7 @@ register_command(
"Bundling Bake Runtime"
COMMAND
${BUN_EXECUTABLE}
${BUN_FLAGS}
run
${BUN_BAKE_RUNTIME_CODEGEN_SCRIPT}
--debug=${DEBUG}
@@ -415,7 +435,7 @@ string(REPLACE ";" "," BUN_BINDGENV2_SOURCES_COMMA_SEPARATED
"${BUN_BINDGENV2_SOURCES}")

execute_process(
COMMAND ${BUN_EXECUTABLE} run ${BUN_BINDGENV2_SCRIPT}
COMMAND ${BUN_EXECUTABLE} ${BUN_FLAGS} run ${BUN_BINDGENV2_SCRIPT}
--command=list-outputs
--sources=${BUN_BINDGENV2_SOURCES_COMMA_SEPARATED}
--codegen-path=${CODEGEN_PATH}
@@ -438,7 +458,7 @@ register_command(
COMMENT
"Generating bindings (v2)"
COMMAND
${BUN_EXECUTABLE} run ${BUN_BINDGENV2_SCRIPT}
${BUN_EXECUTABLE} ${BUN_FLAGS} run ${BUN_BINDGENV2_SCRIPT}
--command=generate
--codegen-path=${CODEGEN_PATH}
--sources=${BUN_BINDGENV2_SOURCES_COMMA_SEPARATED}
@@ -469,6 +489,7 @@ register_command(
"Processing \".bind.ts\" files"
COMMAND
${BUN_EXECUTABLE}
${BUN_FLAGS}
run
${BUN_BINDGEN_SCRIPT}
--debug=${DEBUG}
@@ -501,6 +522,7 @@ register_command(
"Generating JSSink.{cpp,h}"
COMMAND
${BUN_EXECUTABLE}
${BUN_FLAGS}
run
${BUN_JS_SINK_SCRIPT}
${CODEGEN_PATH}
@@ -573,6 +595,7 @@ foreach(i RANGE 0 ${BUN_OBJECT_LUT_SOURCES_MAX_INDEX})
${BUN_OBJECT_LUT_SOURCE}
COMMAND
${BUN_EXECUTABLE}
${BUN_FLAGS}
run
${BUN_OBJECT_LUT_SCRIPT}
${BUN_OBJECT_LUT_SOURCE}
@@ -656,6 +679,10 @@ endif()
if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm|ARM|arm64|ARM64|aarch64|AARCH64")
if(APPLE)
set(ZIG_CPU "apple_m1")
elseif(WIN32)
# Windows ARM64: use a specific CPU with NEON support
# Zig running under x64 emulation would detect wrong CPU with "native"
set(ZIG_CPU "cortex_a76")
else()
set(ZIG_CPU "native")
endif()
@@ -694,6 +721,7 @@ register_command(
-Denable_asan=$<IF:$<BOOL:${ENABLE_ZIG_ASAN}>,true,false>
-Denable_fuzzilli=$<IF:$<BOOL:${ENABLE_FUZZILLI}>,true,false>
-Denable_valgrind=$<IF:$<BOOL:${ENABLE_VALGRIND}>,true,false>
-Denable_tinycc=$<IF:$<BOOL:${ENABLE_TINYCC}>,true,false>
-Duse_mimalloc=$<IF:$<BOOL:${USE_MIMALLOC_AS_DEFAULT_ALLOCATOR}>,true,false>
-Dllvm_codegen_threads=${LLVM_ZIG_CODEGEN_THREADS}
-Dversion=${VERSION}
@@ -911,7 +939,7 @@ if(WIN32)
endif()

if(USE_MIMALLOC_AS_DEFAULT_ALLOCATOR)
target_compile_definitions(${bun} PRIVATE USE_MIMALLOC=1)
target_compile_definitions(${bun} PRIVATE USE_BUN_MIMALLOC=1)
endif()

target_compile_definitions(${bun} PRIVATE
@@ -1211,7 +1239,7 @@ if(BUN_LINK_ONLY)
WEBKIT_DOWNLOAD_URL=${WEBKIT_DOWNLOAD_URL}
WEBKIT_VERSION=${WEBKIT_VERSION}
ZIG_COMMIT=${ZIG_COMMIT}
${BUN_EXECUTABLE} ${CWD}/scripts/create-link-metadata.mjs ${BUILD_PATH} ${bun}
${BUN_EXECUTABLE} ${BUN_FLAGS} ${CWD}/scripts/create-link-metadata.mjs ${BUILD_PATH} ${bun}
SOURCES
${BUN_ZIG_OUTPUT}
${BUN_CPP_OUTPUT}
@@ -1225,6 +1253,7 @@ if(WIN32)
target_link_libraries(${bun} PRIVATE
${WEBKIT_LIB_PATH}/WTF.lib
${WEBKIT_LIB_PATH}/JavaScriptCore.lib
${WEBKIT_LIB_PATH}/bmalloc.lib
${WEBKIT_LIB_PATH}/sicudtd.lib
${WEBKIT_LIB_PATH}/sicuind.lib
${WEBKIT_LIB_PATH}/sicuucd.lib
@@ -1233,6 +1262,7 @@ if(WIN32)
target_link_libraries(${bun} PRIVATE
${WEBKIT_LIB_PATH}/WTF.lib
${WEBKIT_LIB_PATH}/JavaScriptCore.lib
${WEBKIT_LIB_PATH}/bmalloc.lib
${WEBKIT_LIB_PATH}/sicudt.lib
${WEBKIT_LIB_PATH}/sicuin.lib
${WEBKIT_LIB_PATH}/sicuuc.lib
@@ -1243,13 +1273,18 @@ else()
${WEBKIT_LIB_PATH}/libWTF.a
${WEBKIT_LIB_PATH}/libJavaScriptCore.a
)
if(NOT APPLE OR EXISTS ${WEBKIT_LIB_PATH}/libbmalloc.a)
if(WEBKIT_LOCAL OR NOT APPLE OR EXISTS ${WEBKIT_LIB_PATH}/libbmalloc.a)
target_link_libraries(${bun} PRIVATE ${WEBKIT_LIB_PATH}/libbmalloc.a)
endif()
endif()

include_directories(${WEBKIT_INCLUDE_PATH})

# When building with a local WebKit, ensure JSC is built before compiling Bun's C++ sources.
if(WEBKIT_LOCAL AND TARGET jsc)
add_dependencies(${bun} jsc)
endif()

# Include the generated dependency versions header
include_directories(${CMAKE_BINARY_DIR})

@@ -1294,9 +1329,14 @@ if(LINUX)
target_link_libraries(${bun} PUBLIC libatomic.so)
endif()

target_link_libraries(${bun} PRIVATE ${WEBKIT_LIB_PATH}/libicudata.a)
target_link_libraries(${bun} PRIVATE ${WEBKIT_LIB_PATH}/libicui18n.a)
target_link_libraries(${bun} PRIVATE ${WEBKIT_LIB_PATH}/libicuuc.a)
if(WEBKIT_LOCAL)
find_package(ICU REQUIRED COMPONENTS data i18n uc)
target_link_libraries(${bun} PRIVATE ICU::data ICU::i18n ICU::uc)
else()
target_link_libraries(${bun} PRIVATE ${WEBKIT_LIB_PATH}/libicudata.a)
target_link_libraries(${bun} PRIVATE ${WEBKIT_LIB_PATH}/libicui18n.a)
target_link_libraries(${bun} PRIVATE ${WEBKIT_LIB_PATH}/libicuuc.a)
endif()
endif()

if(WIN32)
@@ -1310,6 +1350,9 @@ if(WIN32)
wsock32 # ws2_32 required by TransmitFile aka sendfile on windows
delayimp.lib
)
# Required for static ICU linkage - without this, ICU headers expect DLL linkage
# which causes ABI mismatch and crashes (STATUS_STACK_BUFFER_OVERRUN)
target_compile_definitions(${bun} PRIVATE U_STATIC_IMPLEMENTATION)
endif()

# --- Packaging ---

@@ -20,6 +20,15 @@ set(HIGHWAY_CMAKE_ARGS
-DHWY_ENABLE_INSTALL=OFF
)

# On Windows ARM64 with clang-cl, the __ARM_NEON macro isn't defined by default
# but NEON intrinsics are supported. Define it so Highway can detect NEON support.
if(WIN32 AND CMAKE_SYSTEM_PROCESSOR MATCHES "ARM64|aarch64|AARCH64")
list(APPEND HIGHWAY_CMAKE_ARGS
-DCMAKE_C_FLAGS=-D__ARM_NEON=1
-DCMAKE_CXX_FLAGS=-D__ARM_NEON=1
)
endif()

register_cmake_command(
TARGET
highway

@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
cloudflare/lol-html
COMMIT
d64457d9ff0143deef025d5df7e8586092b9afb7
e9e16dca48dd4a8ffbc77642bc4be60407585f11
)

set(LOLHTML_CWD ${VENDOR_PATH}/lolhtml/c-api)
@@ -33,6 +33,37 @@ if (NOT WIN32)
set(RUSTFLAGS "-Cpanic=abort-Cdebuginfo=0-Cforce-unwind-tables=no-Copt-level=s")
endif()

# On Windows, ensure MSVC link.exe is used instead of Git's link.exe
set(LOLHTML_ENV
CARGO_TERM_COLOR=always
CARGO_TERM_VERBOSE=true
CARGO_TERM_DIAGNOSTIC=true
CARGO_ENCODED_RUSTFLAGS=${RUSTFLAGS}
CARGO_HOME=${CARGO_HOME}
RUSTUP_HOME=${RUSTUP_HOME}
)

if(WIN32)
# On Windows, tell Rust to use MSVC link.exe directly via the target-specific linker env var.
# This avoids Git's /usr/bin/link being found first in PATH.
# Find the MSVC link.exe from Visual Studio installation
file(GLOB MSVC_VERSIONS "C:/Program Files/Microsoft Visual Studio/2022/*/VC/Tools/MSVC/*")
if(MSVC_VERSIONS)
list(GET MSVC_VERSIONS -1 MSVC_LATEST) # Get the latest version
if(CMAKE_SYSTEM_PROCESSOR MATCHES "ARM64|aarch64")
set(MSVC_LINK_PATH "${MSVC_LATEST}/bin/HostARM64/arm64/link.exe")
set(CARGO_LINKER_VAR "CARGO_TARGET_AARCH64_PC_WINDOWS_MSVC_LINKER")
else()
set(MSVC_LINK_PATH "${MSVC_LATEST}/bin/Hostx64/x64/link.exe")
set(CARGO_LINKER_VAR "CARGO_TARGET_X86_64_PC_WINDOWS_MSVC_LINKER")
endif()
if(EXISTS "${MSVC_LINK_PATH}")
list(APPEND LOLHTML_ENV "${CARGO_LINKER_VAR}=${MSVC_LINK_PATH}")
message(STATUS "lolhtml: Using MSVC link.exe: ${MSVC_LINK_PATH}")
endif()
endif()
endif()

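The same idea can be expressed directly to Cargo outside of CMake; a hedged example where the MSVC version in the path is an assumption that varies per installation:

```bash
# Point Cargo's MSVC target at a concrete link.exe so Git's /usr/bin/link is never picked up.
export CARGO_TARGET_AARCH64_PC_WINDOWS_MSVC_LINKER="C:/Program Files/Microsoft Visual Studio/2022/Community/VC/Tools/MSVC/14.42.34433/bin/HostARM64/arm64/link.exe"
cargo build --release --target aarch64-pc-windows-msvc
```
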
register_command(
TARGET
lolhtml
@@ -45,12 +76,7 @@ register_command(
ARTIFACTS
${LOLHTML_LIBRARY}
ENVIRONMENT
CARGO_TERM_COLOR=always
CARGO_TERM_VERBOSE=true
CARGO_TERM_DIAGNOSTIC=true
CARGO_ENCODED_RUSTFLAGS=${RUSTFLAGS}
CARGO_HOME=${CARGO_HOME}
RUSTUP_HOME=${RUSTUP_HOME}
${LOLHTML_ENV}
)

target_link_libraries(${bun} PRIVATE ${LOLHTML_LIBRARY})

@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
oven-sh/mimalloc
COMMIT
1beadf9651a7bfdec6b5367c380ecc3fe1c40d1a
ffa38ab8ac914f9eb7af75c1f8ad457643dc14f2
)

set(MIMALLOC_CMAKE_ARGS
@@ -14,7 +14,7 @@ set(MIMALLOC_CMAKE_ARGS
-DMI_BUILD_TESTS=OFF
-DMI_USE_CXX=ON
-DMI_SKIP_COLLECT_ON_EXIT=ON

# ```
# ❯ mimalloc_allow_large_os_pages=0 BUN_PORT=3004 mem bun http-hello.js
# Started development server: http://localhost:3004
@@ -51,7 +51,7 @@ if(ENABLE_ASAN)
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_DEBUG_UBSAN=ON)
elseif(APPLE OR LINUX)
if(APPLE)
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OVERRIDE=OFF)
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OVERRIDE=OFF)
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OSX_ZONE=OFF)
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OSX_INTERPOSE=OFF)
else()
@@ -69,17 +69,27 @@ if(ENABLE_VALGRIND)
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_VALGRIND=ON)
endif()

# Enable SIMD optimizations when not building for baseline (older CPUs)
if(NOT ENABLE_BASELINE)
# Enable architecture-specific optimizations when not building for baseline.
# On Linux aarch64, upstream mimalloc force-enables MI_OPT_ARCH which adds
# -march=armv8.1-a (LSE atomics). This crashes on ARMv8.0 CPUs
# (Cortex-A53, Raspberry Pi 4, AWS a1 instances). Use MI_NO_OPT_ARCH
# to prevent that, but keep SIMD enabled. -moutline-atomics for runtime
# dispatch to LSE/LL-SC. macOS arm64 always has LSE (Apple Silicon) so
# MI_OPT_ARCH is safe there.
if(CMAKE_SYSTEM_PROCESSOR MATCHES "aarch64|arm64|ARM64|AARCH64" AND NOT APPLE)
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_NO_OPT_ARCH=ON)
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OPT_SIMD=ON)
list(APPEND MIMALLOC_CMAKE_ARGS "-DCMAKE_C_FLAGS=-moutline-atomics")
elseif(NOT ENABLE_BASELINE)
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OPT_ARCH=ON)
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OPT_SIMD=ON)
endif()

if(WIN32)
if(DEBUG)
set(MIMALLOC_LIBRARY mimalloc-static-debug)
set(MIMALLOC_LIBRARY mimalloc-debug)
else()
set(MIMALLOC_LIBRARY mimalloc-static)
set(MIMALLOC_LIBRARY mimalloc)
endif()
elseif(DEBUG)
if (ENABLE_ASAN)

@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
oven-sh/tinycc
COMMIT
29985a3b59898861442fa3b43f663fc1af2591d7
12882eee073cfe5c7621bcfadf679e1372d4537b
)

register_cmake_command(

cmake/toolchains/windows-aarch64.cmake (new file, 20 lines)
@@ -0,0 +1,20 @@
set(CMAKE_SYSTEM_NAME Windows)
set(CMAKE_SYSTEM_PROCESSOR aarch64)

set(CMAKE_C_COMPILER_WORKS ON)
set(CMAKE_CXX_COMPILER_WORKS ON)

# Force ARM64 architecture ID - this is what CMake uses to determine /machine: flag
set(MSVC_C_ARCHITECTURE_ID ARM64 CACHE INTERNAL "")
set(MSVC_CXX_ARCHITECTURE_ID ARM64 CACHE INTERNAL "")

# CMake 4.0+ policy CMP0197 controls how MSVC machine type flags are handled
set(CMAKE_POLICY_DEFAULT_CMP0197 NEW CACHE INTERNAL "")

# Clear any inherited static linker flags that might have wrong machine types
set(CMAKE_STATIC_LINKER_FLAGS "" CACHE STRING "" FORCE)

# Use wrapper script for llvm-lib that strips /machine:x64 flags
# This works around CMake 4.1.0 bug where both ARM64 and x64 machine flags are added
get_filename_component(_TOOLCHAIN_DIR "${CMAKE_CURRENT_LIST_DIR}" DIRECTORY)
set(CMAKE_AR "${_TOOLCHAIN_DIR}/scripts/llvm-lib-wrapper.bat" CACHE FILEPATH "" FORCE)
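
A minimal sketch of configuring with this toolchain file; the build directory and generator here are illustrative, not taken from the diff:

```bash
cmake -B build/windows-aarch64 -G Ninja \
  -DCMAKE_TOOLCHAIN_FILE=cmake/toolchains/windows-aarch64.cmake .
```
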
@@ -6,7 +6,8 @@ endif()

optionx(BUILDKITE_ORGANIZATION_SLUG STRING "The organization slug to use on Buildkite" DEFAULT "bun")
optionx(BUILDKITE_PIPELINE_SLUG STRING "The pipeline slug to use on Buildkite" DEFAULT "bun")
optionx(BUILDKITE_BUILD_ID STRING "The build ID to use on Buildkite")
optionx(BUILDKITE_BUILD_ID STRING "The build ID (UUID) to use on Buildkite")
optionx(BUILDKITE_BUILD_NUMBER STRING "The build number to use on Buildkite")
optionx(BUILDKITE_GROUP_ID STRING "The group ID to use on Buildkite")

if(ENABLE_BASELINE)
@@ -32,7 +33,13 @@ if(NOT BUILDKITE_BUILD_ID)
return()
endif()

setx(BUILDKITE_BUILD_URL https://buildkite.com/${BUILDKITE_ORGANIZATION_SLUG}/${BUILDKITE_PIPELINE_SLUG}/builds/${BUILDKITE_BUILD_ID})
# Use BUILDKITE_BUILD_NUMBER for the URL if available, as the UUID format causes a 302 redirect
# that CMake's file(DOWNLOAD) doesn't follow, resulting in empty response.
if(BUILDKITE_BUILD_NUMBER)
setx(BUILDKITE_BUILD_URL https://buildkite.com/${BUILDKITE_ORGANIZATION_SLUG}/${BUILDKITE_PIPELINE_SLUG}/builds/${BUILDKITE_BUILD_NUMBER})
else()
setx(BUILDKITE_BUILD_URL https://buildkite.com/${BUILDKITE_ORGANIZATION_SLUG}/${BUILDKITE_PIPELINE_SLUG}/builds/${BUILDKITE_BUILD_ID})
endif()
setx(BUILDKITE_BUILD_PATH ${BUILDKITE_BUILDS_PATH}/builds/${BUILDKITE_BUILD_ID})

file(
@@ -48,8 +55,16 @@ if(NOT BUILDKITE_BUILD_STATUS EQUAL 0)
endif()

file(READ ${BUILDKITE_BUILD_PATH}/build.json BUILDKITE_BUILD)
# Escape backslashes so CMake doesn't interpret JSON escape sequences (e.g., \n in commit messages)
string(REPLACE "\\" "\\\\" BUILDKITE_BUILD "${BUILDKITE_BUILD}")
# CMake's string(JSON ...) interprets escape sequences like \n, \r, \t.
# We need to escape these specific sequences while preserving valid JSON escapes like \" and \\.
# Strategy: Use a unique placeholder to protect \\ sequences, escape \n/\r/\t, then restore \\.
# This prevents \\n (literal backslash + n) from being corrupted to \\\n.
set(BKSLASH_PLACEHOLDER "___BKSLASH_PLACEHOLDER_7f3a9b2c___")
string(REPLACE "\\\\" "${BKSLASH_PLACEHOLDER}" BUILDKITE_BUILD "${BUILDKITE_BUILD}")
string(REPLACE "\\n" "\\\\n" BUILDKITE_BUILD "${BUILDKITE_BUILD}")
string(REPLACE "\\r" "\\\\r" BUILDKITE_BUILD "${BUILDKITE_BUILD}")
string(REPLACE "\\t" "\\\\t" BUILDKITE_BUILD "${BUILDKITE_BUILD}")
string(REPLACE "${BKSLASH_PLACEHOLDER}" "\\\\" BUILDKITE_BUILD "${BUILDKITE_BUILD}")

string(JSON BUILDKITE_BUILD_UUID GET ${BUILDKITE_BUILD} id)
string(JSON BUILDKITE_JOBS GET ${BUILDKITE_BUILD} jobs)

@@ -17,6 +17,14 @@ if (NOT CI)
set(BUN_EXECUTABLE ${BUN_EXECUTABLE} CACHE FILEPATH "Bun executable" FORCE)
endif()

# On Windows ARM64, we need to add --smol flag to avoid crashes when running
# x64 bun under WoW64 emulation
if(WIN32 AND ARCH STREQUAL "aarch64")
set(BUN_FLAGS "--smol" CACHE STRING "Extra flags for bun executable")
else()
set(BUN_FLAGS "" CACHE STRING "Extra flags for bun executable")
endif()

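Concretely, every codegen command in BuildBun.cmake that now interpolates ${BUN_FLAGS} expands differently per platform; an illustrative expansion of the ci_info step from this diff (the codegen output path is assumed, not taken from the build):

```bash
# Windows ARM64 (BUN_FLAGS = --smol):
bun --smol src/codegen/ci_info.ts build/codegen/ci_info.zig
# Everywhere else (BUN_FLAGS is empty) the command is unchanged:
bun src/codegen/ci_info.ts build/codegen/ci_info.zig
```
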
# If this is not set, some advanced features are not checked.
# https://github.com/oven-sh/bun/blob/cd7f6a1589db7f1e39dc4e3f4a17234afbe7826c/src/bun.js/javascript.zig#L1069-L1072
setenv(BUN_GARBAGE_COLLECTOR_LEVEL 1)

@@ -12,7 +12,13 @@ if(NOT ENABLE_LLVM)
return()
endif()

set(DEFAULT_LLVM_VERSION "19.1.7")
# LLVM 21 is required for Windows ARM64 (first version with ARM64 Windows builds)
# Other platforms use LLVM 19.1.7
if(WIN32 AND CMAKE_SYSTEM_PROCESSOR MATCHES "ARM64|aarch64|AARCH64")
set(DEFAULT_LLVM_VERSION "21.1.8")
else()
set(DEFAULT_LLVM_VERSION "19.1.7")
endif()

optionx(LLVM_VERSION STRING "The version of LLVM to use" DEFAULT ${DEFAULT_LLVM_VERSION})

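Since LLVM_VERSION is declared with `optionx`, the platform default can still be overridden at configure time; a minimal, hedged example:

```bash
cmake -B build -DLLVM_VERSION=21.1.8 .
```
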
@@ -31,13 +31,6 @@ execute_process(
ERROR_QUIET
)

if(MACOS_VERSION VERSION_LESS ${CMAKE_OSX_DEPLOYMENT_TARGET})
message(FATAL_ERROR "Your computer is running macOS ${MACOS_VERSION}, which is older than the target macOS SDK ${CMAKE_OSX_DEPLOYMENT_TARGET}. To fix this, either:\n"
" - Upgrade your computer to macOS ${CMAKE_OSX_DEPLOYMENT_TARGET} or newer\n"
" - Download a newer version of the macOS SDK from Apple: https://developer.apple.com/download/all/?q=xcode\n"
" - Set -DCMAKE_OSX_DEPLOYMENT_TARGET=${MACOS_VERSION}\n")
endif()

execute_process(
COMMAND xcrun --sdk macosx --show-sdk-path
OUTPUT_VARIABLE DEFAULT_CMAKE_OSX_SYSROOT

@@ -1,14 +1,25 @@
# NOTE: Changes to this file trigger QEMU JIT stress tests in CI.
# See scripts/verify-jit-stress-qemu.sh for details.

option(WEBKIT_VERSION "The version of WebKit to use")
option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading")
option(WEBKIT_BUILD_TYPE "The build type for local WebKit (defaults to CMAKE_BUILD_TYPE)")

if(NOT WEBKIT_VERSION)
set(WEBKIT_VERSION 1d0216219a3c52cb85195f48f19ba7d5db747ff7)
set(WEBKIT_VERSION 515344bc5d65aa2d4f9ff277b5fb944f0e051dcd)
endif()

# Use preview build URL for Windows ARM64 until the fix is merged to main
set(WEBKIT_PREVIEW_PR 140)

string(SUBSTRING ${WEBKIT_VERSION} 0 16 WEBKIT_VERSION_PREFIX)
string(SUBSTRING ${WEBKIT_VERSION} 0 8 WEBKIT_VERSION_SHORT)

if(WEBKIT_LOCAL)
set(DEFAULT_WEBKIT_PATH ${VENDOR_PATH}/WebKit/WebKitBuild/${CMAKE_BUILD_TYPE})
if(NOT WEBKIT_BUILD_TYPE)
set(WEBKIT_BUILD_TYPE ${CMAKE_BUILD_TYPE})
endif()
set(DEFAULT_WEBKIT_PATH ${VENDOR_PATH}/WebKit/WebKitBuild/${WEBKIT_BUILD_TYPE})
else()
set(DEFAULT_WEBKIT_PATH ${CACHE_PATH}/webkit-${WEBKIT_VERSION_PREFIX})
endif()
@@ -23,19 +34,153 @@ set(WEBKIT_INCLUDE_PATH ${WEBKIT_PATH}/include)
set(WEBKIT_LIB_PATH ${WEBKIT_PATH}/lib)

if(WEBKIT_LOCAL)
if(EXISTS ${WEBKIT_PATH}/cmakeconfig.h)
# You may need to run:
# make jsc-compile-debug jsc-copy-headers
include_directories(
${WEBKIT_PATH}
${WEBKIT_PATH}/JavaScriptCore/Headers
${WEBKIT_PATH}/JavaScriptCore/Headers/JavaScriptCore
${WEBKIT_PATH}/JavaScriptCore/PrivateHeaders
${WEBKIT_PATH}/bmalloc/Headers
${WEBKIT_PATH}/WTF/Headers
${WEBKIT_PATH}/JavaScriptCore/DerivedSources/inspector
${WEBKIT_PATH}/JavaScriptCore/PrivateHeaders/JavaScriptCore
set(WEBKIT_SOURCE_DIR ${VENDOR_PATH}/WebKit)

if(WIN32)
# --- Build ICU from source (Windows only) ---
# On macOS, ICU is found automatically (Homebrew icu4c for headers, system for libs).
# On Linux, ICU is found automatically from system packages (e.g. libicu-dev).
# On Windows, there is no system ICU, so we build it from source.
set(ICU_LOCAL_ROOT ${VENDOR_PATH}/WebKit/WebKitBuild/icu)
if(NOT EXISTS ${ICU_LOCAL_ROOT}/lib/sicudt.lib)
message(STATUS "Building ICU from source...")
if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm64|ARM64|aarch64|AARCH64")
set(ICU_PLATFORM "ARM64")
else()
set(ICU_PLATFORM "x64")
endif()
execute_process(
COMMAND powershell -ExecutionPolicy Bypass -File
${WEBKIT_SOURCE_DIR}/build-icu.ps1
-Platform ${ICU_PLATFORM}
-BuildType ${WEBKIT_BUILD_TYPE}
-OutputDir ${ICU_LOCAL_ROOT}
RESULT_VARIABLE ICU_BUILD_RESULT
)
if(NOT ICU_BUILD_RESULT EQUAL 0)
message(FATAL_ERROR "Failed to build ICU (exit code: ${ICU_BUILD_RESULT}).")
endif()
endif()

# Copy ICU libs to WEBKIT_LIB_PATH with the names BuildBun.cmake expects.
# Prebuilt WebKit uses 's' prefix (static) and 'd' suffix (debug).
file(MAKE_DIRECTORY ${WEBKIT_LIB_PATH})
if(WEBKIT_BUILD_TYPE STREQUAL "Debug")
set(ICU_SUFFIX "d")
else()
set(ICU_SUFFIX "")
endif()
file(COPY_FILE ${ICU_LOCAL_ROOT}/lib/sicudt.lib ${WEBKIT_LIB_PATH}/sicudt${ICU_SUFFIX}.lib ONLY_IF_DIFFERENT)
file(COPY_FILE ${ICU_LOCAL_ROOT}/lib/icuin.lib ${WEBKIT_LIB_PATH}/sicuin${ICU_SUFFIX}.lib ONLY_IF_DIFFERENT)
file(COPY_FILE ${ICU_LOCAL_ROOT}/lib/icuuc.lib ${WEBKIT_LIB_PATH}/sicuuc${ICU_SUFFIX}.lib ONLY_IF_DIFFERENT)
endif()

# --- Configure JSC ---
message(STATUS "Configuring JSC from local WebKit source at ${WEBKIT_SOURCE_DIR}...")

set(JSC_CMAKE_ARGS
-S ${WEBKIT_SOURCE_DIR}
-B ${WEBKIT_PATH}
-G ${CMAKE_GENERATOR}
-DPORT=JSCOnly
-DENABLE_STATIC_JSC=ON
-DUSE_THIN_ARCHIVES=OFF
-DENABLE_FTL_JIT=ON
-DCMAKE_EXPORT_COMPILE_COMMANDS=ON
-DUSE_BUN_JSC_ADDITIONS=ON
-DUSE_BUN_EVENT_LOOP=ON
-DENABLE_BUN_SKIP_FAILING_ASSERTIONS=ON
-DALLOW_LINE_AND_COLUMN_NUMBER_IN_BUILTINS=ON
-DCMAKE_BUILD_TYPE=${WEBKIT_BUILD_TYPE}
-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
-DENABLE_REMOTE_INSPECTOR=ON
)

if(WIN32)
# ICU paths and Windows-specific compiler/linker settings
list(APPEND JSC_CMAKE_ARGS
-DICU_ROOT=${ICU_LOCAL_ROOT}
-DICU_LIBRARY=${ICU_LOCAL_ROOT}/lib
-DICU_INCLUDE_DIR=${ICU_LOCAL_ROOT}/include
-DCMAKE_LINKER=lld-link
)
# Static CRT and U_STATIC_IMPLEMENTATION
if(WEBKIT_BUILD_TYPE STREQUAL "Debug")
set(JSC_MSVC_RUNTIME "MultiThreadedDebug")
else()
set(JSC_MSVC_RUNTIME "MultiThreaded")
endif()
list(APPEND JSC_CMAKE_ARGS
-DCMAKE_MSVC_RUNTIME_LIBRARY=${JSC_MSVC_RUNTIME}
"-DCMAKE_C_FLAGS=/DU_STATIC_IMPLEMENTATION"
"-DCMAKE_CXX_FLAGS=/DU_STATIC_IMPLEMENTATION /clang:-fno-c++-static-destructors"
)
endif()

if(ENABLE_ASAN)
list(APPEND JSC_CMAKE_ARGS -DENABLE_SANITIZERS=address)
endif()

# Pass through ccache if available
if(CMAKE_C_COMPILER_LAUNCHER)
list(APPEND JSC_CMAKE_ARGS -DCMAKE_C_COMPILER_LAUNCHER=${CMAKE_C_COMPILER_LAUNCHER})
endif()
if(CMAKE_CXX_COMPILER_LAUNCHER)
list(APPEND JSC_CMAKE_ARGS -DCMAKE_CXX_COMPILER_LAUNCHER=${CMAKE_CXX_COMPILER_LAUNCHER})
endif()

execute_process(
COMMAND ${CMAKE_COMMAND} ${JSC_CMAKE_ARGS}
RESULT_VARIABLE JSC_CONFIGURE_RESULT
)
if(NOT JSC_CONFIGURE_RESULT EQUAL 0)
message(FATAL_ERROR "Failed to configure JSC (exit code: ${JSC_CONFIGURE_RESULT}). "
"Check the output above for errors.")
endif()

if(WIN32)
set(JSC_BYPRODUCTS
${WEBKIT_LIB_PATH}/JavaScriptCore.lib
${WEBKIT_LIB_PATH}/WTF.lib
${WEBKIT_LIB_PATH}/bmalloc.lib
)
else()
set(JSC_BYPRODUCTS
${WEBKIT_LIB_PATH}/libJavaScriptCore.a
${WEBKIT_LIB_PATH}/libWTF.a
${WEBKIT_LIB_PATH}/libbmalloc.a
)
endif()

if(WIN32)
add_custom_target(jsc ALL
COMMAND ${CMAKE_COMMAND} --build ${WEBKIT_PATH} --config ${WEBKIT_BUILD_TYPE} --target jsc
BYPRODUCTS ${JSC_BYPRODUCTS}
COMMENT "Building JSC (${WEBKIT_PATH})"
)
else()
add_custom_target(jsc ALL
COMMAND ${CMAKE_COMMAND} --build ${WEBKIT_PATH} --config ${WEBKIT_BUILD_TYPE} --target jsc
BYPRODUCTS ${JSC_BYPRODUCTS}
COMMENT "Building JSC (${WEBKIT_PATH})"
USES_TERMINAL
)
endif()

include_directories(
${WEBKIT_PATH}
${WEBKIT_PATH}/JavaScriptCore/Headers
${WEBKIT_PATH}/JavaScriptCore/Headers/JavaScriptCore
${WEBKIT_PATH}/JavaScriptCore/PrivateHeaders
${WEBKIT_PATH}/bmalloc/Headers
${WEBKIT_PATH}/WTF/Headers
${WEBKIT_PATH}/JavaScriptCore/PrivateHeaders/JavaScriptCore
)

# On Windows, add ICU headers from the local ICU build
if(WIN32)
include_directories(${ICU_LOCAL_ROOT}/include)
endif()

# After this point, only prebuilt WebKit is supported
@@ -52,7 +197,7 @@ else()
message(FATAL_ERROR "Unsupported operating system: ${CMAKE_SYSTEM_NAME}")
endif()

if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm64|aarch64")
if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm64|ARM64|aarch64|AARCH64")
set(WEBKIT_ARCH "arm64")
elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "amd64|x86_64|x64|AMD64")
set(WEBKIT_ARCH "amd64")
@@ -81,7 +226,14 @@ endif()

setx(WEBKIT_NAME bun-webkit-${WEBKIT_OS}-${WEBKIT_ARCH}${WEBKIT_SUFFIX})
set(WEBKIT_FILENAME ${WEBKIT_NAME}.tar.gz)
setx(WEBKIT_DOWNLOAD_URL https://github.com/oven-sh/WebKit/releases/download/autobuild-${WEBKIT_VERSION}/${WEBKIT_FILENAME})

if(WEBKIT_VERSION MATCHES "^autobuild-")
set(WEBKIT_TAG ${WEBKIT_VERSION})
else()
set(WEBKIT_TAG autobuild-${WEBKIT_VERSION})
endif()

setx(WEBKIT_DOWNLOAD_URL https://github.com/oven-sh/WebKit/releases/download/${WEBKIT_TAG}/${WEBKIT_FILENAME})

if(EXISTS ${WEBKIT_PATH}/package.json)
file(READ ${WEBKIT_PATH}/package.json WEBKIT_PACKAGE_JSON)

@@ -1,4 +1,4 @@
if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm64|aarch64")
if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm64|ARM64|aarch64|AARCH64")
set(DEFAULT_ZIG_ARCH "aarch64")
elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "amd64|x86_64|x64|AMD64")
set(DEFAULT_ZIG_ARCH "x86_64")

@@ -7,9 +7,9 @@ Bytecode caching is a build-time optimization that dramatically improves applica

## Usage

### Basic usage
### Basic usage (CommonJS)

Enable bytecode caching with the `--bytecode` flag:
Enable bytecode caching with the `--bytecode` flag. Without `--format`, this defaults to CommonJS:

```bash terminal icon="terminal"
bun build ./index.ts --target=bun --bytecode --outdir=./dist
@@ -17,7 +17,7 @@ bun build ./index.ts --target=bun --bytecode --outdir=./dist

This generates two files:

- `dist/index.js` - Your bundled JavaScript
- `dist/index.js` - Your bundled JavaScript (CommonJS)
- `dist/index.jsc` - The bytecode cache file

At runtime, Bun automatically detects and uses the `.jsc` file:
@@ -28,14 +28,24 @@ bun ./dist/index.js # Automatically uses index.jsc

### With standalone executables

When creating executables with `--compile`, bytecode is embedded into the binary:
When creating executables with `--compile`, bytecode is embedded into the binary. Both ESM and CommonJS formats are supported:

```bash terminal icon="terminal"
# ESM (requires --compile)
bun build ./cli.ts --compile --bytecode --format=esm --outfile=mycli

# CommonJS (works with or without --compile)
bun build ./cli.ts --compile --bytecode --outfile=mycli
```

The resulting executable contains both the code and bytecode, giving you maximum performance in a single file.

### ESM bytecode

ESM bytecode requires `--compile` because Bun embeds module metadata (import/export information) in the compiled binary. This metadata allows the JavaScript engine to skip parsing entirely at runtime.

Without `--compile`, ESM bytecode would still require parsing the source to analyze module dependencies—defeating the purpose of bytecode caching.

### Combining with other optimizations

Bytecode works great with minification and source maps:
@@ -90,35 +100,9 @@ Larger applications benefit more because they have more code to parse.
- ❌ **Code that runs once**
- ❌ **Development builds**
- ❌ **Size-constrained environments**
- ❌ **Code with top-level await** (not supported)

## Limitations

### CommonJS only

Bytecode caching currently works with CommonJS output format. Bun's bundler automatically converts most ESM code to CommonJS, but **top-level await** is the exception:

```js
// This prevents bytecode caching
const data = await fetch("https://api.example.com");
export default data;
```

**Why**: Top-level await requires async module evaluation, which can't be represented in CommonJS. The module graph becomes asynchronous, and the CommonJS wrapper function model breaks down.

**Workaround**: Move async initialization into a function:

```js
async function init() {
const data = await fetch("https://api.example.com");
return data;
}

export default init;
```

Now the module exports a function that the consumer can await when needed.

### Version compatibility

Bytecode is **not portable across Bun versions**. The bytecode format is tied to JavaScriptCore's internal representation, which changes between versions.
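
A practical consequence, sketched here as a hedged example rather than documented behavior of any particular pipeline: regenerate the bytecode with the same Bun version that will run it, for instance during deployment instead of committing `.jsc` files to the repository:

```bash terminal icon="terminal"
# Rebuild .jsc with the runtime's own Bun version; on a version mismatch Bun falls
# back to parsing the JavaScript source (you may see cache misses logged).
bun --version
bun build ./index.ts --target=bun --bytecode --outdir=./dist
```
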
@@ -236,8 +220,6 @@ It's normal for it it to log a cache miss multiple times since Bun doesn't curre
- Compressing `.jsc` files for network transfer (gzip/brotli)
- Evaluating if the startup performance gain is worth the size increase

**Top-level await**: Not supported. Refactor to use async initialization functions.

## What is bytecode?

When you run JavaScript, the JavaScript engine doesn't execute your source code directly. Instead, it goes through several steps:

@@ -322,10 +322,7 @@ Using bytecode compilation, `tsc` starts 2x faster:

Bytecode compilation moves parsing overhead for large input files from runtime to bundle time. Your app starts faster, in exchange for making the `bun build` command a little slower. It doesn't obscure source code.

<Warning>
**Experimental:** Bytecode compilation is an experimental feature. Only `cjs` format is supported (which means no
top-level-await). Let us know if you run into any issues!
</Warning>
<Note>Bytecode compilation supports both `cjs` and `esm` formats when used with `--compile`.</Note>

### What do these flags do?

@@ -365,6 +362,23 @@ The `--bytecode` argument enables bytecode compilation. Every time you run JavaS
console.log(process.execArgv); // ["--smol", "--user-agent=MyBot"]
```

### Runtime arguments via `BUN_OPTIONS`

The `BUN_OPTIONS` environment variable is applied to standalone executables, allowing you to pass runtime flags without recompiling:

```bash terminal icon="terminal"
# Enable CPU profiling on a compiled executable
BUN_OPTIONS="--cpu-prof" ./myapp

# Enable heap profiling with markdown output
BUN_OPTIONS="--heap-prof-md" ./myapp

# Combine multiple flags
BUN_OPTIONS="--smol --cpu-prof-md" ./myapp
```

This is useful for debugging or profiling production executables without rebuilding them.

---

## Automatic config loading

@@ -1333,6 +1333,50 @@ Generate metadata about the build in a structured format. The metafile contains
</Tab>
</Tabs>

#### Markdown metafile

Use `--metafile-md` to generate a markdown metafile, which is LLM-friendly and easy to read in the terminal:

```bash terminal icon="terminal"
bun build ./src/index.ts --outdir ./dist --metafile-md ./dist/meta.md
```

Both `--metafile` and `--metafile-md` can be used together:

```bash terminal icon="terminal"
bun build ./src/index.ts --outdir ./dist --metafile ./dist/meta.json --metafile-md ./dist/meta.md
```

#### `metafile` option formats

In the JavaScript API, `metafile` accepts several forms:

```ts title="build.ts" icon="/icons/typescript.svg"
// Boolean — include metafile in the result object
await Bun.build({
entrypoints: ["./src/index.ts"],
outdir: "./dist",
metafile: true,
});

// String — write JSON metafile to a specific path
await Bun.build({
entrypoints: ["./src/index.ts"],
outdir: "./dist",
metafile: "./dist/meta.json",
});

// Object — specify separate paths for JSON and markdown output
await Bun.build({
entrypoints: ["./src/index.ts"],
outdir: "./dist",
metafile: {
json: "./dist/meta.json",
markdown: "./dist/meta.md",
},
});
```

The metafile structure contains:

```ts
@@ -1464,22 +1508,43 @@ BuildArtifact (entry-point) {

## Bytecode

The `bytecode: boolean` option can be used to generate bytecode for any JavaScript/TypeScript entrypoints. This can greatly improve startup times for large applications. Only supported for `"cjs"` format, only supports `"target": "bun"` and dependent on a matching version of Bun. This adds a corresponding `.jsc` file for each entrypoint.
The `bytecode: boolean` option can be used to generate bytecode for any JavaScript/TypeScript entrypoints. This can greatly improve startup times for large applications. Requires `"target": "bun"` and is dependent on a matching version of Bun.

- **CommonJS**: Works with or without `compile: true`. Generates a `.jsc` file alongside each entrypoint.
- **ESM**: Requires `compile: true`. Bytecode and module metadata are embedded in the standalone executable.

Without an explicit `format`, bytecode defaults to CommonJS.

<Tabs>
<Tab title="JavaScript">
```ts title="build.ts" icon="/icons/typescript.svg"
// CommonJS bytecode (generates .jsc files)
await Bun.build({
entrypoints: ["./index.tsx"],
outdir: "./out",
bytecode: true,
})

// ESM bytecode (requires compile)
await Bun.build({
entrypoints: ["./index.tsx"],
outfile: "./mycli",
bytecode: true,
format: "esm",
compile: true,
})
```

</Tab>
<Tab title="CLI">
```bash terminal icon="terminal"
# CommonJS bytecode
bun build ./index.tsx --outdir ./out --bytecode

# ESM bytecode (requires --compile)
bun build ./index.tsx --outfile ./mycli --bytecode --format=esm --compile
```

</Tab>
</Tabs>

@@ -1646,7 +1711,10 @@ interface BuildConfig {
* start times, but will make the final output larger and slightly increase
* memory usage.
*
* Bytecode is currently only supported for CommonJS (`format: "cjs"`).
* - CommonJS: works with or without `compile: true`
* - ESM: requires `compile: true`
*
* Without an explicit `format`, defaults to CommonJS.
*
* Must be `target: "bun"`
* @default false

@@ -150,6 +150,9 @@
"/runtime/secrets",
"/runtime/console",
"/runtime/yaml",
"/runtime/markdown",
"/runtime/json5",
"/runtime/jsonl",
"/runtime/html-rewriter",
"/runtime/hashing",
"/runtime/glob",
@@ -497,6 +500,7 @@
"/guides/runtime/import-json",
"/guides/runtime/import-toml",
"/guides/runtime/import-yaml",
"/guides/runtime/import-json5",
"/guides/runtime/import-html",
"/guides/util/import-meta-dir",
"/guides/util/import-meta-file",

docs/guides/runtime/import-json5.mdx (new file, 74 lines)
@@ -0,0 +1,74 @@
---
title: Import a JSON5 file
sidebarTitle: Import JSON5
mode: center
---

Bun natively supports `.json5` imports.

```json5 config.json5 icon="file-code"
{
// Comments are allowed
database: {
host: "localhost",
port: 5432,
name: "myapp",
},

server: {
port: 3000,
timeout: 30,
},

features: {
auth: true,
rateLimit: true,
},
}
```

---

Import the file like any other source file.

```ts config.ts icon="/icons/typescript.svg"
import config from "./config.json5";

config.database.host; // => "localhost"
config.server.port; // => 3000
config.features.auth; // => true
```

---

You can also use named imports to destructure top-level properties:

```ts config.ts icon="/icons/typescript.svg"
import { database, server, features } from "./config.json5";

console.log(database.name); // => "myapp"
console.log(server.timeout); // => 30
console.log(features.rateLimit); // => true
```

---

For parsing JSON5 strings at runtime, use `Bun.JSON5.parse()`:

```ts config.ts icon="/icons/typescript.svg"
const data = JSON5.parse(`{
name: 'John Doe',
age: 30,
hobbies: [
'reading',
'coding',
],
}`);

console.log(data.name); // => "John Doe"
console.log(data.hobbies); // => ["reading", "coding"]
```

---

See [Docs > API > JSON5](/runtime/json5) for complete documentation on JSON5 support in Bun.
@@ -26,21 +26,6 @@ The `bun` CLI contains a Node.js-compatible package manager designed to be a dra

</Note>

<Accordion title="For Linux users">
The recommended minimum Linux Kernel version is 5.6. If you're on Linux kernel 5.1 - 5.5, `bun install` will work, but HTTP requests will be slow due to a lack of support for io_uring's `connect()` operation.

If you're using Ubuntu 20.04, here's how to install a [newer kernel](https://wiki.ubuntu.com/Kernel/LTSEnablementStack):

```bash terminal icon="terminal"
# If this returns a version >= 5.6, you don't need to do anything
uname -r

# Install the official Ubuntu hardware enablement kernel
sudo apt install --install-recommends linux-generic-hwe-20.04
```

</Accordion>

To install all dependencies of a project:

```bash terminal icon="terminal"

@@ -97,6 +97,31 @@ Filters respect your [workspace configuration](/pm/workspaces): If you have a `p
bun run --filter foo myscript
```

### Parallel and sequential mode

Combine `--filter` or `--workspaces` with `--parallel` or `--sequential` to run scripts across workspace packages with Foreman-style prefixed output:

```bash terminal icon="terminal"
# Run "build" in all matching packages concurrently
bun run --parallel --filter '*' build

# Run "build" in all workspace packages sequentially
bun run --sequential --workspaces build

# Run glob-matched scripts across all packages
bun run --parallel --filter '*' "build:*"

# Continue running even if one package's script fails
bun run --parallel --no-exit-on-error --filter '*' test

# Run multiple scripts across all packages
bun run --parallel --filter '*' build lint
```

Each line of output is prefixed with the package and script name (e.g. `pkg-a:build | ...`). Without `--filter`/`--workspaces`, the prefix is just the script name (e.g. `build | ...`). When a package's `package.json` has no `name` field, the relative path from the workspace root is used instead.

Use `--if-present` with `--workspaces` to skip packages that don't have the requested script instead of erroring.

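For example, combining these flags to build every workspace package that defines a `build` script while silently skipping the rest:

```bash terminal icon="terminal"
bun run --parallel --workspaces --if-present build
```
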
### Dependency Order

Bun will respect package dependency order when running scripts. Say you have a package `foo` that depends on another package `bar` in your workspace, and both packages have a `build` script. When you run `bun --filter '*' build`, you will notice that `foo` will only start running once `bar` is done.

@@ -227,6 +227,26 @@ bun --cpu-prof script.js

This generates a `.cpuprofile` file you can open in Chrome DevTools (Performance tab → Load profile) or VS Code's CPU profiler.

### Markdown output

Use `--cpu-prof-md` to generate a markdown CPU profile, which is grep-friendly and designed for LLM analysis:

```sh terminal icon="terminal"
bun --cpu-prof-md script.js
```

Both `--cpu-prof` and `--cpu-prof-md` can be used together to generate both formats at once:

```sh terminal icon="terminal"
bun --cpu-prof --cpu-prof-md script.js
```

You can also trigger profiling via the `BUN_OPTIONS` environment variable:

```sh terminal icon="terminal"
BUN_OPTIONS="--cpu-prof-md" bun script.js
```

### Options

```sh terminal icon="terminal"
@@ -234,8 +254,43 @@ bun --cpu-prof --cpu-prof-name my-profile.cpuprofile script.js
bun --cpu-prof --cpu-prof-dir ./profiles script.js
```

| Flag | Description |
| ---------------------------- | -------------------- |
| `--cpu-prof` | Enable profiling |
| `--cpu-prof-name <filename>` | Set output filename |
| `--cpu-prof-dir <dir>` | Set output directory |
| Flag | Description |
| ---------------------------- | ----------------------------------------------------------- |
| `--cpu-prof` | Generate a `.cpuprofile` JSON file (Chrome DevTools format) |
| `--cpu-prof-md` | Generate a markdown CPU profile (grep/LLM-friendly) |
| `--cpu-prof-name <filename>` | Set output filename |
| `--cpu-prof-dir <dir>` | Set output directory |

## Heap profiling

Generate heap snapshots on exit to analyze memory usage and find memory leaks.

```sh terminal icon="terminal"
bun --heap-prof script.js
```

This generates a V8 `.heapsnapshot` file that can be loaded in Chrome DevTools (Memory tab → Load).

### Markdown output

Use `--heap-prof-md` to generate a markdown heap profile for CLI analysis:

```sh terminal icon="terminal"
bun --heap-prof-md script.js
```

<Note>If both `--heap-prof` and `--heap-prof-md` are specified, the markdown format is used.</Note>

### Options

```sh terminal icon="terminal"
bun --heap-prof --heap-prof-name my-snapshot.heapsnapshot script.js
bun --heap-prof --heap-prof-dir ./profiles script.js
```

| Flag | Description |
| ----------------------------- | ------------------------------------------ |
| `--heap-prof` | Generate a V8 `.heapsnapshot` file on exit |
| `--heap-prof-md` | Generate a markdown heap profile on exit |
| `--heap-prof-name <filename>` | Set output filename |
| `--heap-prof-dir <dir>` | Set output directory |

@@ -35,7 +35,7 @@ winget install "Visual Studio Community 2022" --override "--add Microsoft.Visual

After Visual Studio, you need the following:

- LLVM 19.1.7
- LLVM (19.1.7 for x64, 21.1.8 for ARM64)
- Go
- Rust
- NASM
@@ -47,25 +47,35 @@ After Visual Studio, you need the following:

[Scoop](https://scoop.sh) can be used to install these remaining tools easily.

```ps1 Scoop
```ps1 Scoop (x64)
irm https://get.scoop.sh | iex
scoop install nodejs-lts go rust nasm ruby perl ccache
# scoop seems to be buggy if you install llvm and the rest at the same time
scoop install llvm@19.1.7
```

For Windows ARM64, download LLVM 21.1.8 directly from GitHub releases (first version with ARM64 Windows builds):

```ps1 ARM64
# Download and install LLVM for ARM64
Invoke-WebRequest -Uri "https://github.com/llvm/llvm-project/releases/download/llvmorg-21.1.8/LLVM-21.1.8-woa64.exe" -OutFile "$env:TEMP\LLVM-21.1.8-woa64.exe"
Start-Process -FilePath "$env:TEMP\LLVM-21.1.8-woa64.exe" -ArgumentList "/S" -Wait
```

<Note>
Please do not use WinGet/other package manager for these, as you will likely install Strawberry Perl instead of a more
minimal installation of Perl. Strawberry Perl includes many other utilities that get installed into `$Env:PATH` that
will conflict with MSVC and break the build.
</Note>

If you intend on building WebKit locally (optional), you should install these packages:
If you intend on building WebKit locally (optional, x64 only), you should install these packages:

```ps1 Scoop
scoop install make cygwin python
```

<Note>Cygwin is not required for ARM64 builds as WebKit is provided as a pre-built binary.</Note>

From here on out, it is **expected you use a PowerShell Terminal with `.\scripts\vs-shell.ps1` sourced**. This script is available in the Bun repository and can be loaded by executing it:

```ps1

@@ -266,18 +266,13 @@ git clone https://github.com/oven-sh/WebKit vendor/WebKit
# Check out the commit hash specified in `set(WEBKIT_VERSION <commit_hash>)` in cmake/tools/SetupWebKit.cmake
git -C vendor/WebKit checkout <commit_hash>

# Make a debug build of JSC. This will output build artifacts in ./vendor/WebKit/WebKitBuild/Debug
# Optionally, you can use `bun run jsc:build` for a release build
bun run jsc:build:debug && rm vendor/WebKit/WebKitBuild/Debug/JavaScriptCore/DerivedSources/inspector/InspectorProtocolObjects.h

# After an initial run of `make jsc-debug`, you can rebuild JSC with:
cmake --build vendor/WebKit/WebKitBuild/Debug --target jsc && rm vendor/WebKit/WebKitBuild/Debug/JavaScriptCore/DerivedSources/inspector/InspectorProtocolObjects.h

# Build bun with the local JSC build
# Build bun with the local JSC build — this automatically configures and builds JSC
bun run build:local
```

Using `bun run build:local` will build Bun in the `./build/debug-local` directory (instead of `./build/debug`), you'll have to change a couple of places to use this new directory:
`bun run build:local` handles everything: configuring JSC, building JSC, and building Bun. On subsequent runs, JSC will incrementally rebuild if any WebKit sources changed. `ninja -Cbuild/debug-local` also works after the first build, and will build Bun+JSC.

The build output goes to `./build/debug-local` (instead of `./build/debug`), so you'll need to update a couple of places:

- The first line in `src/js/builtins.d.ts`
- The `CompilationDatabase` line in `.clangd` config should be `CompilationDatabase: build/debug-local`
@@ -288,7 +283,7 @@ Note that the WebKit folder, including build artifacts, is 8GB+ in size.

If you are using a JSC debug build and using VScode, make sure to run the `C/C++: Select a Configuration` command to configure intellisense to find the debug headers.

Note that if you change make changes to our [WebKit fork](https://github.com/oven-sh/WebKit), you will also have to change `SetupWebKit.cmake` to point to the commit hash.
Note that if you make changes to our [WebKit fork](https://github.com/oven-sh/WebKit), you will also have to change `SetupWebKit.cmake` to point to the commit hash.

## Troubleshooting

@@ -55,5 +55,5 @@ Click the link in the right column to jump to the associated documentation.
| Stream Processing | [`Bun.readableStreamTo*()`](/runtime/utils#bun-readablestreamto), `Bun.readableStreamToBytes()`, `Bun.readableStreamToBlob()`, `Bun.readableStreamToFormData()`, `Bun.readableStreamToJSON()`, `Bun.readableStreamToArray()` |
| Memory & Buffer Management | `Bun.ArrayBufferSink`, `Bun.allocUnsafe`, `Bun.concatArrayBuffers` |
| Module Resolution | [`Bun.resolveSync()`](/runtime/utils#bun-resolvesync) |
| Parsing & Formatting | [`Bun.semver`](/runtime/semver), `Bun.TOML.parse`, [`Bun.color`](/runtime/color) |
| Parsing & Formatting | [`Bun.semver`](/runtime/semver), `Bun.TOML.parse`, [`Bun.markdown`](/runtime/markdown), [`Bun.color`](/runtime/color) |
| Low-level / Internals | `Bun.mmap`, `Bun.gc`, `Bun.generateHeapSnapshot`, [`bun:jsc`](https://bun.com/reference/bun/jsc) |

@@ -5,7 +5,7 @@ description: "File types and loaders supported by Bun's bundler and runtime"

The Bun bundler implements a set of default loaders out of the box. As a rule of thumb, the bundler and the runtime both support the same set of file types out of the box.

`.js` `.cjs` `.mjs` `.mts` `.cts` `.ts` `.tsx` `.jsx` `.css` `.json` `.jsonc` `.toml` `.yaml` `.yml` `.txt` `.wasm` `.node` `.html` `.sh`
`.js` `.cjs` `.mjs` `.mts` `.cts` `.ts` `.tsx` `.jsx` `.css` `.json` `.jsonc` `.json5` `.toml` `.yaml` `.yml` `.txt` `.wasm` `.node` `.html` `.sh`

Bun uses the file extension to determine which built-in _loader_ should be used to parse the file. Every loader has a name, such as `js`, `tsx`, or `json`. These names are used when building [plugins](/bundler/plugins) that extend Bun with custom loaders.

@@ -197,6 +197,53 @@ export default {

</CodeGroup>

### `json5`

**JSON5 loader**. Default for `.json5`.

JSON5 files can be directly imported. Bun will parse them with its fast native JSON5 parser. JSON5 is a superset of JSON that supports comments, trailing commas, unquoted keys, single-quoted strings, and more.

```ts
import config from "./config.json5";
console.log(config);

// via import attribute:
import data from "./data.txt" with { type: "json5" };
```

During bundling, the parsed JSON5 is inlined into the bundle as a JavaScript object.

```ts
var config = {
name: "my-app",
version: "1.0.0",
// ...other fields
};
```

If a `.json5` file is passed as an entrypoint, it will be converted to a `.js` module that `export default`s the parsed object.

<CodeGroup>

```json5 Input
{
// Configuration
name: "John Doe",
age: 35,
email: "johndoe@example.com",
}
```

```ts Output
export default {
name: "John Doe",
age: 35,
email: "johndoe@example.com",
};
```

</CodeGroup>

### `text`

**Text loader**. Default for `.txt`.

271
docs/runtime/json5.mdx
Normal file
271
docs/runtime/json5.mdx
Normal file
@@ -0,0 +1,271 @@
|
||||
---
|
||||
title: JSON5
|
||||
description: Use Bun's built-in support for JSON5 files through both runtime APIs and bundler integration
|
||||
---
|
||||
|
||||
In Bun, JSON5 is a first-class citizen alongside JSON, TOML, and YAML. You can:
|
||||
|
||||
- Parse and stringify JSON5 with `Bun.JSON5.parse` and `Bun.JSON5.stringify`
|
||||
- `import` & `require` JSON5 files as modules at runtime (including hot reloading & watch mode support)
|
||||
- `import` & `require` JSON5 files in frontend apps via Bun's bundler
|
||||
|
||||
---
|
||||
|
||||
## Conformance
|
||||
|
||||
Bun's JSON5 parser passes 100% of the [official JSON5 test suite](https://github.com/json5/json5-tests). The parser is written in Zig for optimal performance. You can view our [translated test suite](https://github.com/oven-sh/bun/blob/main/test/js/bun/json5/json5-test-suite.test.ts) to see every test case.
|
||||
|
||||
---
|
||||
|
||||
## Runtime API
|
||||
|
||||
### `Bun.JSON5.parse()`
|
||||
|
||||
Parse a JSON5 string into a JavaScript value.
|
||||
|
||||
```ts
|
||||
import { JSON5 } from "bun";
|
||||
|
||||
const data = JSON5.parse(`{
|
||||
// JSON5 supports comments
|
||||
name: 'my-app',
|
||||
version: '1.0.0',
|
||||
debug: true,
|
||||
|
||||
// trailing commas are allowed
|
||||
tags: ['web', 'api',],
|
||||
}`);
|
||||
|
||||
console.log(data);
|
||||
// {
|
||||
// name: "my-app",
|
||||
// version: "1.0.0",
|
||||
// debug: true,
|
||||
// tags: ["web", "api"]
|
||||
// }
|
||||
```
|
||||
|
||||
#### Supported JSON5 Features
|
||||
|
||||
JSON5 is a superset of JSON based on ECMAScript 5.1 syntax. It supports:
|
||||
|
||||
- **Comments**: single-line (`//`) and multi-line (`/* */`)
|
||||
- **Trailing commas**: in objects and arrays
|
||||
- **Unquoted keys**: valid ECMAScript 5.1 identifiers can be used as keys
|
||||
- **Single-quoted strings**: in addition to double-quoted strings
|
||||
- **Multi-line strings**: using backslash line continuations
|
||||
- **Hex numbers**: `0xFF`
|
||||
- **Leading & trailing decimal points**: `.5` and `5.`
|
||||
- **Infinity and NaN**: positive and negative
|
||||
- **Explicit plus sign**: `+42`
|
||||
|
||||
```ts
|
||||
const data = JSON5.parse(`{
|
||||
// Unquoted keys
|
||||
unquoted: 'keys work',
|
||||
|
||||
// Single and double quotes
|
||||
single: 'single-quoted',
|
||||
double: "double-quoted",
|
||||
|
||||
// Trailing commas
|
||||
trailing: 'comma',
|
||||
|
||||
// Special numbers
|
||||
hex: 0xDEADbeef,
|
||||
half: .5,
|
||||
to: Infinity,
|
||||
nan: NaN,
|
||||
|
||||
// Multi-line strings
|
||||
multiline: 'line 1 \
|
||||
line 2',
|
||||
}`);
|
||||
```
|
||||
|
||||
#### Error Handling
|
||||
|
||||
`Bun.JSON5.parse()` throws a `SyntaxError` if the input is invalid JSON5:
|
||||
|
||||
```ts
|
||||
try {
|
||||
JSON5.parse("{invalid}");
|
||||
} catch (error) {
|
||||
console.error("Failed to parse JSON5:", error.message);
|
||||
}
|
||||
```
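
Beyond inline strings, the same API works for configuration files read at runtime. A minimal sketch (the `settings.json5` path is hypothetical) that reads a file with `Bun.file` and parses it without going through the module loader:

```ts
import { JSON5 } from "bun";

// Read a JSON5 file from disk and parse it at runtime.
// "settings.json5" is a hypothetical path used for illustration.
const text = await Bun.file("settings.json5").text();
const settings = JSON5.parse(text);
console.log(settings);
```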
|
||||
|
||||
### `Bun.JSON5.stringify()`
|
||||
|
||||
Stringify a JavaScript value to a JSON5 string.
|
||||
|
||||
```ts
|
||||
import { JSON5 } from "bun";
|
||||
|
||||
const str = JSON5.stringify({ name: "my-app", version: "1.0.0" });
|
||||
console.log(str);
|
||||
// {name:'my-app',version:'1.0.0'}
|
||||
```
|
||||
|
||||
#### Pretty Printing
|
||||
|
||||
Pass a `space` argument to format the output with indentation:
|
||||
|
||||
```ts
|
||||
const pretty = JSON5.stringify(
|
||||
{
|
||||
name: "my-app",
|
||||
debug: true,
|
||||
tags: ["web", "api"],
|
||||
},
|
||||
null,
|
||||
2,
|
||||
);
|
||||
|
||||
console.log(pretty);
|
||||
// {
|
||||
// name: 'my-app',
|
||||
// debug: true,
|
||||
// tags: [
|
||||
// 'web',
|
||||
// 'api',
|
||||
// ],
|
||||
// }
|
||||
```
|
||||
|
||||
The `space` argument can be a number (the number of spaces per indent level) or a string (used verbatim as the indent):
|
||||
|
||||
```ts
|
||||
// Tab indentation
|
||||
JSON5.stringify(data, null, "\t");
|
||||
```
|
||||
|
||||
#### Special Values
|
||||
|
||||
Unlike `JSON.stringify`, `JSON5.stringify` preserves special numeric values:
|
||||
|
||||
```ts
|
||||
JSON5.stringify({ inf: Infinity, ninf: -Infinity, nan: NaN });
|
||||
// {inf:Infinity,ninf:-Infinity,nan:NaN}
|
||||
```
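
As a quick sketch of how the two halves fit together, parsing the stringified output recovers the same special values:

```ts
import { JSON5 } from "bun";

const original = { max: Infinity, min: -Infinity, missing: NaN };
const text = JSON5.stringify(original);
const roundTripped = JSON5.parse(text);

console.log(roundTripped.max === Infinity); // true
console.log(Number.isNaN(roundTripped.missing)); // true
```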
|
||||
|
||||
---
|
||||
|
||||
## Module Import
|
||||
|
||||
### ES Modules
|
||||
|
||||
You can import JSON5 files directly as ES modules:
|
||||
|
||||
```json5 config.json5
|
||||
{
|
||||
// Database configuration
|
||||
database: {
|
||||
host: "localhost",
|
||||
port: 5432,
|
||||
name: "myapp",
|
||||
},
|
||||
|
||||
features: {
|
||||
auth: true,
|
||||
rateLimit: true,
|
||||
analytics: false,
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
#### Default Import
|
||||
|
||||
```ts app.ts icon="/icons/typescript.svg"
|
||||
import config from "./config.json5";
|
||||
|
||||
console.log(config.database.host); // "localhost"
|
||||
console.log(config.features.auth); // true
|
||||
```
|
||||
|
||||
#### Named Imports
|
||||
|
||||
You can destructure top-level properties as named imports:
|
||||
|
||||
```ts app.ts icon="/icons/typescript.svg"
|
||||
import { database, features } from "./config.json5";
|
||||
|
||||
console.log(database.host); // "localhost"
|
||||
console.log(features.rateLimit); // true
|
||||
```
|
||||
|
||||
### CommonJS
|
||||
|
||||
JSON5 files can also be required in CommonJS:
|
||||
|
||||
```ts app.ts icon="/icons/typescript.svg"
|
||||
const config = require("./config.json5");
|
||||
console.log(config.database.name); // "myapp"
|
||||
|
||||
// Destructuring also works
|
||||
const { database, features } = require("./config.json5");
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Hot Reloading with JSON5
|
||||
|
||||
When you run your application with `bun --hot`, changes to JSON5 files are automatically detected and reloaded:
|
||||
|
||||
```json5 config.json5
|
||||
{
|
||||
server: {
|
||||
port: 3000,
|
||||
host: "localhost",
|
||||
},
|
||||
features: {
|
||||
debug: true,
|
||||
verbose: false,
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
```ts server.ts icon="/icons/typescript.svg"
|
||||
import { server, features } from "./config.json5";
|
||||
|
||||
Bun.serve({
|
||||
port: server.port,
|
||||
hostname: server.host,
|
||||
fetch(req) {
|
||||
if (features.verbose) {
|
||||
console.log(`${req.method} ${req.url}`);
|
||||
}
|
||||
return new Response("Hello World");
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
Run with hot reloading:
|
||||
|
||||
```bash terminal icon="terminal"
|
||||
bun --hot server.ts
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Bundler Integration
|
||||
|
||||
When you import JSON5 files and bundle with Bun, the JSON5 is parsed at build time and included as a JavaScript module:
|
||||
|
||||
```bash terminal icon="terminal"
|
||||
bun build app.ts --outdir=dist
|
||||
```
|
||||
|
||||
This means:
|
||||
|
||||
- Zero runtime JSON5 parsing overhead in production
|
||||
- Smaller bundle sizes
|
||||
- Tree-shaking support for unused properties (named imports)
|
||||
|
||||
### Dynamic Imports
|
||||
|
||||
JSON5 files can be dynamically imported:
|
||||
|
||||
```ts
|
||||
const config = await import("./config.json5");
|
||||
```
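
As with static imports, the parsed object is available as the module's default export, and top-level keys can be destructured. A small sketch based on the `config.json5` file above:

```ts
// Destructure the default export and a named export from a dynamic import.
const { default: config, features } = await import("./config.json5");

if (features.auth) {
  console.log("auth enabled for", config.database.name); // "myapp"
}
```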
|
||||
188
docs/runtime/jsonl.mdx
Normal file
@@ -0,0 +1,188 @@
|
||||
---
|
||||
title: JSONL
|
||||
description: Parse newline-delimited JSON (JSONL) with Bun's built-in streaming parser
|
||||
---
|
||||
|
||||
Bun has built-in support for parsing [JSONL](https://jsonlines.org/) (newline-delimited JSON), where each line is a separate JSON value. The parser is implemented in C++ using JavaScriptCore's optimized JSON parser and supports streaming use cases.
|
||||
|
||||
```ts
|
||||
const results = Bun.JSONL.parse('{"name":"Alice"}\n{"name":"Bob"}\n');
|
||||
// [{ name: "Alice" }, { name: "Bob" }]
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## `Bun.JSONL.parse()`
|
||||
|
||||
Parse a complete JSONL input and return an array of all parsed values.
|
||||
|
||||
```ts
|
||||
import { JSONL } from "bun";
|
||||
|
||||
const input = '{"id":1,"name":"Alice"}\n{"id":2,"name":"Bob"}\n{"id":3,"name":"Charlie"}\n';
|
||||
const records = JSONL.parse(input);
|
||||
console.log(records);
|
||||
// [
|
||||
// { id: 1, name: "Alice" },
|
||||
// { id: 2, name: "Bob" },
|
||||
// { id: 3, name: "Charlie" }
|
||||
// ]
|
||||
```
|
||||
|
||||
Input can be a string or a `Uint8Array`:
|
||||
|
||||
```ts
|
||||
const buffer = new TextEncoder().encode('{"a":1}\n{"b":2}\n');
|
||||
const results = Bun.JSONL.parse(buffer);
|
||||
// [{ a: 1 }, { b: 2 }]
|
||||
```
|
||||
|
||||
When passed a `Uint8Array`, a UTF-8 BOM at the start of the buffer is automatically skipped.
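
For example, a JSONL file on disk can be read as raw bytes and parsed in one call (a sketch; `logs.jsonl` is a hypothetical file name):

```ts
// Read the file as a Uint8Array and parse every line at once.
const bytes = await Bun.file("logs.jsonl").bytes();
const records = Bun.JSONL.parse(bytes);

for (const record of records) {
  console.log(record);
}
```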
|
||||
|
||||
### Error handling
|
||||
|
||||
If the input contains invalid JSON, `Bun.JSONL.parse()` throws a `SyntaxError`:
|
||||
|
||||
```ts
|
||||
try {
|
||||
Bun.JSONL.parse('{"valid":true}\n{invalid}\n');
|
||||
} catch (error) {
|
||||
console.error(error); // SyntaxError: Failed to parse JSONL
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## `Bun.JSONL.parseChunk()`
|
||||
|
||||
For streaming scenarios, `parseChunk` parses as many complete values as possible from the input and reports how far it got. This is useful when receiving data incrementally (e.g., from a network stream) and you need to know where to resume parsing.
|
||||
|
||||
```ts
|
||||
const chunk = '{"id":1}\n{"id":2}\n{"id":3';
|
||||
|
||||
const result = Bun.JSONL.parseChunk(chunk);
|
||||
console.log(result.values); // [{ id: 1 }, { id: 2 }]
|
||||
console.log(result.read); // 17 — characters consumed
|
||||
console.log(result.done); // false — incomplete value remains
|
||||
console.log(result.error); // null — no parse error
|
||||
```
|
||||
|
||||
### Return value
|
||||
|
||||
`parseChunk` returns an object with four properties:
|
||||
|
||||
| Property | Type | Description |
|
||||
| -------- | --------------------- | ----------------------------------------------------------------------- |
|
||||
| `values` | `any[]` | Array of successfully parsed JSON values |
|
||||
| `read` | `number` | Number of bytes (for `Uint8Array`) or characters (for strings) consumed |
|
||||
| `done` | `boolean` | `true` if the entire input was consumed with no remaining data |
|
||||
| `error` | `SyntaxError \| null` | Parse error, or `null` if no error occurred |
|
||||
|
||||
### Streaming example
|
||||
|
||||
Use `read` to slice off consumed input and carry forward the remainder:
|
||||
|
||||
```ts
|
||||
let buffer = "";
|
||||
|
||||
async function processStream(stream: ReadableStream<string>) {
|
||||
for await (const chunk of stream) {
|
||||
buffer += chunk;
|
||||
const result = Bun.JSONL.parseChunk(buffer);
|
||||
|
||||
for (const value of result.values) {
|
||||
handleRecord(value);
|
||||
}
|
||||
|
||||
// Keep only the unconsumed portion
|
||||
buffer = buffer.slice(result.read);
|
||||
}
|
||||
|
||||
// Handle any remaining data
|
||||
if (buffer.length > 0) {
|
||||
const final = Bun.JSONL.parseChunk(buffer);
|
||||
for (const value of final.values) {
|
||||
handleRecord(value);
|
||||
}
|
||||
if (final.error) {
|
||||
console.error("Parse error in final chunk:", final.error.message);
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Byte offsets with `Uint8Array`
|
||||
|
||||
When the input is a `Uint8Array`, you can pass optional `start` and `end` byte offsets:
|
||||
|
||||
```ts
|
||||
const buf = new TextEncoder().encode('{"a":1}\n{"b":2}\n{"c":3}\n');
|
||||
|
||||
// Parse starting from byte 8
|
||||
const result = Bun.JSONL.parseChunk(buf, 8);
|
||||
console.log(result.values); // [{ b: 2 }, { c: 3 }]
|
||||
console.log(result.read); // 24
|
||||
|
||||
// Parse a specific range
|
||||
const partial = Bun.JSONL.parseChunk(buf, 0, 8);
|
||||
console.log(partial.values); // [{ a: 1 }]
|
||||
```
|
||||
|
||||
The `read` value is always a byte offset into the original buffer, making it easy to use with `TypedArray.subarray()` for zero-copy streaming:
|
||||
|
||||
```ts
|
||||
let buf = new Uint8Array(0);
|
||||
|
||||
async function processBinaryStream(stream: ReadableStream<Uint8Array>) {
|
||||
for await (const chunk of stream) {
|
||||
// Append chunk to buffer
|
||||
const newBuf = new Uint8Array(buf.length + chunk.length);
|
||||
newBuf.set(buf);
|
||||
newBuf.set(chunk, buf.length);
|
||||
buf = newBuf;
|
||||
|
||||
const result = Bun.JSONL.parseChunk(buf);
|
||||
|
||||
for (const value of result.values) {
|
||||
handleRecord(value);
|
||||
}
|
||||
|
||||
// Keep unconsumed bytes
|
||||
buf = buf.slice(result.read);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Error recovery
|
||||
|
||||
Unlike `parse()`, `parseChunk()` does not throw on invalid JSON. Instead, it returns the error in the `error` property, along with any values that were successfully parsed before the error:
|
||||
|
||||
```ts
|
||||
const input = '{"a":1}\n{invalid}\n{"b":2}\n';
|
||||
const result = Bun.JSONL.parseChunk(input);
|
||||
|
||||
console.log(result.values); // [{ a: 1 }] — values parsed before the error
|
||||
console.log(result.error); // SyntaxError
|
||||
console.log(result.read); // 7 — position up to last successful parse
|
||||
```
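
If you want to skip a corrupt line and keep going, one approach (a sketch, assuming one value per line) is to drop everything up to the next newline after the failure point and parse the remainder:

```ts
const input = '{"a":1}\n{not json}\n{"b":2}\n';
const first = Bun.JSONL.parseChunk(input);
console.log(first.values); // [{ a: 1 }]

if (first.error) {
  // `read` stops at the end of the last good value, so the bad line is the
  // next line after it. Cut everything up to and including its newline.
  const remainder = input.slice(first.read).replace(/^\s*/, "");
  const resumed = remainder.slice(remainder.indexOf("\n") + 1);
  const recovered = Bun.JSONL.parseChunk(resumed);
  console.log(recovered.values); // [{ b: 2 }]
}
```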
|
||||
|
||||
---
|
||||
|
||||
## Supported value types
|
||||
|
||||
Each line can be any valid JSON value, not just objects:
|
||||
|
||||
```ts
|
||||
const input = '42\n"hello"\ntrue\nnull\n[1,2,3]\n{"key":"value"}\n';
|
||||
const values = Bun.JSONL.parse(input);
|
||||
// [42, "hello", true, null, [1, 2, 3], { key: "value" }]
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Performance notes
|
||||
|
||||
- **ASCII fast path**: Pure ASCII input is parsed directly without copying, using a zero-allocation `StringView`.
|
||||
- **UTF-8 support**: Non-ASCII `Uint8Array` input is decoded to UTF-16 using SIMD-accelerated conversion.
|
||||
- **BOM handling**: UTF-8 BOM (`0xEF 0xBB 0xBF`) at the start of a `Uint8Array` is automatically skipped.
|
||||
- **Pre-built object shape**: The result object from `parseChunk` uses a cached structure for fast property access.
|
||||
344
docs/runtime/markdown.mdx
Normal file
@@ -0,0 +1,344 @@
|
||||
---
|
||||
title: Markdown
|
||||
description: Parse and render Markdown with Bun's built-in Markdown API, supporting GFM extensions and custom rendering callbacks
|
||||
---
|
||||
|
||||
{% callout type="note" %}
|
||||
**Unstable API** — This API is under active development and may change in future versions of Bun.
|
||||
{% /callout %}
|
||||
|
||||
Bun includes a fast, built-in Markdown parser written in Zig. It supports GitHub Flavored Markdown (GFM) extensions and provides three APIs:
|
||||
|
||||
- `Bun.markdown.html()` — render Markdown to an HTML string
|
||||
- `Bun.markdown.render()` — render Markdown with custom callbacks for each element
|
||||
- `Bun.markdown.react()` — render Markdown to React JSX elements
|
||||
|
||||
---
|
||||
|
||||
## `Bun.markdown.html()`
|
||||
|
||||
Convert a Markdown string to HTML.
|
||||
|
||||
```ts
|
||||
const html = Bun.markdown.html("# Hello **world**");
|
||||
// "<h1>Hello <strong>world</strong></h1>\n"
|
||||
```
|
||||
|
||||
GFM extensions like tables, strikethrough, and task lists are enabled by default:
|
||||
|
||||
```ts
|
||||
const html = Bun.markdown.html(`
|
||||
| Feature | Status |
|
||||
|-------------|--------|
|
||||
| Tables | ~~done~~ |
|
||||
| Strikethrough| ~~done~~ |
|
||||
| Task lists | done |
|
||||
`);
|
||||
```
|
||||
|
||||
### Options
|
||||
|
||||
Pass an options object as the second argument to configure the parser:
|
||||
|
||||
```ts
|
||||
const html = Bun.markdown.html("some markdown", {
|
||||
tables: true, // GFM tables (default: true)
|
||||
strikethrough: true, // GFM strikethrough (default: true)
|
||||
tasklists: true, // GFM task lists (default: true)
|
||||
tagFilter: true, // GFM tag filter for disallowed HTML tags
|
||||
autolinks: true, // Autolink URLs, emails, and www. links
|
||||
});
|
||||
```
|
||||
|
||||
All available options:
|
||||
|
||||
| Option | Default | Description |
|
||||
| ---------------------- | ------- | ----------------------------------------------------------- |
|
||||
| `tables`              | `true`  | GFM tables                                                  |
|
||||
| `strikethrough`        | `true`  | GFM strikethrough (`~~text~~`)                              |
|
||||
| `tasklists`            | `true`  | GFM task lists (`- [x] item`)                               |
|
||||
| `autolinks` | `false` | Enable autolinks — see [Autolinks](#autolinks) |
|
||||
| `headings` | `false` | Heading IDs and autolinks — see [Heading IDs](#heading-ids) |
|
||||
| `hardSoftBreaks` | `false` | Treat soft line breaks as hard breaks |
|
||||
| `wikiLinks` | `false` | Enable `[[wiki links]]` |
|
||||
| `underline` | `false` | `__text__` renders as `<u>` instead of `<strong>` |
|
||||
| `latexMath` | `false` | Enable `$inline$` and `$$display$$` math |
|
||||
| `collapseWhitespace` | `false` | Collapse whitespace in text |
|
||||
| `permissiveAtxHeaders` | `false` | ATX headers without space after `#` |
|
||||
| `noIndentedCodeBlocks` | `false` | Disable indented code blocks |
|
||||
| `noHtmlBlocks` | `false` | Disable HTML blocks |
|
||||
| `noHtmlSpans` | `false` | Disable inline HTML |
|
||||
| `tagFilter` | `false` | GFM tag filter for disallowed HTML tags |
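
The `tagFilter` option applies the GFM tag filter to raw HTML, which is useful when rendering untrusted input. A hedged sketch (the comment describes the effect rather than quoting the exact output string):

```ts
const safe = Bun.markdown.html("<script>alert(1)</script>", { tagFilter: true });
// Disallowed tags such as <script> have their leading `<` escaped to `&lt;`,
// so they render as text instead of as live HTML.
```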
|
||||
|
||||
#### Autolinks
|
||||
|
||||
Pass `true` to enable all autolink types, or an object for granular control:
|
||||
|
||||
```ts
|
||||
// Enable all autolinks (URL, WWW, email)
|
||||
Bun.markdown.html("Visit www.example.com", { autolinks: true });
|
||||
|
||||
// Enable only specific types
|
||||
Bun.markdown.html("Visit www.example.com", {
|
||||
autolinks: { url: true, www: true },
|
||||
});
|
||||
```
|
||||
|
||||
#### Heading IDs
|
||||
|
||||
Pass `true` to enable both heading IDs and autolink headings, or an object for granular control:
|
||||
|
||||
```ts
|
||||
// Enable heading IDs and autolink headings
|
||||
Bun.markdown.html("## Hello World", { headings: true });
|
||||
// '<h2 id="hello-world"><a href="#hello-world">Hello World</a></h2>\n'
|
||||
|
||||
// Enable only heading IDs (no autolink)
|
||||
Bun.markdown.html("## Hello World", { headings: { ids: true } });
|
||||
// '<h2 id="hello-world">Hello World</h2>\n'
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## `Bun.markdown.render()`
|
||||
|
||||
Parse Markdown and render it using custom JavaScript callbacks. This gives you full control over the output format — you can generate HTML with custom classes, React elements, ANSI terminal output, or any other string format.
|
||||
|
||||
```ts
|
||||
const result = Bun.markdown.render("# Hello **world**", {
|
||||
heading: (children, { level }) => `<h${level} class="title">${children}</h${level}>`,
|
||||
strong: children => `<b>${children}</b>`,
|
||||
paragraph: children => `<p>${children}</p>`,
|
||||
});
|
||||
// '<h1 class="title">Hello <b>world</b></h1>'
|
||||
```
|
||||
|
||||
### Callback signature
|
||||
|
||||
Each callback receives:
|
||||
|
||||
1. **`children`** — the accumulated content of the element as a string
|
||||
2. **`meta`** (optional) — an object with element-specific metadata
|
||||
|
||||
Return a string to replace the element's rendering. Return `null` or `undefined` to omit the element from the output entirely. If no callback is registered for an element, its children pass through unchanged.
|
||||
|
||||
### Block callbacks
|
||||
|
||||
| Callback | Meta | Description |
|
||||
| ------------ | ------------------------------------------- | ---------------------------------------------------------------------------------------- |
|
||||
| `heading` | `{ level: number, id?: string }` | Heading level 1–6. `id` is set when `headings: { ids: true }` is enabled |
|
||||
| `paragraph` | — | Paragraph block |
|
||||
| `blockquote` | — | Blockquote block |
|
||||
| `code` | `{ language?: string }` | Fenced or indented code block. `language` is the info-string when specified on the fence |
|
||||
| `list` | `{ ordered: boolean, start?: number }` | Ordered or unordered list. `start` is the start number for ordered lists |
|
||||
| `listItem` | `{ checked?: boolean }` | List item. `checked` is set for task list items (`- [x]` / `- [ ]`) |
|
||||
| `hr` | — | Horizontal rule |
|
||||
| `table` | — | Table block |
|
||||
| `thead` | — | Table head |
|
||||
| `tbody` | — | Table body |
|
||||
| `tr` | — | Table row |
|
||||
| `th` | `{ align?: "left" \| "center" \| "right" }` | Table header cell. `align` is set when alignment is specified |
|
||||
| `td` | `{ align?: "left" \| "center" \| "right" }` | Table data cell. `align` is set when alignment is specified |
|
||||
| `html` | — | Raw HTML content |
|
||||
|
||||
### Inline callbacks
|
||||
|
||||
| Callback | Meta | Description |
|
||||
| --------------- | ---------------------------------- | ---------------------------- |
|
||||
| `strong` | — | Strong emphasis (`**text**`) |
|
||||
| `emphasis` | — | Emphasis (`*text*`) |
|
||||
| `link` | `{ href: string, title?: string }` | Link |
|
||||
| `image` | `{ src: string, title?: string }` | Image |
|
||||
| `codespan` | — | Inline code (`` `code` ``) |
|
||||
| `strikethrough` | — | Strikethrough (`~~text~~`) |
|
||||
| `text` | — | Plain text content |
|
||||
|
||||
### Examples
|
||||
|
||||
#### Custom HTML with classes
|
||||
|
||||
```ts
|
||||
const html = Bun.markdown.render("# Title\n\nHello **world**", {
|
||||
heading: (children, { level }) => `<h${level} class="heading heading-${level}">${children}</h${level}>`,
|
||||
paragraph: children => `<p class="body">${children}</p>`,
|
||||
strong: children => `<strong class="bold">${children}</strong>`,
|
||||
});
|
||||
```
|
||||
|
||||
#### Stripping all formatting
|
||||
|
||||
```ts
|
||||
const plaintext = Bun.markdown.render("# Hello **world**", {
|
||||
heading: children => children,
|
||||
paragraph: children => children,
|
||||
strong: children => children,
|
||||
emphasis: children => children,
|
||||
link: children => children,
|
||||
image: () => "",
|
||||
code: children => children,
|
||||
codespan: children => children,
|
||||
});
|
||||
// "Hello world"
|
||||
```
|
||||
|
||||
#### Omitting elements
|
||||
|
||||
Return `null` or `undefined` to remove an element from the output:
|
||||
|
||||
```ts
|
||||
const result = Bun.markdown.render("# Title\n\n\n\nHello", {
|
||||
image: () => null, // Remove all images
|
||||
heading: children => children,
|
||||
paragraph: children => children + "\n",
|
||||
});
|
||||
// "Title\nHello\n"
|
||||
```
|
||||
|
||||
#### ANSI terminal output
|
||||
|
||||
```ts
|
||||
const ansi = Bun.markdown.render("# Hello\n\nThis is **bold** and *italic*", {
|
||||
heading: (children, { level }) => `\x1b[1;4m${children}\x1b[0m\n`,
|
||||
paragraph: children => children + "\n",
|
||||
strong: children => `\x1b[1m${children}\x1b[22m`,
|
||||
emphasis: children => `\x1b[3m${children}\x1b[23m`,
|
||||
});
|
||||
```
|
||||
|
||||
#### Code block syntax highlighting
|
||||
|
||||
````ts
|
||||
const result = Bun.markdown.render("```js\nconsole.log('hi')\n```", {
|
||||
code: (children, meta) => {
|
||||
const lang = meta?.language ?? "";
|
||||
return `<pre><code class="language-${lang}">${children}</code></pre>`;
|
||||
},
|
||||
});
|
||||
````
|
||||
|
||||
### Parser options
|
||||
|
||||
Parser options are passed as a separate third argument:
|
||||
|
||||
```ts
|
||||
const result = Bun.markdown.render(
|
||||
"Visit www.example.com",
|
||||
{
|
||||
link: (children, { href }) => `[${children}](${href})`,
|
||||
paragraph: children => children,
|
||||
},
|
||||
{ autolinks: true },
|
||||
);
|
||||
```
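
Because callbacks run for every element, they can also be used to collect structure while rendering. A small sketch (assuming `headings: { ids: true }` so `meta.id` is populated) that builds a table of contents as a side effect:

```ts
const toc: { level: number; id?: string; text: string }[] = [];

const html = Bun.markdown.render(
  "# Intro\n\n## Usage\n\n## API",
  {
    heading: (children, { level, id }) => {
      // Record each heading, then emit it as regular HTML.
      toc.push({ level, id, text: children });
      return `<h${level} id="${id}">${children}</h${level}>`;
    },
  },
  { headings: { ids: true } },
);

console.log(toc);
// [
//   { level: 1, id: "intro", text: "Intro" },
//   { level: 2, id: "usage", text: "Usage" },
//   { level: 2, id: "api", text: "API" },
// ]
```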
|
||||
|
||||
---
|
||||
|
||||
## `Bun.markdown.react()`
|
||||
|
||||
Render Markdown directly to React elements. Returns a `<Fragment>` that you can use as a component return value.
|
||||
|
||||
```tsx
|
||||
function Markdown({ text }: { text: string }) {
|
||||
return Bun.markdown.react(text);
|
||||
}
|
||||
```
|
||||
|
||||
### Server-side rendering
|
||||
|
||||
Works with `renderToString()` and React Server Components:
|
||||
|
||||
```tsx
|
||||
import { renderToString } from "react-dom/server";
|
||||
|
||||
const html = renderToString(Bun.markdown.react("# Hello **world**"));
|
||||
// "<h1>Hello <strong>world</strong></h1>"
|
||||
```
|
||||
|
||||
### Component overrides
|
||||
|
||||
Replace any HTML element with a custom React component by passing it in the second argument, keyed by tag name:
|
||||
|
||||
```tsx
|
||||
function Code({ language, children }) {
|
||||
return (
|
||||
<pre data-language={language}>
|
||||
<code>{children}</code>
|
||||
</pre>
|
||||
);
|
||||
}
|
||||
|
||||
function Link({ href, title, children }) {
|
||||
return (
|
||||
<a href={href} title={title} target="_blank" rel="noopener noreferrer">
|
||||
{children}
|
||||
</a>
|
||||
);
|
||||
}
|
||||
|
||||
function Heading({ id, children }) {
|
||||
return (
|
||||
<h2 id={id}>
|
||||
<a href={`#${id}`}>{children}</a>
|
||||
</h2>
|
||||
);
|
||||
}
|
||||
|
||||
const el = Bun.markdown.react(
|
||||
content,
|
||||
{
|
||||
pre: Code,
|
||||
a: Link,
|
||||
h2: Heading,
|
||||
},
|
||||
{ headings: { ids: true } },
|
||||
);
|
||||
```
|
||||
|
||||
#### Available overrides
|
||||
|
||||
Every HTML tag produced by the parser can be overridden:
|
||||
|
||||
| Option | Props | Description |
|
||||
| ------------ | ---------------------------- | --------------------------------------------------------------- |
|
||||
| `h1`–`h6` | `{ id?, children }` | Headings. `id` is set when `headings: { ids: true }` is enabled |
|
||||
| `p` | `{ children }` | Paragraph |
|
||||
| `blockquote` | `{ children }` | Blockquote |
|
||||
| `pre` | `{ language?, children }` | Code block. `language` is the info string (e.g. `"js"`) |
|
||||
| `hr` | `{}` | Horizontal rule (no children) |
|
||||
| `ul` | `{ children }` | Unordered list |
|
||||
| `ol` | `{ start, children }` | Ordered list. `start` is the first item number |
|
||||
| `li` | `{ checked?, children }` | List item. `checked` is set for task list items |
|
||||
| `table` | `{ children }` | Table |
|
||||
| `thead` | `{ children }` | Table head |
|
||||
| `tbody` | `{ children }` | Table body |
|
||||
| `tr` | `{ children }` | Table row |
|
||||
| `th` | `{ align?, children }` | Table header cell |
|
||||
| `td` | `{ align?, children }` | Table data cell |
|
||||
| `em` | `{ children }` | Emphasis (`*text*`) |
|
||||
| `strong` | `{ children }` | Strong (`**text**`) |
|
||||
| `a` | `{ href, title?, children }` | Link |
|
||||
| `img` | `{ src, alt?, title? }` | Image (no children) |
|
||||
| `code` | `{ children }` | Inline code |
|
||||
| `del` | `{ children }` | Strikethrough (`~~text~~`) |
|
||||
| `br` | `{}` | Hard line break (no children) |
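
For example, GFM task list items can be turned into real checkboxes by overriding `li` and reading the `checked` prop (a sketch; `TaskItem` is an illustrative component name):

```tsx
function TaskItem({ checked, children }) {
  // `checked` is undefined for ordinary list items, so fall back to a plain <li>.
  if (checked === undefined) return <li>{children}</li>;
  return (
    <li>
      <input type="checkbox" checked={checked} readOnly /> {children}
    </li>
  );
}

const el = Bun.markdown.react("- [x] ship docs\n- [ ] write tests", { li: TaskItem });
```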
|
||||
|
||||
### React 18 and older
|
||||
|
||||
By default, elements use `Symbol.for('react.transitional.element')` as the `$$typeof` symbol. For React 18 and older, pass `reactVersion: 18` in the options (third argument):
|
||||
|
||||
```tsx
|
||||
function Markdown({ text }: { text: string }) {
|
||||
return Bun.markdown.react(text, undefined, { reactVersion: 18 });
|
||||
}
|
||||
```
|
||||
|
||||
### Parser options
|
||||
|
||||
All [parser options](#options) are passed as the third argument:
|
||||
|
||||
```tsx
|
||||
const el = Bun.markdown.react("## Hello World", undefined, {
|
||||
headings: { ids: true },
|
||||
autolinks: true,
|
||||
});
|
||||
```
|
||||
@@ -165,7 +165,7 @@ This page is updated regularly to reflect compatibility status of the latest ver
|
||||
|
||||
### [`node:inspector`](https://nodejs.org/api/inspector.html)
|
||||
|
||||
🔴 Not implemented.
|
||||
🟡 Partially implemented. `Profiler` API is supported (`Profiler.enable`, `Profiler.disable`, `Profiler.start`, `Profiler.stop`, `Profiler.setSamplingInterval`). Other inspector APIs are not yet implemented.
|
||||
|
||||
### [`node:repl`](https://nodejs.org/api/repl.html)
|
||||
|
||||
|
||||
@@ -135,6 +135,18 @@ await s3file.write(JSON.stringify({ name: "John", age: 30 }), {
|
||||
type: "application/json",
|
||||
});
|
||||
|
||||
// Write with content encoding (e.g. for pre-compressed data)
|
||||
await s3file.write(compressedData, {
|
||||
type: "application/json",
|
||||
contentEncoding: "gzip",
|
||||
});
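// (Illustrative: `compressedData` could be produced ahead of time, e.g.
// `const compressedData = Bun.gzipSync(Buffer.from(JSON.stringify(payload)))`.)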
|
||||
|
||||
// Write with content disposition
|
||||
await s3file.write(pdfData, {
|
||||
type: "application/pdf",
|
||||
contentDisposition: 'attachment; filename="report.pdf"',
|
||||
});
|
||||
|
||||
// Write using a writer (streaming)
|
||||
const writer = s3file.writer({ type: "application/json" });
|
||||
writer.write("Hello");
|
||||
@@ -188,7 +200,13 @@ const download = s3.presign("my-file.txt"); // GET, text/plain, expires in 24 ho
|
||||
const upload = s3.presign("my-file", {
|
||||
expiresIn: 3600, // 1 hour
|
||||
method: "PUT",
|
||||
type: "application/json", // No extension for inferring, so we can specify the content type to be JSON
|
||||
type: "application/json", // Sets response-content-type in the presigned URL
|
||||
});
|
||||
|
||||
// Presign with content disposition (e.g. force download with a specific filename)
|
||||
const downloadUrl = s3.presign("report.pdf", {
|
||||
expiresIn: 3600,
|
||||
contentDisposition: 'attachment; filename="quarterly-report.pdf"',
|
||||
});
|
||||
|
||||
// You can call .presign() if on a file reference, but avoid doing so
|
||||
|
||||
@@ -460,7 +460,7 @@ console.log(result); // Blob(13) { size: 13, type: "text/plain" }
|
||||
For cross-platform compatibility, Bun Shell implements a set of builtin commands, in addition to reading commands from the PATH environment variable.
|
||||
|
||||
- `cd`: change the working directory
|
||||
- `ls`: list files in a directory
|
||||
- `ls`: list files in a directory (supports `-l` for long listing format)
|
||||
- `rm`: remove files and directories
|
||||
- `echo`: print text
|
||||
- `pwd`: print the working directory
|
||||
|
||||
@@ -880,6 +880,94 @@ npm/strip-ansi 212,992 chars long-ansi 1.36 ms/iter 1.38 ms
|
||||
|
||||
---
|
||||
|
||||
## `Bun.wrapAnsi()`
|
||||
|
||||
<Note>Drop-in replacement for `wrap-ansi` npm package</Note>
|
||||
|
||||
`Bun.wrapAnsi(input: string, columns: number, options?: WrapAnsiOptions): string`
|
||||
|
||||
Wrap text to a specified column width while preserving ANSI escape codes, hyperlinks, and handling Unicode/emoji width correctly. This is a native, high-performance alternative to the popular [`wrap-ansi`](https://www.npmjs.com/package/wrap-ansi) npm package.
|
||||
|
||||
```ts
|
||||
// Basic wrapping at 20 columns
|
||||
Bun.wrapAnsi("The quick brown fox jumps over the lazy dog", 20);
|
||||
// => "The quick brown fox\njumps over the lazy\ndog"
|
||||
|
||||
// Preserves ANSI escape codes
|
||||
Bun.wrapAnsi("\u001b[31mThe quick brown fox jumps over the lazy dog\u001b[0m", 20);
|
||||
// => "\u001b[31mThe quick brown fox\njumps over the lazy\ndog\u001b[0m"
|
||||
```
|
||||
|
||||
### Options
|
||||
|
||||
```ts
|
||||
Bun.wrapAnsi("Hello World", 5, {
|
||||
hard: true, // Break words that exceed column width (default: false)
|
||||
wordWrap: true, // Wrap at word boundaries (default: true)
|
||||
trim: true, // Trim leading/trailing whitespace per line (default: true)
|
||||
ambiguousIsNarrow: true, // Treat ambiguous-width characters as narrow (default: true)
|
||||
});
|
||||
```
|
||||
|
||||
| Option | Default | Description |
|
||||
| ------------------- | ------- | --------------------------------------------------------------------------------------------------------------- |
|
||||
| `hard` | `false` | If `true`, break words in the middle if they exceed the column width. |
|
||||
| `wordWrap` | `true` | If `true`, wrap at word boundaries. If `false`, only break at explicit newlines. |
|
||||
| `trim` | `true` | If `true`, trim leading and trailing whitespace from each line. |
|
||||
| `ambiguousIsNarrow` | `true` | If `true`, treat ambiguous-width Unicode characters as 1 column wide. If `false`, treat them as 2 columns wide. |
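
For example, the `hard` option controls what happens when a single word is wider than the column limit (this mirrors the example in the type definition below):

```ts
// Split a word that is longer than the column width.
Bun.wrapAnsi("abcdefghij", 3, { hard: true });
// => "abc\ndef\nghi\nj"
```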
|
||||
|
||||
TypeScript definition:
|
||||
|
||||
```ts expandable
|
||||
namespace Bun {
|
||||
export function wrapAnsi(
|
||||
/**
|
||||
* The string to wrap
|
||||
*/
|
||||
input: string,
|
||||
/**
|
||||
* The maximum column width
|
||||
*/
|
||||
columns: number,
|
||||
/**
|
||||
* Wrapping options
|
||||
*/
|
||||
options?: {
|
||||
/**
|
||||
* If `true`, break words in the middle if they don't fit on a line.
|
||||
* If `false`, only break at word boundaries.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
hard?: boolean;
|
||||
/**
|
||||
* If `true`, wrap at word boundaries when possible.
|
||||
* If `false`, don't perform word wrapping (only wrap at explicit newlines).
|
||||
*
|
||||
* @default true
|
||||
*/
|
||||
wordWrap?: boolean;
|
||||
/**
|
||||
* If `true`, trim leading and trailing whitespace from each line.
|
||||
* If `false`, preserve whitespace.
|
||||
*
|
||||
* @default true
|
||||
*/
|
||||
trim?: boolean;
|
||||
/**
|
||||
* When it's ambiguous and `true`, count ambiguous width characters as 1 character wide.
|
||||
* If `false`, count them as 2 characters wide.
|
||||
*
|
||||
* @default true
|
||||
*/
|
||||
ambiguousIsNarrow?: boolean;
|
||||
},
|
||||
): string;
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## `serialize` & `deserialize` in `bun:jsc`
|
||||
|
||||
To save a JavaScript value into an ArrayBuffer & back, use `serialize` and `deserialize` from the `"bun:jsc"` module.
|
||||
|
||||
@@ -50,7 +50,8 @@ bun build <entry points>
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="--format" type="string" default="esm">
|
||||
Module format of the output bundle. One of <code>esm</code>, <code>cjs</code>, or <code>iife</code>
|
||||
Module format of the output bundle. One of <code>esm</code>, <code>cjs</code>, or <code>iife</code>. Defaults to{" "}
|
||||
<code>cjs</code> when <code>--bytecode</code> is used.
|
||||
</ParamField>
|
||||
|
||||
### File Naming
|
||||
|
||||
@@ -40,6 +40,18 @@ bun run <file or script>
|
||||
Run a script in all workspace packages (from the <code>workspaces</code> field in <code>package.json</code>)
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="--parallel" type="boolean">
|
||||
Run multiple scripts or workspace scripts concurrently with prefixed output
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="--sequential" type="boolean">
|
||||
Run multiple scripts or workspace scripts one after another with prefixed output
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="--no-exit-on-error" type="boolean">
|
||||
When using <code>--parallel</code> or <code>--sequential</code>, continue running other scripts when one fails
|
||||
</ParamField>
|
||||
|
||||
### Runtime & Process Control
|
||||
|
||||
<ParamField path="--bun" type="boolean">
|
||||
|
||||
@@ -131,6 +131,7 @@
|
||||
stdenv = pkgs.clangStdenv;
|
||||
}) {
|
||||
inherit packages;
|
||||
hardeningDisable = [ "fortify" ];
|
||||
|
||||
shellHook = ''
|
||||
# Set up build environment
|
||||
|
||||
24
meta.json
Normal file
@@ -0,0 +1,24 @@
|
||||
{
|
||||
"inputs": {
|
||||
"../../tmp/test-entry.js": {
|
||||
"bytes": 21,
|
||||
"imports": [
|
||||
],
|
||||
"format": "esm"
|
||||
}
|
||||
},
|
||||
"outputs": {
|
||||
"./test-entry.js": {
|
||||
"bytes": 49,
|
||||
"inputs": {
|
||||
"../../tmp/test-entry.js": {
|
||||
"bytesInOutput": 22
|
||||
}
|
||||
},
|
||||
"imports": [
|
||||
],
|
||||
"exports": [],
|
||||
"entryPoint": "../../tmp/test-entry.js"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -35,7 +35,7 @@ JSC_DEFINE_HOST_FUNCTION(jsFunctionWrite, (JSC::JSGlobalObject * globalObject,
|
||||
JSValue arg1 = callframe->argument(0);
|
||||
JSValue toWriteArg = callframe->argument(1);
|
||||
auto &vm = globalObject->vm();
|
||||
auto scope = DECLARE_CATCH_SCOPE(vm);
|
||||
auto scope = DECLARE_TOP_EXCEPTION_SCOPE(vm);
|
||||
|
||||
int32_t fd = STDOUT_FILENO;
|
||||
if (callframe->argumentCount() > 1) {
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"private": true,
|
||||
"name": "bun",
|
||||
"version": "1.3.7",
|
||||
"version": "1.3.9",
|
||||
"workspaces": [
|
||||
"./packages/bun-types",
|
||||
"./packages/@types/bun"
|
||||
|
||||
@@ -71,8 +71,25 @@ async function buildRootModule(dryRun?: boolean) {
|
||||
js: "// Source code: https://github.com/oven-sh/bun/blob/main/packages/bun-release/scripts/npm-postinstall.ts",
|
||||
},
|
||||
});
|
||||
write(join(cwd, "bin", "bun.exe"), "");
|
||||
write(join(cwd, "bin", "bunx.exe"), "");
|
||||
// Create placeholder scripts that print an error message if postinstall hasn't run.
|
||||
// On Unix, these are executed as shell scripts despite the .exe extension.
|
||||
// Do NOT add a shebang (#!/bin/sh) here — npm's cmd-shim reads shebangs to generate
|
||||
// .ps1/.cmd wrappers BEFORE postinstall runs, and bakes the interpreter path in.
|
||||
// A #!/bin/sh shebang breaks Windows because the wrappers reference /bin/sh which
|
||||
// doesn't exist, even after postinstall replaces the placeholder with the real binary.
|
||||
const placeholderScript = `echo "Error: Bun's postinstall script was not run." >&2
|
||||
echo "" >&2
|
||||
echo "This occurs when using --ignore-scripts during installation, or when using a" >&2
|
||||
echo "package manager like pnpm that does not run postinstall scripts by default." >&2
|
||||
echo "" >&2
|
||||
echo "To fix this, run the postinstall script manually:" >&2
|
||||
echo " cd node_modules/bun && node install.js" >&2
|
||||
echo "" >&2
|
||||
echo "Or reinstall bun without the --ignore-scripts flag." >&2
|
||||
exit 1
|
||||
`;
|
||||
write(join(cwd, "bin", "bun.exe"), placeholderScript);
|
||||
write(join(cwd, "bin", "bunx.exe"), placeholderScript);
|
||||
write(
|
||||
join(cwd, "bin", "README.txt"),
|
||||
`The 'bun.exe' file is a placeholder for the binary file, which
|
||||
|
||||
693
packages/bun-types/bun.d.ts
vendored
@@ -610,6 +610,97 @@ declare module "bun" {
|
||||
*/
|
||||
function stripANSI(input: string): string;
|
||||
|
||||
interface WrapAnsiOptions {
|
||||
/**
|
||||
* If `true`, break words in the middle if they don't fit on a line.
|
||||
* If `false`, only break at word boundaries.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
hard?: boolean;
|
||||
|
||||
/**
|
||||
* If `true`, wrap at word boundaries when possible.
|
||||
* If `false`, don't perform word wrapping (only wrap at explicit newlines).
|
||||
*
|
||||
* @default true
|
||||
*/
|
||||
wordWrap?: boolean;
|
||||
|
||||
/**
|
||||
* If `true`, trim leading and trailing whitespace from each line.
|
||||
* If `false`, preserve whitespace.
|
||||
*
|
||||
* @default true
|
||||
*/
|
||||
trim?: boolean;
|
||||
|
||||
/**
|
||||
* When it's ambiguous and `true`, count ambiguous width characters as 1 character wide.
|
||||
* If `false`, count them as 2 characters wide.
|
||||
*
|
||||
* @default true
|
||||
*/
|
||||
ambiguousIsNarrow?: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* Wrap a string to fit within the specified column width, preserving ANSI escape codes.
|
||||
*
|
||||
* This function is designed to be compatible with the popular "wrap-ansi" NPM package.
|
||||
*
|
||||
* Features:
|
||||
* - Preserves ANSI escape codes (colors, styles) across line breaks
|
||||
* - Supports SGR codes (colors, bold, italic, etc.) and OSC 8 hyperlinks
|
||||
* - Respects Unicode display widths (full-width characters, emoji)
|
||||
* - Word wrapping at word boundaries (configurable)
|
||||
*
|
||||
* @category Utilities
|
||||
*
|
||||
* @param input The string to wrap
|
||||
* @param columns The maximum column width
|
||||
* @param options Wrapping options
|
||||
* @returns The wrapped string
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* import { wrapAnsi } from "bun";
|
||||
*
|
||||
* console.log(wrapAnsi("hello world", 5));
|
||||
* // Output:
|
||||
* // hello
|
||||
* // world
|
||||
*
|
||||
* // Preserves ANSI colors across line breaks
|
||||
* console.log(wrapAnsi("\u001b[31mhello world\u001b[0m", 5));
|
||||
* // Output:
|
||||
* // \u001b[31mhello\u001b[0m
|
||||
* // \u001b[31mworld\u001b[0m
|
||||
*
|
||||
* // Hard wrap long words
|
||||
* console.log(wrapAnsi("abcdefghij", 3, { hard: true }));
|
||||
* // Output:
|
||||
* // abc
|
||||
* // def
|
||||
* // ghi
|
||||
* // j
|
||||
* ```
|
||||
*/
|
||||
function wrapAnsi(
|
||||
/**
|
||||
* The string to wrap
|
||||
*/
|
||||
input: string,
|
||||
/**
|
||||
* The maximum column width
|
||||
*/
|
||||
columns: number,
|
||||
/**
|
||||
* Wrapping options
|
||||
*/
|
||||
options?: WrapAnsiOptions,
|
||||
): string;
|
||||
|
||||
/**
|
||||
* TOML related APIs
|
||||
*/
|
||||
@@ -652,6 +743,101 @@ declare module "bun" {
|
||||
export function parse(input: string): unknown;
|
||||
}
|
||||
|
||||
/**
|
||||
* JSONL (JSON Lines) related APIs.
|
||||
*
|
||||
* Each line in the input is expected to be a valid JSON value separated by newlines.
|
||||
*/
|
||||
namespace JSONL {
|
||||
/**
|
||||
* The result of `Bun.JSONL.parseChunk`.
|
||||
*/
|
||||
interface ParseChunkResult {
|
||||
/** The successfully parsed JSON values. */
|
||||
values: unknown[];
|
||||
/** How far into the input was consumed. When the input is a string, this is a character offset. When the input is a `TypedArray`, this is a byte offset. Use `input.slice(read)` or `input.subarray(read)` to get the unconsumed remainder. */
|
||||
read: number;
|
||||
/** `true` if all input was consumed successfully. `false` if the input ends with an incomplete value or a parse error occurred. */
|
||||
done: boolean;
|
||||
/** A `SyntaxError` if a parse error occurred, otherwise `null`. Values parsed before the error are still available in `values`. */
|
||||
error: SyntaxError | null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse a JSONL (JSON Lines) string into an array of JavaScript values.
|
||||
*
|
||||
* If a parse error occurs and no values were successfully parsed, throws
|
||||
* a `SyntaxError`. If values were parsed before the error, returns the
|
||||
* successfully parsed values without throwing.
|
||||
*
|
||||
* Incomplete trailing values (e.g. from a partial chunk) are silently
|
||||
* ignored and not included in the result.
|
||||
*
|
||||
* When a `TypedArray` is passed, the bytes are parsed directly without
|
||||
* copying if the content is ASCII.
|
||||
*
|
||||
* @param input The JSONL string or typed array to parse
|
||||
* @returns An array of parsed values
|
||||
* @throws {SyntaxError} If the input starts with invalid JSON and no values could be parsed
|
||||
*
|
||||
* @example
|
||||
* ```js
|
||||
* const items = Bun.JSONL.parse('{"a":1}\n{"b":2}\n');
|
||||
* // [{ a: 1 }, { b: 2 }]
|
||||
*
|
||||
* // From a Uint8Array (zero-copy for ASCII):
|
||||
* const buf = new TextEncoder().encode('{"a":1}\n{"b":2}\n');
|
||||
* const items = Bun.JSONL.parse(buf);
|
||||
* // [{ a: 1 }, { b: 2 }]
|
||||
*
|
||||
* // Partial results on error after valid values:
|
||||
* const partial = Bun.JSONL.parse('{"a":1}\n{bad}\n');
|
||||
* // [{ a: 1 }]
|
||||
*
|
||||
* // Throws when no valid values precede the error:
|
||||
* Bun.JSONL.parse('{bad}\n'); // throws SyntaxError
|
||||
* ```
|
||||
*/
|
||||
export function parse(input: string | NodeJS.TypedArray | DataView<ArrayBuffer> | ArrayBufferLike): unknown[];
|
||||
|
||||
/**
|
||||
* Parse a JSONL chunk, designed for streaming use.
|
||||
*
|
||||
* Never throws on parse errors. Instead, returns whatever values were
|
||||
* successfully parsed along with an `error` property containing the
|
||||
* `SyntaxError` (or `null` on success). Use `read` to determine how
|
||||
* much input was consumed and `done` to check if all input was parsed.
|
||||
*
|
||||
* When a `TypedArray` is passed, the bytes are parsed directly without
|
||||
* copying if the content is ASCII. Optional `start` and `end` parameters
|
||||
* allow slicing without copying, and `read` will be a byte offset into
|
||||
* the original typed array.
|
||||
*
|
||||
* @param input The JSONL string or typed array to parse
|
||||
* @param start Byte offset to start parsing from (typed array only, default: 0)
|
||||
* @param end Byte offset to stop parsing at (typed array only, default: input.byteLength)
|
||||
* @returns An object with `values`, `read`, `done`, and `error` properties
|
||||
*
|
||||
* @example
|
||||
* ```js
|
||||
* let buffer = new Uint8Array(0);
|
||||
* for await (const chunk of stream) {
|
||||
* buffer = Buffer.concat([buffer, chunk]);
|
||||
* const { values, read, error } = Bun.JSONL.parseChunk(buffer);
|
||||
* if (error) throw error;
|
||||
* for (const value of values) handle(value);
|
||||
* buffer = buffer.subarray(read);
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
export function parseChunk(input: string): ParseChunkResult;
|
||||
export function parseChunk(
|
||||
input: NodeJS.TypedArray | DataView<ArrayBuffer> | ArrayBufferLike,
|
||||
start?: number,
|
||||
end?: number,
|
||||
): ParseChunkResult;
|
||||
}
|
||||
|
||||
/**
|
||||
* YAML related APIs
|
||||
*/
|
||||
@@ -719,6 +905,480 @@ declare module "bun" {
|
||||
export function stringify(input: unknown, replacer?: undefined | null, space?: string | number): string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Markdown related APIs.
|
||||
*
|
||||
* Provides fast markdown parsing and rendering with three output modes:
|
||||
* - `html()` — render to an HTML string
|
||||
* - `render()` — render with custom callbacks for each element
|
||||
* - `react()` — parse to React-compatible JSX elements
|
||||
*
|
||||
* Supports GFM extensions (tables, strikethrough, task lists, autolinks) and
|
||||
* component overrides to replace default HTML tags with custom components.
|
||||
*
|
||||
* @example
|
||||
* ```tsx
|
||||
* // Render markdown to HTML
|
||||
* const html = Bun.markdown.html("# Hello **world**");
|
||||
* // "<h1>Hello <strong>world</strong></h1>\n"
|
||||
*
|
||||
* // Render with custom callbacks
|
||||
* const ansi = Bun.markdown.render("# Hello **world**", {
|
||||
* heading: (children, { level }) => `\x1b[1m${children}\x1b[0m\n`,
|
||||
* strong: (children) => `\x1b[1m${children}\x1b[22m`,
|
||||
* paragraph: (children) => children + "\n",
|
||||
* });
|
||||
*
|
||||
* // Render as a React component
|
||||
* function Markdown({ text }: { text: string }) {
|
||||
* return Bun.markdown.react(text);
|
||||
* }
|
||||
*
|
||||
* // With component overrides
|
||||
* const element = Bun.markdown.react("# Hello", { h1: MyHeadingComponent });
|
||||
* ```
|
||||
*/
|
||||
namespace markdown {
|
||||
/**
|
||||
* Options for configuring the markdown parser.
|
||||
*
|
||||
* By default, GFM extensions (tables, strikethrough, task lists) are enabled.
|
||||
*/
|
||||
interface Options {
|
||||
/** Enable GFM tables. Default: `true`. */
|
||||
tables?: boolean;
|
||||
/** Enable GFM strikethrough (`~~text~~`). Default: `true`. */
|
||||
strikethrough?: boolean;
|
||||
/** Enable GFM task lists (`- [x] item`). Default: `true`. */
|
||||
tasklists?: boolean;
|
||||
/** Treat soft line breaks as hard line breaks. Default: `false`. */
|
||||
hardSoftBreaks?: boolean;
|
||||
/** Enable wiki-style links (`[[target]]` or `[[target|label]]`). Default: `false`. */
|
||||
wikiLinks?: boolean;
|
||||
/** Enable underline syntax (`__text__` renders as `<u>` instead of `<strong>`). Default: `false`. */
|
||||
underline?: boolean;
|
||||
/** Enable LaTeX math (`$inline$` and `$$display$$`). Default: `false`. */
|
||||
latexMath?: boolean;
|
||||
/** Collapse whitespace in text content. Default: `false`. */
|
||||
collapseWhitespace?: boolean;
|
||||
/** Allow ATX headers without a space after `#`. Default: `false`. */
|
||||
permissiveAtxHeaders?: boolean;
|
||||
/** Disable indented code blocks. Default: `false`. */
|
||||
noIndentedCodeBlocks?: boolean;
|
||||
/** Disable HTML blocks. Default: `false`. */
|
||||
noHtmlBlocks?: boolean;
|
||||
/** Disable inline HTML spans. Default: `false`. */
|
||||
noHtmlSpans?: boolean;
|
||||
/**
|
||||
* Enable the GFM tag filter, which replaces `<` with `&lt;` for disallowed
|
||||
* HTML tags (e.g. `<script>`, `<style>`, `<iframe>`). Default: `false`.
|
||||
*/
|
||||
tagFilter?: boolean;
|
||||
/**
|
||||
* Enable autolinks. Pass `true` to enable all autolink types (URL, WWW, email),
|
||||
* or an object to enable individually.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* // Enable all autolinks
|
||||
* { autolinks: true }
|
||||
* // Enable only URL and email autolinks
|
||||
* { autolinks: { url: true, email: true } }
|
||||
* ```
|
||||
*/
|
||||
autolinks?: boolean | { url?: boolean; www?: boolean; email?: boolean };
|
||||
/**
|
||||
* Configure heading IDs and autolink headings. Pass `true` to enable both
|
||||
* heading IDs and autolink headings, or an object to configure individually.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* // Enable both heading IDs and autolink headings
|
||||
* { headings: true }
|
||||
* // Enable only heading IDs
|
||||
* { headings: { ids: true } }
|
||||
* ```
|
||||
*/
|
||||
headings?: boolean | { ids?: boolean; autolink?: boolean };
|
||||
}
|
||||
|
||||
/** A component that accepts props `P`: a function, class, or HTML tag name. */
|
||||
type Component<P = {}> = string | ((props: P) => any) | (new (props: P) => any);
|
||||
|
||||
interface ChildrenProps {
|
||||
children: import("./jsx.d.ts").JSX.Element[];
|
||||
}
|
||||
interface HeadingProps extends ChildrenProps {
|
||||
/** Heading ID slug. Set when `headings: { ids: true }` is enabled. */
|
||||
id?: string;
|
||||
}
|
||||
interface OrderedListProps extends ChildrenProps {
|
||||
/** The start number. */
|
||||
start: number;
|
||||
}
|
||||
interface ListItemProps extends ChildrenProps {
|
||||
/** Task list checked state. Set for `- [x]` / `- [ ]` items. */
|
||||
checked?: boolean;
|
||||
}
|
||||
interface CodeBlockProps extends ChildrenProps {
|
||||
/** The info-string language (e.g. `"js"`). */
|
||||
language?: string;
|
||||
}
|
||||
interface CellProps extends ChildrenProps {
|
||||
/** Column alignment. */
|
||||
align?: "left" | "center" | "right";
|
||||
}
|
||||
interface LinkProps extends ChildrenProps {
|
||||
/** Link URL. */
|
||||
href: string;
|
||||
/** Link title attribute. */
|
||||
title?: string;
|
||||
}
|
||||
interface ImageProps {
|
||||
/** Image URL. */
|
||||
src: string;
|
||||
/** Alt text. */
|
||||
alt?: string;
|
||||
/** Image title attribute. */
|
||||
title?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Component overrides for `react()`.
|
||||
*
|
||||
* Replace default HTML tags with custom React components. Each override
|
||||
* receives the same props the default element would get.
|
||||
*
|
||||
* @example
|
||||
* ```tsx
|
||||
* function Code({ language, children }: { language?: string; children: React.ReactNode }) {
|
||||
* return <pre data-language={language}><code>{children}</code></pre>;
|
||||
* }
|
||||
* Bun.markdown.react(text, { pre: Code });
|
||||
* ```
|
||||
*/
|
||||
interface ComponentOverrides {
|
||||
h1?: Component<HeadingProps>;
|
||||
h2?: Component<HeadingProps>;
|
||||
h3?: Component<HeadingProps>;
|
||||
h4?: Component<HeadingProps>;
|
||||
h5?: Component<HeadingProps>;
|
||||
h6?: Component<HeadingProps>;
|
||||
p?: Component<ChildrenProps>;
|
||||
blockquote?: Component<ChildrenProps>;
|
||||
ul?: Component<ChildrenProps>;
|
||||
ol?: Component<OrderedListProps>;
|
||||
li?: Component<ListItemProps>;
|
||||
pre?: Component<CodeBlockProps>;
|
||||
hr?: Component<{}>;
|
||||
html?: Component<ChildrenProps>;
|
||||
table?: Component<ChildrenProps>;
|
||||
thead?: Component<ChildrenProps>;
|
||||
tbody?: Component<ChildrenProps>;
|
||||
tr?: Component<ChildrenProps>;
|
||||
th?: Component<CellProps>;
|
||||
td?: Component<CellProps>;
|
||||
em?: Component<ChildrenProps>;
|
||||
strong?: Component<ChildrenProps>;
|
||||
a?: Component<LinkProps>;
|
||||
img?: Component<ImageProps>;
|
||||
code?: Component<ChildrenProps>;
|
||||
del?: Component<ChildrenProps>;
|
||||
math?: Component<ChildrenProps>;
|
||||
u?: Component<ChildrenProps>;
|
||||
br?: Component<{}>;
|
||||
}
|
||||
|
||||
/**
|
||||
* Callbacks for `render()`. Each callback receives the accumulated children
|
||||
* as a string and optional metadata, and returns a string.
|
||||
*
|
||||
* Return `null` or `undefined` to omit the element from the output.
|
||||
* If no callback is registered for an element, its children pass through unchanged.
|
||||
*/
|
||||
/** Meta passed to the `heading` callback. */
|
||||
interface HeadingMeta {
|
||||
/** Heading level (1–6). */
|
||||
level: number;
|
||||
/** Heading ID slug. Set when `headings: { ids: true }` is enabled. */
|
||||
id?: string;
|
||||
}
|
||||
|
||||
/** Meta passed to the `code` callback. */
|
||||
interface CodeBlockMeta {
|
||||
/** The info-string language (e.g. `"js"`). */
|
||||
language?: string;
|
||||
}
|
||||
|
||||
/** Meta passed to the `list` callback. */
|
||||
interface ListMeta {
|
||||
/** Whether this is an ordered list. */
|
||||
ordered: boolean;
|
||||
/** The start number for ordered lists. */
|
||||
start?: number;
|
||||
}
|
||||
|
||||
/** Meta passed to the `listItem` callback. */
|
||||
interface ListItemMeta {
|
||||
/** Task list checked state. Set for `- [x]` / `- [ ]` items. */
|
||||
checked?: boolean;
|
||||
}
|
||||
|
||||
/** Meta passed to `th` and `td` callbacks. */
|
||||
interface CellMeta {
|
||||
/** Column alignment. */
|
||||
align?: "left" | "center" | "right";
|
||||
}
|
||||
|
||||
/** Meta passed to the `link` callback. */
|
||||
interface LinkMeta {
|
||||
/** Link URL. */
|
||||
href: string;
|
||||
/** Link title attribute. */
|
||||
title?: string;
|
||||
}
|
||||
|
||||
/** Meta passed to the `image` callback. */
|
||||
interface ImageMeta {
|
||||
/** Image URL. */
|
||||
src: string;
|
||||
/** Image title attribute. */
|
||||
title?: string;
|
||||
}
|
||||
|
||||
interface RenderCallbacks {
|
||||
/** Heading (level 1–6). `id` is set when `headings: { ids: true }` is enabled. */
|
||||
heading?: (children: string, meta: HeadingMeta) => string | null | undefined;
|
||||
/** Paragraph. */
|
||||
paragraph?: (children: string) => string | null | undefined;
|
||||
/** Blockquote. */
|
||||
blockquote?: (children: string) => string | null | undefined;
|
||||
/** Code block. `meta.language` is the info-string (e.g. `"js"`). Only passed for fenced code blocks with a language. */
|
||||
code?: (children: string, meta?: CodeBlockMeta) => string | null | undefined;
|
||||
/** Ordered or unordered list. `start` is the first item number for ordered lists. */
|
||||
list?: (children: string, meta: ListMeta) => string | null | undefined;
|
||||
/** List item. `meta.checked` is set for task list items (`- [x]` / `- [ ]`). Only passed for task list items. */
|
||||
listItem?: (children: string, meta?: ListItemMeta) => string | null | undefined;
|
||||
/** Horizontal rule. */
|
||||
hr?: (children: string) => string | null | undefined;
|
||||
/** Table. */
|
||||
table?: (children: string) => string | null | undefined;
|
||||
/** Table head. */
|
||||
thead?: (children: string) => string | null | undefined;
|
||||
/** Table body. */
|
||||
tbody?: (children: string) => string | null | undefined;
|
||||
/** Table row. */
|
||||
tr?: (children: string) => string | null | undefined;
|
||||
/** Table header cell. `meta.align` is set when column alignment is specified. */
|
||||
th?: (children: string, meta?: CellMeta) => string | null | undefined;
|
||||
/** Table data cell. `meta.align` is set when column alignment is specified. */
|
||||
td?: (children: string, meta?: CellMeta) => string | null | undefined;
|
||||
/** Raw HTML content. */
|
||||
html?: (children: string) => string | null | undefined;
|
||||
/** Strong emphasis (`**text**`). */
|
||||
strong?: (children: string) => string | null | undefined;
|
||||
/** Emphasis (`*text*`). */
|
||||
emphasis?: (children: string) => string | null | undefined;
|
||||
/** Link. `href` is the URL, `title` is the optional title attribute. */
|
||||
link?: (children: string, meta: LinkMeta) => string | null | undefined;
|
||||
/** Image. `src` is the URL, `title` is the optional title attribute. */
|
||||
image?: (children: string, meta: ImageMeta) => string | null | undefined;
|
||||
/** Inline code (`` `code` ``). */
|
||||
codespan?: (children: string) => string | null | undefined;
|
||||
/** Strikethrough (`~~text~~`). */
|
||||
strikethrough?: (children: string) => string | null | undefined;
|
||||
/** Plain text content. */
|
||||
text?: (text: string) => string | null | undefined;
|
||||
}
|
||||
|
||||
/** Options for `react()` — parser options and element symbol configuration. */
|
||||
interface ReactOptions extends Options {
|
||||
/**
|
||||
* Which `$$typeof` symbol to use on the generated elements.
|
||||
* - `19` (default): `Symbol.for('react.transitional.element')`
|
||||
* - `18`: `Symbol.for('react.element')` — use this for React 18 and older
|
||||
*/
|
||||
reactVersion?: 18 | 19;
|
||||
}
|
||||
|
||||
/**
|
||||
* Render markdown to an HTML string.
|
||||
*
|
||||
* @param input The markdown string or buffer to render
|
||||
* @param options Parser options
|
||||
* @returns An HTML string
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const html = Bun.markdown.html("# Hello **world**");
|
||||
* // "<h1>Hello <strong>world</strong></h1>\n"
|
||||
*
|
||||
* // With options
|
||||
* const html = Bun.markdown.html("## Hello", { headings: { ids: true } });
|
||||
* // '<h2 id="hello">Hello</h2>\n'
|
||||
* ```
|
||||
*/
|
||||
export function html(
|
||||
input: string | NodeJS.TypedArray | DataView<ArrayBuffer> | ArrayBufferLike,
|
||||
options?: Options,
|
||||
): string;

/**
 * Render markdown with custom JavaScript callbacks for each element.
 *
 * Each callback receives the accumulated children as a string and optional
 * metadata, and returns a string. Return `null` or `undefined` to omit
 * an element. If no callback is registered, children pass through unchanged.
 *
 * Parser options are passed as a separate third argument.
 *
 * @param input The markdown string to render
 * @param callbacks Callbacks for each element type
 * @param options Parser options
 * @returns The accumulated string output
 *
 * @example
 * ```ts
 * // Custom HTML with classes
 * const html = Bun.markdown.render("# Title\n\nHello **world**", {
 *   heading: (children, { level }) => `<h${level} class="title">${children}</h${level}>`,
 *   paragraph: (children) => `<p>${children}</p>`,
 *   strong: (children) => `<b>${children}</b>`,
 * });
 *
 * // ANSI terminal output
 * const ansi = Bun.markdown.render("# Hello\n\n**bold**", {
 *   heading: (children) => `\x1b[1;4m${children}\x1b[0m\n`,
 *   paragraph: (children) => children + "\n",
 *   strong: (children) => `\x1b[1m${children}\x1b[22m`,
 * });
 *
 * // With parser options as third argument
 * const text = Bun.markdown.render("Visit www.example.com", {
 *   link: (children, { href }) => `[${children}](${href})`,
 *   paragraph: (children) => children,
 * }, { autolinks: true });
 * ```
 */
export function render(
  input: string | NodeJS.TypedArray | DataView<ArrayBuffer> | ArrayBufferLike,
  callbacks?: RenderCallbacks,
  options?: Options,
): string;
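
// One behavior the @example above does not show: returning `null` omits an
// element entirely. A small sketch that drops images but keeps the rest
// (output shape is approximate):
//
//   const safe = Bun.markdown.render("A ![logo](https://example.com/x.png) B", {
//     image: () => null,
//     paragraph: (children) => `<p>${children}</p>`,
//   });
//   // roughly "<p>A  B</p>"; the omitted image contributes nothing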

/**
 * Render markdown to React JSX elements.
 *
 * Returns a React Fragment containing the parsed markdown as children.
 * Can be returned directly from a component or passed to `renderToString()`.
 *
 * Override any HTML element with a custom component by passing it in the
 * second argument, keyed by tag name. Custom components receive the same props
 * the default elements would (e.g. `href` for links, `language` for code blocks).
 *
 * Parser options (including `reactVersion`) are passed as a separate third argument.
 * Uses `Symbol.for('react.transitional.element')` by default (React 19).
 * Pass `reactVersion: 18` for React 18 and older.
 *
 * @param input The markdown string or buffer to parse
 * @param components Component overrides keyed by HTML tag name
 * @param options Parser options and element symbol configuration
 * @returns A React Fragment element containing the parsed markdown
 *
 * @example
 * ```tsx
 * // Use directly as a component return value
 * function Markdown({ text }: { text: string }) {
 *   return Bun.markdown.react(text);
 * }
 *
 * // Server-side rendering
 * import { renderToString } from "react-dom/server";
 * const html = renderToString(Bun.markdown.react("# Hello **world**"));
 *
 * // Custom components receive element props
 * function Code({ language, children }: { language?: string; children: React.ReactNode }) {
 *   return <pre data-language={language}><code>{children}</code></pre>;
 * }
 * function Link({ href, children }: { href: string; children: React.ReactNode }) {
 *   return <a href={href} target="_blank">{children}</a>;
 * }
 * const el = Bun.markdown.react(text, { pre: Code, a: Link });
 *
 * // For React 18 and older
 * const el18 = Bun.markdown.react(text, undefined, { reactVersion: 18 });
 * ```
 */
export function react(
  input: string | NodeJS.TypedArray | DataView<ArrayBuffer> | ArrayBufferLike,
  components?: ComponentOverrides,
  options?: ReactOptions,
): import("./jsx.d.ts").JSX.Element;
}

/**
 * JSON5 related APIs
 */
namespace JSON5 {
  /**
   * Parse a JSON5 string into a JavaScript value.
   *
   * JSON5 is a superset of JSON based on ECMAScript 5.1 that supports
   * comments, trailing commas, unquoted keys, single-quoted strings,
   * hex numbers, Infinity, NaN, and more.
   *
   * @category Utilities
   *
   * @param input The JSON5 string to parse
   * @returns A JavaScript value
   *
   * @example
   * ```ts
   * import { JSON5 } from "bun";
   *
   * const result = JSON5.parse(`{
   *   // This is a comment
   *   name: 'my-app',
   *   version: '1.0.0', // trailing comma is allowed
   *   hex: 0xDEADbeef,
   *   half: .5,
   *   infinity: Infinity,
   * }`);
   * ```
   */
  export function parse(input: string): unknown;

  /**
   * Convert a JavaScript value into a JSON5 string. Object keys that are
   * valid identifiers are unquoted, strings use double quotes, `Infinity`
   * and `NaN` are represented as literals, and indented output includes
   * trailing commas.
   *
   * @category Utilities
   *
   * @param input The JavaScript value to stringify.
   * @param replacer Currently not supported.
   * @param space A number for how many spaces each level of indentation gets, or a string used as indentation.
   * The number is clamped between 0 and 10, and the first 10 characters of the string are used.
   * @returns A JSON5 string, or `undefined` if the input is `undefined`, a function, or a symbol.
   *
   * @example
   * ```ts
   * import { JSON5 } from "bun";
   *
   * console.log(JSON5.stringify({ a: 1, b: "two" }));
   * // {a:1,b:"two"}
   *
   * console.log(JSON5.stringify({ a: 1, b: 2 }, null, 2));
   * // {
   * //   a: 1,
   * //   b: 2,
   * // }
   * ```
   */
  export function stringify(input: unknown, replacer?: undefined | null, space?: string | number): string | undefined;
}

/**
 * Synchronously resolve a `moduleId` as though it were imported from `parent`
 *
@@ -1654,6 +2314,17 @@ declare module "bun" {
 * @default "warn"
 */
logLevel?: "verbose" | "debug" | "info" | "warn" | "error";

/**
 * Enable REPL mode transforms:
 * - Wraps top-level inputs that appear to be object literals (inputs starting with '{' without trailing ';') in parentheses
 * - Hoists all declarations as var for REPL persistence across vm.runInContext calls
 * - Wraps last expression in { __proto__: null, value: expr } for result capture
 * - Wraps code in sync/async IIFE to avoid parentheses around object literals
 *
 * @default false
 */
replMode?: boolean;
}
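
// The fields above sit alongside `logLevel`, which `Bun.Transpiler` already
// accepts, so a hypothetical sketch of the REPL transforms (treat `replMode`
// as an assumed option name until it appears in the published types):
//
//   const transpiler = new Bun.Transpiler({ loader: "js", replMode: true } as any);
//   const out = transpiler.transformSync("{ a: 1 }");
//   // expected: the object literal is parenthesized/wrapped so its value can be captured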

/**
@@ -1760,14 +2431,15 @@ declare module "bun" {
type Architecture = "x64" | "arm64";
type Libc = "glibc" | "musl";
type SIMD = "baseline" | "modern";
type Target =
type CompileTarget =
| `bun-darwin-${Architecture}`
| `bun-darwin-x64-${SIMD}`
| `bun-darwin-${Architecture}-${SIMD}`
| `bun-linux-${Architecture}`
| `bun-linux-${Architecture}-${Libc}`
| `bun-linux-${Architecture}-${SIMD}`
| `bun-linux-${Architecture}-${SIMD}-${Libc}`
| "bun-windows-x64"
| `bun-windows-x64-${SIMD}`
| `bun-linux-x64-${SIMD}-${Libc}`;
| `bun-windows-x64-${SIMD}`;
}
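
// These target strings feed the `compile` option shown further below; a minimal
// sketch of cross-compiling a standalone binary (entrypoint and outfile are
// placeholders):
//
//   await Bun.build({
//     entrypoints: ["./cli.ts"],
//     compile: {
//       target: "bun-linux-x64-musl",  // one of the CompileTarget forms above
//       outfile: "dist/cli",           // CompileBuildOptions.outfile
//     },
//   });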

/**
@@ -1922,7 +2594,10 @@ declare module "bun" {
 * start times, but will make the final output larger and slightly increase
 * memory usage.
 *
 * Bytecode is currently only supported for CommonJS (`format: "cjs"`).
 * - CommonJS: works with or without `compile: true`
 * - ESM: requires `compile: true`
 *
 * Without an explicit `format`, defaults to CommonJS.
 *
 * Must be `target: "bun"`
 * @default false
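
// The constraints above are easy to trip over, so a minimal sketch of a
// bytecode build (the entrypoint path is a placeholder):
//
//   await Bun.build({
//     entrypoints: ["./server.ts"],
//     outdir: "./dist",
//     target: "bun",      // bytecode requires target: "bun"
//     format: "cjs",      // works without compile: true; ESM would need compile: true
//     bytecode: true,
//   });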
@@ -2102,7 +2777,7 @@ declare module "bun" {
}

interface CompileBuildOptions {
target?: Bun.Build.Target;
target?: Bun.Build.CompileTarget;
execArgv?: string[];
executablePath?: string;
outfile?: string;
@@ -2184,7 +2859,7 @@ declare module "bun" {
 * });
 * ```
 */
compile: boolean | Bun.Build.Target | CompileBuildOptions;
compile: boolean | Bun.Build.CompileTarget | CompileBuildOptions;

/**
 * Splitting is not currently supported with `.compile`
@@ -5007,7 +5682,7 @@ declare module "bun" {
 *
 * This will apply to all sockets from the same {@link Listener}. It is per-socket only for {@link Bun.connect}.
 */
reload(handler: SocketHandler): void;
reload(options: Pick<SocketOptions<Data>, "socket">): void;

/**
 * Get the server that created this socket
@@ -5350,7 +6025,7 @@ declare module "bun" {
stop(closeActiveConnections?: boolean): void;
ref(): void;
unref(): void;
reload(options: Pick<Partial<SocketOptions>, "socket">): void;
reload(options: Pick<SocketOptions<Data>, "socket">): void;
data: Data;
}
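
// A rough sketch of the reload() shape on a live listener (handler bodies are
// placeholders); per the doc above, the new handlers apply to sockets from the
// same listener:
//
//   const listener = Bun.listen({
//     hostname: "127.0.0.1",
//     port: 0,
//     socket: {
//       data(socket, chunk) { /* old handler */ },
//     },
//   });
//
//   listener.reload({
//     socket: {
//       data(socket, chunk) { /* new handler */ },
//     },
//   });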
interface TCPSocketListener<Data = unknown> extends SocketListener<Data> {

packages/bun-types/extensions.d.ts (vendored, 5 lines changed)
@@ -23,6 +23,11 @@ declare module "*.jsonc" {
  export = contents;
}

declare module "*.json5" {
  var contents: any;
  export = contents;
}

declare module "*/bun.lock" {
  var contents: import("bun").BunLockFile;
  export = contents;

packages/bun-types/jsx.d.ts (vendored, new file, 11 lines)
@@ -0,0 +1,11 @@
export {};

type ReactElement = typeof globalThis extends { React: infer React }
  ? React extends { createElement(...args: any): infer R }
    ? R
    : never
  : unknown;

export namespace JSX {
  export type Element = ReactElement;
}
@@ -315,6 +315,15 @@ int us_internal_ssl_socket_is_closed(struct us_internal_ssl_socket_t *s) {
  return us_socket_is_closed(0, &s->s);
}

int us_internal_ssl_socket_is_handshake_finished(struct us_internal_ssl_socket_t *s) {
  if (!s || !s->ssl) return 0;
  return SSL_is_init_finished(s->ssl);
}

int us_internal_ssl_socket_handshake_callback_has_fired(struct us_internal_ssl_socket_t *s) {
  if (!s) return 0;
  return s->handshake_state == HANDSHAKE_COMPLETED;
}

void us_internal_trigger_handshake_callback_econnreset(struct us_internal_ssl_socket_t *s) {
  struct us_internal_ssl_socket_context_t *context =

@@ -439,6 +439,8 @@ void *us_internal_ssl_socket_ext(us_internal_ssl_socket_r s);
void *us_internal_connecting_ssl_socket_ext(struct us_connecting_socket_t *c);
int us_internal_ssl_socket_is_shut_down(us_internal_ssl_socket_r s);
int us_internal_ssl_socket_is_closed(us_internal_ssl_socket_r s);
int us_internal_ssl_socket_is_handshake_finished(us_internal_ssl_socket_r s);
int us_internal_ssl_socket_handshake_callback_has_fired(us_internal_ssl_socket_r s);
void us_internal_ssl_socket_shutdown(us_internal_ssl_socket_r s);

struct us_internal_ssl_socket_t *us_internal_ssl_socket_context_adopt_socket(

@@ -457,6 +457,12 @@ int us_socket_is_shut_down(int ssl, us_socket_r s) nonnull_fn_decl;
/* Returns whether this socket has been closed. Only valid if memory has not yet been released. */
int us_socket_is_closed(int ssl, us_socket_r s) nonnull_fn_decl;

/* Returns 1 if the TLS handshake has completed, 0 otherwise. For non-SSL sockets, always returns 1. */
int us_socket_is_ssl_handshake_finished(int ssl, us_socket_r s) nonnull_fn_decl;

/* Returns 1 if the TLS handshake callback has been invoked, 0 otherwise. For non-SSL sockets, always returns 1. */
int us_socket_ssl_handshake_callback_has_fired(int ssl, us_socket_r s) nonnull_fn_decl;

/* Immediately closes the socket */
struct us_socket_t *us_socket_close(int ssl, us_socket_r s, int code, void *reason) __attribute__((nonnull(2)));

@@ -128,6 +128,26 @@ int us_socket_is_closed(int ssl, struct us_socket_t *s) {
  return s->flags.is_closed;
}

int us_socket_is_ssl_handshake_finished(int ssl, struct us_socket_t *s) {
#ifndef LIBUS_NO_SSL
  if(ssl) {
    return us_internal_ssl_socket_is_handshake_finished((struct us_internal_ssl_socket_t *) s);
  }
#endif
  // Non-SSL sockets are always "handshake finished"
  return 1;
}

int us_socket_ssl_handshake_callback_has_fired(int ssl, struct us_socket_t *s) {
#ifndef LIBUS_NO_SSL
  if(ssl) {
    return us_internal_ssl_socket_handshake_callback_has_fired((struct us_internal_ssl_socket_t *) s);
  }
#endif
  // Non-SSL sockets are always "callback fired"
  return 1;
}

int us_connecting_socket_is_closed(int ssl, struct us_connecting_socket_t *c) {
  return c->closed;
}

@@ -204,26 +204,38 @@ namespace uWS {
}

// do we have data to emit all?
if (data.length() >= chunkSize(state)) {
unsigned int remaining = chunkSize(state);
if (data.length() >= remaining) {
// emit all but 2 bytes then reset state to 0 and goto beginning
// not fin
std::string_view emitSoon;
bool shouldEmit = false;
if (chunkSize(state) > 2) {
emitSoon = std::string_view(data.data(), chunkSize(state) - 2);
shouldEmit = true;
// Validate the chunk terminator (\r\n) accounting for partial reads
switch (remaining) {
default:
// remaining > 2: emit data and validate full terminator
emitSoon = std::string_view(data.data(), remaining - 2);
shouldEmit = true;
[[fallthrough]];
case 2:
// remaining >= 2: validate both \r and \n
if (data[remaining - 2] != '\r' || data[remaining - 1] != '\n') {
state = STATE_IS_ERROR;
return std::nullopt;
}
break;
case 1:
// remaining == 1: only \n left to validate
if (data[0] != '\n') {
state = STATE_IS_ERROR;
return std::nullopt;
}
break;
case 0:
// remaining == 0: terminator already consumed
break;
}
// Validate that the chunk terminator is \r\n to prevent request smuggling
// The last 2 bytes of the chunk must be exactly \r\n
// Note: chunkSize always includes +2 for the terminator (added in consumeHexNumber),
// and chunks with size 0 (chunkSize == 2) are handled earlier at line 190.
// Therefore chunkSize >= 3 here, so no underflow is possible.
size_t terminatorOffset = chunkSize(state) - 2;
if (data[terminatorOffset] != '\r' || data[terminatorOffset + 1] != '\n') {
state = STATE_IS_ERROR;
return std::nullopt;
}
data.remove_prefix(chunkSize(state));
data.remove_prefix(remaining);
state = STATE_IS_CHUNKED;
if (shouldEmit) {
return emitSoon;
@@ -232,19 +244,45 @@ namespace uWS {
} else {
/* We will consume all our input data */
std::string_view emitSoon;
if (chunkSize(state) > 2) {
uint64_t maximalAppEmit = chunkSize(state) - 2;
if (data.length() > maximalAppEmit) {
unsigned int size = chunkSize(state);
size_t len = data.length();
if (size > 2) {
uint64_t maximalAppEmit = size - 2;
if (len > maximalAppEmit) {
emitSoon = data.substr(0, maximalAppEmit);
// Validate terminator bytes being consumed
size_t terminatorBytesConsumed = len - maximalAppEmit;
if (terminatorBytesConsumed >= 1 && data[maximalAppEmit] != '\r') {
state = STATE_IS_ERROR;
return std::nullopt;
}
if (terminatorBytesConsumed >= 2 && data[maximalAppEmit + 1] != '\n') {
state = STATE_IS_ERROR;
return std::nullopt;
}
} else {
//cb(data);
emitSoon = data;
}
} else if (size == 2) {
// Only terminator bytes remain, validate what we have
if (len >= 1 && data[0] != '\r') {
state = STATE_IS_ERROR;
return std::nullopt;
}
if (len >= 2 && data[1] != '\n') {
state = STATE_IS_ERROR;
return std::nullopt;
}
} else if (size == 1) {
// Only \n remains
if (data[0] != '\n') {
state = STATE_IS_ERROR;
return std::nullopt;
}
}
decChunkSize(state, (unsigned int) data.length());
decChunkSize(state, (unsigned int) len);
state |= STATE_IS_CHUNKED;
// new: decrease data by its size (bug)
data.remove_prefix(data.length()); // new bug fix for getNextChunk
data.remove_prefix(len);
if (emitSoon.length()) {
return emitSoon;
} else {

@@ -18,7 +18,7 @@
#pragma once

#ifndef UWS_HTTP_MAX_HEADERS_COUNT
#define UWS_HTTP_MAX_HEADERS_COUNT 100
#define UWS_HTTP_MAX_HEADERS_COUNT 200
#endif

// todo: HttpParser is in need of a few clean-ups and refactorings

@@ -39,6 +39,7 @@ add_compile_definitions(
CONFIG_TCC_PREDEFS
ONE_SOURCE=0
TCC_LIBTCC1="\\0"
CONFIG_TCC_BACKTRACE=0
)

if(APPLE)

@@ -1,5 +1,5 @@
#!/bin/sh
# Version: 26
# Version: 27

# A script that installs the dependencies needed to build and test Bun.
# This should work on macOS and Linux with a POSIX shell.
@@ -1061,6 +1061,11 @@ install_build_essentials() {
go \
xz
install_packages apache2-utils
# QEMU user-mode for baseline CPU verification in CI
case "$arch" in
x64) install_packages qemu-x86_64 ;;
aarch64) install_packages qemu-aarch64 ;;
esac
;;
esac

@@ -23,7 +23,10 @@ const OS_NAME = platform().toLowerCase();
|
||||
const ARCH_NAME_RAW = arch();
|
||||
const IS_MAC = OS_NAME === "darwin";
|
||||
const IS_LINUX = OS_NAME === "linux";
|
||||
const IS_ARM64 = ARCH_NAME_RAW === "arm64" || ARCH_NAME_RAW === "aarch64";
|
||||
const IS_WINDOWS = OS_NAME === "win32";
|
||||
// On Windows, use PROCESSOR_ARCHITECTURE env var to get native arch (Bun may run under x64 emulation)
|
||||
const NATIVE_ARCH = IS_WINDOWS ? (process.env.PROCESSOR_ARCHITECTURE || ARCH_NAME_RAW).toUpperCase() : ARCH_NAME_RAW;
|
||||
const IS_ARM64 = NATIVE_ARCH === "ARM64" || NATIVE_ARCH === "AARCH64" || ARCH_NAME_RAW === "arm64";
|
||||
|
||||
// Paths
|
||||
const ROOT_DIR = resolve(import.meta.dir, "..");
|
||||
@@ -33,22 +36,54 @@ const WEBKIT_RELEASE_DIR = join(WEBKIT_BUILD_DIR, "Release");
|
||||
const WEBKIT_DEBUG_DIR = join(WEBKIT_BUILD_DIR, "Debug");
|
||||
const WEBKIT_RELEASE_DIR_LTO = join(WEBKIT_BUILD_DIR, "ReleaseLTO");
|
||||
|
||||
// Windows ICU paths - use vcpkg static build
|
||||
// Auto-detect triplet: prefer arm64 if it exists, otherwise x64
|
||||
const VCPKG_ARM64_PATH = join(WEBKIT_DIR, "vcpkg_installed", "arm64-windows-static");
|
||||
const VCPKG_X64_PATH = join(WEBKIT_DIR, "vcpkg_installed", "x64-windows-static");
|
||||
const VCPKG_ROOT = existsSync(VCPKG_ARM64_PATH) ? VCPKG_ARM64_PATH : VCPKG_X64_PATH;
|
||||
const ICU_INCLUDE_DIR = join(VCPKG_ROOT, "include");
|
||||
|
||||
// Get ICU library paths based on build config (debug uses 'd' suffix libraries)
|
||||
function getICULibraryPaths(config: BuildConfig) {
|
||||
const isDebug = config === "debug";
|
||||
// vcpkg static ICU libraries: release in lib/, debug in debug/lib/ with 'd' suffix
|
||||
const libDir = isDebug ? join(VCPKG_ROOT, "debug", "lib") : join(VCPKG_ROOT, "lib");
|
||||
const suffix = isDebug ? "d" : "";
|
||||
return {
|
||||
ICU_LIBRARY: libDir,
|
||||
ICU_DATA_LIBRARY: join(libDir, `sicudt${suffix}.lib`),
|
||||
ICU_I18N_LIBRARY: join(libDir, `sicuin${suffix}.lib`),
|
||||
ICU_UC_LIBRARY: join(libDir, `sicuuc${suffix}.lib`),
|
||||
};
|
||||
}
|
||||
|
||||
// Homebrew prefix detection
|
||||
const HOMEBREW_PREFIX = IS_ARM64 ? "/opt/homebrew/" : "/usr/local/";
|
||||
|
||||
// Compiler detection
|
||||
function findExecutable(names: string[]): string | null {
|
||||
for (const name of names) {
|
||||
const result = spawnSync("which", [name], { encoding: "utf8" });
|
||||
if (result.status === 0) {
|
||||
return result.stdout.trim();
|
||||
}
|
||||
const path = Bun.which(name);
|
||||
if (path) return path;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
const CC = findExecutable(["clang-19", "clang"]) || "clang";
|
||||
const CXX = findExecutable(["clang++-19", "clang++"]) || "clang++";
|
||||
// Detect ccache
|
||||
const CCACHE = findExecutable(["ccache"]);
|
||||
const HAS_CCACHE = CCACHE !== null;
|
||||
|
||||
// Configure compilers with ccache if available
|
||||
// On Windows, use clang-cl for MSVC compatibility
|
||||
const CC_BASE = IS_WINDOWS
|
||||
? findExecutable(["clang-cl.exe", "clang-cl"]) || "clang-cl"
|
||||
: findExecutable(["clang-19", "clang"]) || "clang";
|
||||
const CXX_BASE = IS_WINDOWS
|
||||
? findExecutable(["clang-cl.exe", "clang-cl"]) || "clang-cl"
|
||||
: findExecutable(["clang++-19", "clang++"]) || "clang++";
|
||||
|
||||
const CC = HAS_CCACHE ? CCACHE : CC_BASE;
|
||||
const CXX = HAS_CCACHE ? CCACHE : CXX_BASE;
|
||||
|
||||
// Build directory based on config
|
||||
const getBuildDir = (config: BuildConfig) => {
|
||||
@@ -63,7 +98,7 @@ const getBuildDir = (config: BuildConfig) => {
|
||||
};
|
||||
|
||||
// Common CMake flags
|
||||
const getCommonFlags = () => {
|
||||
const getCommonFlags = (config: BuildConfig) => {
|
||||
const flags = [
|
||||
"-DPORT=JSCOnly",
|
||||
"-DENABLE_STATIC_JSC=ON",
|
||||
@@ -74,16 +109,27 @@ const getCommonFlags = () => {
|
||||
"-DENABLE_FTL_JIT=ON",
|
||||
"-G",
|
||||
"Ninja",
|
||||
`-DCMAKE_C_COMPILER=${CC}`,
|
||||
`-DCMAKE_CXX_COMPILER=${CXX}`,
|
||||
];
|
||||
|
||||
// Configure compiler with ccache if available
|
||||
if (HAS_CCACHE) {
|
||||
flags.push(
|
||||
`-DCMAKE_C_COMPILER_LAUNCHER=${CCACHE}`,
|
||||
`-DCMAKE_CXX_COMPILER_LAUNCHER=${CCACHE}`,
|
||||
`-DCMAKE_C_COMPILER=${CC_BASE}`,
|
||||
`-DCMAKE_CXX_COMPILER=${CXX_BASE}`,
|
||||
);
|
||||
} else {
|
||||
flags.push(`-DCMAKE_C_COMPILER=${CC}`, `-DCMAKE_CXX_COMPILER=${CXX}`);
|
||||
}
|
||||
|
||||
if (IS_MAC) {
|
||||
flags.push(
|
||||
"-DENABLE_SINGLE_THREADED_VM_ENTRY_SCOPE=ON",
|
||||
"-DBUN_FAST_TLS=ON",
|
||||
"-DPTHREAD_JIT_PERMISSIONS_API=1",
|
||||
"-DUSE_PTHREAD_JIT_PERMISSIONS_API=ON",
|
||||
"-DENABLE_REMOTE_INSPECTOR=ON",
|
||||
);
|
||||
} else if (IS_LINUX) {
|
||||
flags.push(
|
||||
@@ -91,6 +137,27 @@ const getCommonFlags = () => {
|
||||
"-DUSE_VISIBILITY_ATTRIBUTE=1",
|
||||
"-DENABLE_REMOTE_INSPECTOR=ON",
|
||||
);
|
||||
} else if (IS_WINDOWS) {
|
||||
// Find lld-link for Windows builds
|
||||
const lldLink = findExecutable(["lld-link.exe", "lld-link"]) || "lld-link";
|
||||
// Get ICU library paths for this build config (debug uses 'd' suffix libraries)
|
||||
const icuPaths = getICULibraryPaths(config);
|
||||
|
||||
flags.push(
|
||||
"-DENABLE_REMOTE_INSPECTOR=ON",
|
||||
"-DUSE_VISIBILITY_ATTRIBUTE=1",
|
||||
"-DUSE_SYSTEM_MALLOC=ON",
|
||||
`-DCMAKE_LINKER=${lldLink}`,
|
||||
`-DICU_ROOT=${VCPKG_ROOT}`,
|
||||
`-DICU_LIBRARY=${icuPaths.ICU_LIBRARY}`,
|
||||
`-DICU_INCLUDE_DIR=${ICU_INCLUDE_DIR}`,
|
||||
// Explicitly set ICU library paths to use vcpkg static libs (debug has 'd' suffix)
|
||||
`-DICU_DATA_LIBRARY_RELEASE=${icuPaths.ICU_DATA_LIBRARY}`,
|
||||
`-DICU_I18N_LIBRARY_RELEASE=${icuPaths.ICU_I18N_LIBRARY}`,
|
||||
`-DICU_UC_LIBRARY_RELEASE=${icuPaths.ICU_UC_LIBRARY}`,
|
||||
"-DCMAKE_C_FLAGS=/DU_STATIC_IMPLEMENTATION",
|
||||
"-DCMAKE_CXX_FLAGS=/DU_STATIC_IMPLEMENTATION /clang:-fno-c++-static-destructors",
|
||||
);
|
||||
}
|
||||
|
||||
return flags;
|
||||
@@ -98,7 +165,7 @@ const getCommonFlags = () => {
|
||||
|
||||
// Build-specific CMake flags
|
||||
const getBuildFlags = (config: BuildConfig) => {
|
||||
const flags = [...getCommonFlags()];
|
||||
const flags = [...getCommonFlags(config)];
|
||||
|
||||
switch (config) {
|
||||
case "debug":
|
||||
@@ -106,24 +173,40 @@ const getBuildFlags = (config: BuildConfig) => {
|
||||
"-DCMAKE_BUILD_TYPE=Debug",
|
||||
"-DENABLE_BUN_SKIP_FAILING_ASSERTIONS=ON",
|
||||
"-DCMAKE_EXPORT_COMPILE_COMMANDS=ON",
|
||||
"-DENABLE_REMOTE_INSPECTOR=ON",
|
||||
"-DUSE_VISIBILITY_ATTRIBUTE=1",
|
||||
);
|
||||
|
||||
if (IS_MAC) {
|
||||
// Enable address sanitizer by default on Mac debug builds
|
||||
if (IS_MAC || IS_LINUX) {
|
||||
// Enable address sanitizer by default on Mac/Linux debug builds
|
||||
flags.push("-DENABLE_SANITIZERS=address");
|
||||
// To disable asan, comment the line above and uncomment:
|
||||
// flags.push("-DENABLE_MALLOC_HEAP_BREAKDOWN=ON");
|
||||
}
|
||||
|
||||
if (IS_WINDOWS) {
|
||||
flags.push("-DCMAKE_MSVC_RUNTIME_LIBRARY=MultiThreadedDebug");
|
||||
}
|
||||
break;
|
||||
|
||||
case "lto":
|
||||
flags.push("-DCMAKE_BUILD_TYPE=Release", "-DCMAKE_C_FLAGS=-flto=full", "-DCMAKE_CXX_FLAGS=-flto=full");
|
||||
flags.push("-DCMAKE_BUILD_TYPE=Release");
|
||||
if (IS_WINDOWS) {
|
||||
// On Windows, append LTO flags to existing Windows-specific flags
|
||||
flags.push(
|
||||
"-DCMAKE_C_FLAGS=/DU_STATIC_IMPLEMENTATION -flto=full",
|
||||
"-DCMAKE_CXX_FLAGS=/DU_STATIC_IMPLEMENTATION /clang:-fno-c++-static-destructors -flto=full",
|
||||
"-DCMAKE_MSVC_RUNTIME_LIBRARY=MultiThreaded",
|
||||
);
|
||||
} else {
|
||||
flags.push("-DCMAKE_C_FLAGS=-flto=full", "-DCMAKE_CXX_FLAGS=-flto=full");
|
||||
}
|
||||
break;
|
||||
|
||||
default: // release
|
||||
flags.push("-DCMAKE_BUILD_TYPE=RelWithDebInfo");
|
||||
if (IS_WINDOWS) {
|
||||
flags.push("-DCMAKE_MSVC_RUNTIME_LIBRARY=MultiThreaded");
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
@@ -134,17 +217,6 @@ const getBuildFlags = (config: BuildConfig) => {
|
||||
const getBuildEnv = () => {
|
||||
const env = { ...process.env };
|
||||
|
||||
const cflags = ["-ffat-lto-objects"];
|
||||
const cxxflags = ["-ffat-lto-objects"];
|
||||
|
||||
if (IS_LINUX && buildConfig !== "lto") {
|
||||
cflags.push("-Wl,--whole-archive");
|
||||
cxxflags.push("-Wl,--whole-archive", "-DUSE_BUN_JSC_ADDITIONS=ON", "-DUSE_BUN_EVENT_LOOP=ON");
|
||||
}
|
||||
|
||||
env.CFLAGS = (env.CFLAGS || "") + " " + cflags.join(" ");
|
||||
env.CXXFLAGS = (env.CXXFLAGS || "") + " " + cxxflags.join(" ");
|
||||
|
||||
if (IS_MAC) {
|
||||
env.ICU_INCLUDE_DIRS = `${HOMEBREW_PREFIX}opt/icu4c/include`;
|
||||
}
|
||||
@@ -179,6 +251,9 @@ function buildJSC() {
|
||||
|
||||
console.log(`Building JSC with configuration: ${buildConfig}`);
|
||||
console.log(`Build directory: ${buildDir}`);
|
||||
if (HAS_CCACHE) {
|
||||
console.log(`Using ccache for faster builds: ${CCACHE}`);
|
||||
}
|
||||
|
||||
// Create build directories
|
||||
if (!existsSync(buildDir)) {
|
||||
|
||||
@@ -14,6 +14,15 @@ import {
|
||||
startGroup,
|
||||
} from "./utils.mjs";
|
||||
|
||||
// Detect Windows ARM64 - bun may run under x64 emulation (WoW64), so check multiple indicators
|
||||
const isWindowsARM64 =
|
||||
isWindows &&
|
||||
(process.env.PROCESSOR_ARCHITECTURE === "ARM64" ||
|
||||
process.env.VSCMD_ARG_HOST_ARCH === "arm64" ||
|
||||
process.env.MSYSTEM_CARCH === "aarch64" ||
|
||||
(process.env.PROCESSOR_IDENTIFIER || "").includes("ARMv8") ||
|
||||
process.arch === "arm64");
|
||||
|
||||
if (globalThis.Bun) {
|
||||
await import("./glob-sources.mjs");
|
||||
}
|
||||
@@ -83,6 +92,23 @@ async function build(args) {
|
||||
generateOptions["--toolchain"] = toolchainPath;
|
||||
}
|
||||
|
||||
// Windows ARM64: automatically set required options
|
||||
if (isWindowsARM64) {
|
||||
// Use clang-cl instead of MSVC cl.exe for proper ARM64 flag support
|
||||
if (!generateOptions["-DCMAKE_C_COMPILER"]) {
|
||||
generateOptions["-DCMAKE_C_COMPILER"] = "clang-cl";
|
||||
}
|
||||
if (!generateOptions["-DCMAKE_CXX_COMPILER"]) {
|
||||
generateOptions["-DCMAKE_CXX_COMPILER"] = "clang-cl";
|
||||
}
|
||||
// Skip codegen by default since x64 bun crashes under WoW64 emulation
|
||||
// Can be overridden with -DSKIP_CODEGEN=OFF once ARM64 bun is available
|
||||
if (!generateOptions["-DSKIP_CODEGEN"]) {
|
||||
generateOptions["-DSKIP_CODEGEN"] = "ON";
|
||||
}
|
||||
console.log("Windows ARM64 detected: using clang-cl and SKIP_CODEGEN=ON");
|
||||
}
|
||||
|
||||
const generateArgs = Object.entries(generateOptions).flatMap(([flag, value]) =>
|
||||
flag.startsWith("-D") ? [`${flag}=${value}`] : [flag, value],
|
||||
);
|
||||
|
||||
@@ -49,9 +49,42 @@ const colors = {
|
||||
|
||||
// Parse command line arguments
|
||||
const args = process.argv.slice(2);
|
||||
|
||||
// Show help
|
||||
if (args.includes("--help") || args.includes("-h")) {
|
||||
console.log(`Usage: bun run scripts/buildkite-failures.ts [options] [build-id|branch|pr-url|buildkite-url]
|
||||
|
||||
Shows detailed error information from BuildKite build failures.
|
||||
Full logs are saved to /tmp/bun-build-{number}-{platform}-{step}.log
|
||||
|
||||
Arguments:
|
||||
build-id BuildKite build number (e.g., 35051)
|
||||
branch Git branch name (e.g., main, claude/fix-bug)
|
||||
pr-url GitHub PR URL (e.g., https://github.com/oven-sh/bun/pull/26173)
|
||||
buildkite-url BuildKite build URL
|
||||
#number GitHub PR number (e.g., #26173)
|
||||
(none) Uses current git branch
|
||||
|
||||
Options:
|
||||
--flaky, -f Include flaky test annotations
|
||||
--warnings, -w Include warning annotations
|
||||
--wait Poll continuously until build completes or fails
|
||||
--help, -h Show this help message
|
||||
|
||||
Examples:
|
||||
bun run scripts/buildkite-failures.ts # Current branch
|
||||
bun run scripts/buildkite-failures.ts main # Main branch
|
||||
bun run scripts/buildkite-failures.ts 35051 # Build #35051
|
||||
bun run scripts/buildkite-failures.ts #26173 # PR #26173
|
||||
bun run scripts/buildkite-failures.ts --wait # Wait for current branch build to complete
|
||||
`);
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
const showWarnings = args.includes("--warnings") || args.includes("-w");
|
||||
const showFlaky = args.includes("--flaky") || args.includes("-f");
|
||||
const inputArg = args.find(arg => !arg.startsWith("-"));
|
||||
const waitMode = args.includes("--wait");
|
||||
const inputArg = args.find(arg => !arg.startsWith("-") && !arg.startsWith("--"));
|
||||
|
||||
// Determine what type of input we have
|
||||
let buildNumber = null;
|
||||
@@ -114,38 +147,138 @@ if (!buildNumber) {
|
||||
buildNumber = match[1];
|
||||
}
|
||||
|
||||
// Fetch build JSON
|
||||
const buildResponse = await fetch(`https://buildkite.com/bun/bun/builds/${buildNumber}.json`);
|
||||
const build = await buildResponse.json();
|
||||
// Helper to format time ago
|
||||
function formatTimeAgo(dateStr: string | null): string {
|
||||
if (!dateStr) return "not started";
|
||||
const date = new Date(dateStr);
|
||||
const now = new Date();
|
||||
const diffMs = now.getTime() - date.getTime();
|
||||
const diffSecs = Math.floor(diffMs / 1000);
|
||||
const diffMins = Math.floor(diffSecs / 60);
|
||||
const diffHours = Math.floor(diffMins / 60);
|
||||
const diffDays = Math.floor(diffHours / 24);
|
||||
|
||||
// Calculate time ago
|
||||
const buildTime = new Date(build.started_at);
|
||||
const now = new Date();
|
||||
const diffMs = now.getTime() - buildTime.getTime();
|
||||
const diffSecs = Math.floor(diffMs / 1000);
|
||||
const diffMins = Math.floor(diffSecs / 60);
|
||||
const diffHours = Math.floor(diffMins / 60);
|
||||
const diffDays = Math.floor(diffHours / 24);
|
||||
|
||||
let timeAgo;
|
||||
if (diffDays > 0) {
|
||||
timeAgo = `${diffDays} day${diffDays !== 1 ? "s" : ""} ago`;
|
||||
} else if (diffHours > 0) {
|
||||
timeAgo = `${diffHours} hour${diffHours !== 1 ? "s" : ""} ago`;
|
||||
} else if (diffMins > 0) {
|
||||
timeAgo = `${diffMins} minute${diffMins !== 1 ? "s" : ""} ago`;
|
||||
} else {
|
||||
timeAgo = `${diffSecs} second${diffSecs !== 1 ? "s" : ""} ago`;
|
||||
if (diffDays > 0) return `${diffDays} day${diffDays !== 1 ? "s" : ""} ago`;
|
||||
if (diffHours > 0) return `${diffHours} hour${diffHours !== 1 ? "s" : ""} ago`;
|
||||
if (diffMins > 0) return `${diffMins} minute${diffMins !== 1 ? "s" : ""} ago`;
|
||||
return `${diffSecs} second${diffSecs !== 1 ? "s" : ""} ago`;
|
||||
}
|
||||
|
||||
// Helper to clear line for updates
|
||||
const clearLine = isTTY ? "\x1b[2K\r" : "";
|
||||
|
||||
// Poll for build status
|
||||
let build: any;
|
||||
let pollCount = 0;
|
||||
const pollInterval = 10000; // 10 seconds
|
||||
|
||||
while (true) {
|
||||
// Fetch build JSON
|
||||
const buildResponse = await fetch(`https://buildkite.com/bun/bun/builds/${buildNumber}.json`);
|
||||
build = await buildResponse.json();
|
||||
|
||||
// Check for failed jobs first (even if build is still running)
|
||||
const failedJobsEarly =
|
||||
build.jobs?.filter(
|
||||
(job: any) => job.exit_status && job.exit_status > 0 && !job.soft_failed && job.type === "script",
|
||||
) || [];
|
||||
|
||||
// In wait mode with failures, stop polling and show failures
|
||||
if (waitMode && failedJobsEarly.length > 0) {
|
||||
if (pollCount > 0) {
|
||||
process.stdout.write(clearLine);
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
// Calculate time ago (use created_at as fallback for scheduled/pending builds)
|
||||
const timeAgo = formatTimeAgo(build.started_at || build.created_at);
|
||||
|
||||
// Check if build passed
|
||||
if (build.state === "passed") {
|
||||
if (pollCount > 0) {
|
||||
process.stdout.write(clearLine);
|
||||
}
|
||||
console.log(`${timeAgo} - build #${buildNumber} https://buildkite.com/bun/bun/builds/${buildNumber}\n`);
|
||||
console.log(`${colors.green}✅ Passed!${colors.reset}`);
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
// Check if build was canceled
|
||||
if (build.state === "canceled" || build.state === "canceling") {
|
||||
if (pollCount > 0) {
|
||||
process.stdout.write(clearLine);
|
||||
}
|
||||
console.log(`${timeAgo} - build #${buildNumber} https://buildkite.com/bun/bun/builds/${buildNumber}\n`);
|
||||
console.log(`${colors.dim}🚫 Build was canceled${colors.reset}`);
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
// Check if build is pending/running/scheduled
|
||||
if (
|
||||
build.state === "scheduled" ||
|
||||
build.state === "running" ||
|
||||
build.state === "creating" ||
|
||||
build.state === "started"
|
||||
) {
|
||||
const runningJobs = build.jobs?.filter((job: any) => job.state === "running") || [];
|
||||
const pendingJobs = build.jobs?.filter((job: any) => job.state === "scheduled" || job.state === "waiting") || [];
|
||||
const passedJobs = build.jobs?.filter((job: any) => job.state === "passed") || [];
|
||||
const totalJobs = build.jobs?.filter((job: any) => job.type === "script")?.length || 0;
|
||||
|
||||
if (waitMode) {
|
||||
// In wait mode, show a single updating line
|
||||
let statusMsg = "";
|
||||
if (build.state === "scheduled" || build.state === "creating") {
|
||||
statusMsg = `⏳ Waiting... (scheduled ${formatTimeAgo(build.created_at)})`;
|
||||
} else {
|
||||
statusMsg = `🔄 Running... ${passedJobs.length}/${totalJobs} passed, ${runningJobs.length} running`;
|
||||
}
|
||||
process.stdout.write(`${clearLine}${colors.dim}${statusMsg}${colors.reset}`);
|
||||
pollCount++;
|
||||
await Bun.sleep(pollInterval);
|
||||
continue;
|
||||
} else {
|
||||
// Not in wait mode, show full status and exit
|
||||
console.log(`${timeAgo} - build #${buildNumber} https://buildkite.com/bun/bun/builds/${buildNumber}\n`);
|
||||
|
||||
if (build.state === "scheduled" || build.state === "creating") {
|
||||
console.log(`${colors.dim}⏳ Build is scheduled/pending${colors.reset}`);
|
||||
if (build.created_at) {
|
||||
console.log(`${colors.dim} Created: ${formatTimeAgo(build.created_at)}${colors.reset}`);
|
||||
}
|
||||
} else {
|
||||
console.log(`${colors.dim}🔄 Build is running${colors.reset}`);
|
||||
if (build.started_at) {
|
||||
console.log(`${colors.dim} Started: ${formatTimeAgo(build.started_at)}${colors.reset}`);
|
||||
}
|
||||
console.log(
|
||||
`${colors.dim} Progress: ${passedJobs.length}/${totalJobs} jobs passed, ${runningJobs.length} running, ${pendingJobs.length} pending${colors.reset}`,
|
||||
);
|
||||
|
||||
if (runningJobs.length > 0) {
|
||||
console.log(`\n${colors.dim}Running jobs:${colors.reset}`);
|
||||
for (const job of runningJobs.slice(0, 5)) {
|
||||
const name = job.name || job.label || "Unknown";
|
||||
console.log(` ${colors.dim}• ${name}${colors.reset}`);
|
||||
}
|
||||
if (runningJobs.length > 5) {
|
||||
console.log(` ${colors.dim}... and ${runningJobs.length - 5} more${colors.reset}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
process.exit(0);
|
||||
}
|
||||
}
|
||||
|
||||
// Build is in a terminal state (failed, etc.) - break out of loop
|
||||
break;
|
||||
}
|
||||
|
||||
// Print header for failed build
|
||||
const timeAgo = formatTimeAgo(build.started_at || build.created_at);
|
||||
console.log(`${timeAgo} - build #${buildNumber} https://buildkite.com/bun/bun/builds/${buildNumber}\n`);
|
||||
|
||||
// Check if build passed
|
||||
if (build.state === "passed") {
|
||||
console.log(`${colors.green}✅ Passed!${colors.reset}`);
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
// Get failed jobs
|
||||
const failedJobs =
|
||||
build.jobs?.filter(job => job.exit_status && job.exit_status > 0 && !job.soft_failed && job.type === "script") || [];
|
||||
@@ -734,7 +867,212 @@ if (registerRequestIndex !== -1) {
|
||||
console.log(` https://buildkite.com/bun/bun/builds/${buildNumber}#annotations`);
|
||||
}
|
||||
} else {
|
||||
console.log(`\n${colors.red}${colors.bold}${failedJobs.length} job failures${colors.reset}\n`);
|
||||
console.log("View detailed results at:");
|
||||
console.log(` https://buildkite.com/bun/bun/builds/${buildNumber}#annotations`);
|
||||
// No annotations found - show detailed job failure information
|
||||
if (failedJobs.length > 0) {
|
||||
console.log(`\n${colors.red}${colors.bold}${failedJobs.length} job failures${colors.reset}\n`);
|
||||
|
||||
// Show annotation counts if available
|
||||
const annotationCounts = build.annotation_counts_by_style;
|
||||
if (annotationCounts) {
|
||||
const errors = annotationCounts.error || 0;
|
||||
const warnings = annotationCounts.warning || 0;
|
||||
if (errors > 0 || warnings > 0) {
|
||||
const parts = [];
|
||||
if (errors > 0) parts.push(`${errors} error${errors !== 1 ? "s" : ""}`);
|
||||
if (warnings > 0) parts.push(`${warnings} warning${warnings !== 1 ? "s" : ""}`);
|
||||
console.log(
|
||||
`${colors.dim}Annotations: ${parts.join(", ")} - view at https://buildkite.com/bun/bun/builds/${buildNumber}#annotations${colors.reset}\n`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Group jobs by type
|
||||
const buildJobs = failedJobs.filter(job => (job.name || job.label || "").includes("build-"));
|
||||
const testJobs = failedJobs.filter(job => (job.name || job.label || "").includes("test"));
|
||||
const otherJobs = failedJobs.filter(
|
||||
job => !(job.name || job.label || "").includes("build-") && !(job.name || job.label || "").includes("test"),
|
||||
);
|
||||
|
||||
// Display build failures
|
||||
if (buildJobs.length > 0) {
|
||||
console.log(
|
||||
`${colors.bgRed}${colors.white}${colors.bold} Build Failures (${buildJobs.length}) ${colors.reset}\n`,
|
||||
);
|
||||
for (const job of buildJobs) {
|
||||
const name = (job.name || job.label || "Unknown").replace(/^:([^:]+):/, (_, emoji) => {
|
||||
const platform = emoji.toLowerCase();
|
||||
return platformMap[platform] || `:${emoji}:`;
|
||||
});
|
||||
const duration =
|
||||
job.started_at && job.finished_at
|
||||
? `${((new Date(job.finished_at).getTime() - new Date(job.started_at).getTime()) / 1000).toFixed(0)}s`
|
||||
: "N/A";
|
||||
console.log(` ${colors.red}✗${colors.reset} ${name}`);
|
||||
console.log(` ${colors.dim}Duration: ${duration} | Exit: ${job.exit_status}${colors.reset}`);
|
||||
console.log(` ${colors.dim}https://buildkite.com${job.path}${colors.reset}`);
|
||||
console.log();
|
||||
}
|
||||
}
|
||||
|
||||
// Display test failures
|
||||
if (testJobs.length > 0) {
|
||||
console.log(`${colors.bgBlue}${colors.white}${colors.bold} Test Failures (${testJobs.length}) ${colors.reset}\n`);
|
||||
for (const job of testJobs) {
|
||||
const name = (job.name || job.label || "Unknown").replace(/^:([^:]+):/, (_, emoji) => {
|
||||
const platform = emoji.toLowerCase();
|
||||
return platformMap[platform] || `:${emoji}:`;
|
||||
});
|
||||
const duration =
|
||||
job.started_at && job.finished_at
|
||||
? `${((new Date(job.finished_at).getTime() - new Date(job.started_at).getTime()) / 1000).toFixed(0)}s`
|
||||
: "N/A";
|
||||
console.log(` ${colors.red}✗${colors.reset} ${name}`);
|
||||
console.log(` ${colors.dim}Duration: ${duration} | Exit: ${job.exit_status}${colors.reset}`);
|
||||
console.log(` ${colors.dim}https://buildkite.com${job.path}${colors.reset}`);
|
||||
console.log();
|
||||
}
|
||||
}
|
||||
|
||||
// Display other failures
|
||||
if (otherJobs.length > 0) {
|
||||
console.log(
|
||||
`${colors.bgBlue}${colors.white}${colors.bold} Other Failures (${otherJobs.length}) ${colors.reset}\n`,
|
||||
);
|
||||
for (const job of otherJobs) {
|
||||
const name = (job.name || job.label || "Unknown").replace(/^:([^:]+):/, (_, emoji) => {
|
||||
const platform = emoji.toLowerCase();
|
||||
return platformMap[platform] || `:${emoji}:`;
|
||||
});
|
||||
const duration =
|
||||
job.started_at && job.finished_at
|
||||
? `${((new Date(job.finished_at).getTime() - new Date(job.started_at).getTime()) / 1000).toFixed(0)}s`
|
||||
: "N/A";
|
||||
console.log(` ${colors.red}✗${colors.reset} ${name}`);
|
||||
console.log(` ${colors.dim}Duration: ${duration} | Exit: ${job.exit_status}${colors.reset}`);
|
||||
console.log(` ${colors.dim}https://buildkite.com${job.path}${colors.reset}`);
|
||||
console.log();
|
||||
}
|
||||
}
|
||||
|
||||
// Fetch and display logs for all failed jobs
|
||||
// Use the public BuildKite log endpoint
|
||||
console.log(`${colors.dim}Fetching logs for ${failedJobs.length} failed jobs...${colors.reset}\n`);
|
||||
|
||||
for (const job of failedJobs) {
|
||||
const name = (job.name || job.label || "Unknown").replace(/^:([^:]+):/, (_, emoji) => {
|
||||
const platform = emoji.toLowerCase();
|
||||
return platformMap[platform] || `:${emoji}:`;
|
||||
});
|
||||
|
||||
// Create a sanitized filename from the job name
|
||||
// e.g., ":darwin: aarch64 - build-cpp" -> "darwin-aarch64-build-cpp"
|
||||
const sanitizedName = (job.name || job.label || "unknown")
|
||||
.replace(/^:([^:]+):\s*/, "$1-") // :darwin: -> darwin-
|
||||
.replace(/\s+-\s+/g, "-") // " - " -> "-"
|
||||
.replace(/[^a-zA-Z0-9-]/g, "-") // Replace other chars with -
|
||||
.replace(/-+/g, "-") // Collapse multiple -
|
||||
.replace(/^-|-$/g, "") // Remove leading/trailing -
|
||||
.toLowerCase();
|
||||
|
||||
const logFilePath = `/tmp/bun-build-${buildNumber}-${sanitizedName}.log`;
|
||||
|
||||
try {
|
||||
const logResponse = await fetch(
|
||||
`https://buildkite.com/organizations/bun/pipelines/bun/builds/${buildNumber}/jobs/${job.id}/log`,
|
||||
);
|
||||
|
||||
if (logResponse.ok) {
|
||||
const logData = await logResponse.json();
|
||||
let output = logData.output || "";
|
||||
|
||||
// Convert HTML to readable text (without ANSI codes for file output)
|
||||
const plainOutput = output
|
||||
// Remove timestamp tags
|
||||
.replace(/<time[^>]*>[^<]*<\/time>/g, "")
|
||||
// Remove all span tags
|
||||
.replace(/<span[^>]*>([^<]*)<\/span>/g, "$1")
|
||||
// Remove remaining HTML tags
|
||||
.replace(/<[^>]+>/g, "")
|
||||
// Decode HTML entities
|
||||
.replace(/&/g, "&")
|
||||
.replace(/</g, "<")
|
||||
.replace(/>/g, ">")
|
||||
.replace(/"/g, '"')
|
||||
.replace(/'/g, "'")
|
||||
.replace(///g, "/")
|
||||
.replace(/ /g, " ");
|
||||
|
||||
// Write the full log to a file
|
||||
await Bun.write(logFilePath, plainOutput);
|
||||
|
||||
// Extract unique error messages for display
|
||||
const lines = plainOutput.split("\n");
|
||||
const uniqueErrors = new Set<string>();
|
||||
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
const line = lines[i];
|
||||
|
||||
// Look for actual error messages
|
||||
const isError =
|
||||
(line.includes("error:") && !line.includes('error: script "') && !line.includes("error: exit")) ||
|
||||
line.includes("fatal error:") ||
|
||||
line.includes("panic:") ||
|
||||
line.includes("undefined reference");
|
||||
|
||||
if (isError) {
|
||||
// Extract just the error message part (remove path prefixes and timestamps)
|
||||
const errorMsg = line
|
||||
.replace(/^.*?\d{4}-\d{2}-\d{2}T[\d:.]+Z/, "") // Remove timestamps
|
||||
.replace(/^.*?\/[^\s]*:\d+:\d+:\s*/, "") // Remove file paths
|
||||
.trim();
|
||||
|
||||
if (errorMsg && !uniqueErrors.has(errorMsg)) {
|
||||
uniqueErrors.add(errorMsg);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Display job info with log file path
|
||||
console.log(`${colors.bgBlue}${colors.white}${colors.bold} ${name} ${colors.reset}`);
|
||||
console.log(` ${colors.dim}Log: ${logFilePath}${colors.reset}`);
|
||||
|
||||
if (uniqueErrors.size > 0) {
|
||||
console.log(` ${colors.red}Errors (${uniqueErrors.size}):${colors.reset}`);
|
||||
let count = 0;
|
||||
for (const err of uniqueErrors) {
|
||||
if (count >= 5) {
|
||||
console.log(` ${colors.dim}... and ${uniqueErrors.size - 5} more${colors.reset}`);
|
||||
break;
|
||||
}
|
||||
console.log(` ${colors.red}•${colors.reset} ${err.slice(0, 120)}${err.length > 120 ? "..." : ""}`);
|
||||
count++;
|
||||
}
|
||||
} else {
|
||||
// Show last few lines as a preview
|
||||
const lastLines = lines.slice(-5).filter(l => l.trim());
|
||||
if (lastLines.length > 0) {
|
||||
console.log(` ${colors.dim}Last output:${colors.reset}`);
|
||||
for (const line of lastLines) {
|
||||
console.log(` ${colors.dim}${line.slice(0, 100)}${line.length > 100 ? "..." : ""}${colors.reset}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (logData.truncated) {
|
||||
console.log(` ${colors.dim}(Log was truncated by BuildKite)${colors.reset}`);
|
||||
}
|
||||
} else {
|
||||
console.log(`${colors.bgBlue}${colors.white}${colors.bold} ${name} ${colors.reset}`);
|
||||
console.log(` ${colors.dim}Failed to fetch log: ${logResponse.status}${colors.reset}`);
|
||||
}
|
||||
} catch (e) {
|
||||
console.log(`${colors.bgBlue}${colors.white}${colors.bold} ${name} ${colors.reset}`);
|
||||
console.log(` ${colors.dim}Error fetching log: ${e.message}${colors.reset}`);
|
||||
}
|
||||
console.log();
|
||||
}
|
||||
} else {
|
||||
console.log("View detailed results at:");
|
||||
console.log(` https://buildkite.com/bun/bun/builds/${buildNumber}#annotations`);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1585,6 +1585,9 @@ function isNodeTest(path) {
|
||||
if (isCI && isMacOS && isX64) {
|
||||
return false;
|
||||
}
|
||||
if (!isJavaScript(path)) {
|
||||
return false;
|
||||
}
|
||||
const unixPath = path.replaceAll(sep, "/");
|
||||
return (
|
||||
unixPath.includes("js/node/test/parallel/") ||
|
||||
|
||||
scripts/update-uucode.sh (new executable file, 82 lines)
@@ -0,0 +1,82 @@
|
||||
#!/bin/bash
|
||||
# Updates the vendored uucode library and regenerates grapheme tables.
|
||||
#
|
||||
# Usage:
|
||||
# ./scripts/update-uucode.sh # update from default URL
|
||||
# ./scripts/update-uucode.sh /path/to/uucode # update from local directory
|
||||
# ./scripts/update-uucode.sh https://url.tar.gz # update from URL
|
||||
#
|
||||
# After running, verify with:
|
||||
# bun bd test test/js/bun/util/stringWidth.test.ts
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
BUN_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
|
||||
UUCODE_DIR="$BUN_ROOT/src/deps/uucode"
|
||||
ZIG="$BUN_ROOT/vendor/zig/zig"
|
||||
|
||||
if [ ! -x "$ZIG" ]; then
|
||||
echo "error: zig not found at $ZIG"
|
||||
echo " run scripts/bootstrap.sh first"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
update_from_dir() {
|
||||
local src="$1"
|
||||
echo "Updating uucode from: $src"
|
||||
rm -rf "$UUCODE_DIR"
|
||||
mkdir -p "$UUCODE_DIR"
|
||||
cp -r "$src"/* "$UUCODE_DIR/"
|
||||
}
|
||||
|
||||
update_from_url() {
|
||||
local url="$1"
|
||||
local tmp
|
||||
tmp=$(mktemp -d)
|
||||
trap "rm -rf $tmp" EXIT
|
||||
|
||||
echo "Downloading uucode from: $url"
|
||||
curl -fsSL "$url" | tar -xz -C "$tmp" --strip-components=1
|
||||
|
||||
update_from_dir "$tmp"
|
||||
}
|
||||
|
||||
# Handle source argument
|
||||
if [ $# -ge 1 ]; then
|
||||
SOURCE="$1"
|
||||
if [ -d "$SOURCE" ]; then
|
||||
update_from_dir "$SOURCE"
|
||||
elif [[ "$SOURCE" == http* ]]; then
|
||||
update_from_url "$SOURCE"
|
||||
else
|
||||
echo "error: argument must be a directory or URL"
|
||||
exit 1
|
||||
fi
|
||||
else
|
||||
# Default: use the zig global cache if available
|
||||
CACHED=$(find "$HOME/.cache/zig/p" -maxdepth 1 -name "uucode-*" -type d 2>/dev/null | sort -V | tail -1)
|
||||
if [ -n "$CACHED" ]; then
|
||||
update_from_dir "$CACHED"
|
||||
else
|
||||
echo "error: no uucode source specified and none found in zig cache"
|
||||
echo ""
|
||||
echo "usage: $0 <path-to-uucode-dir-or-url>"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
echo ""
|
||||
echo "Regenerating grapheme tables..."
|
||||
cd "$BUN_ROOT"
|
||||
"$ZIG" build generate-grapheme-tables
|
||||
|
||||
echo ""
|
||||
echo "Done. Updated files:"
|
||||
echo " src/deps/uucode/ (vendored library)"
|
||||
echo " src/string/immutable/grapheme_tables.zig (regenerated)"
|
||||
echo ""
|
||||
echo "Next steps:"
|
||||
echo " 1. bun bd test test/js/bun/util/stringWidth.test.ts"
|
||||
echo " 2. git add src/deps/uucode src/string/immutable/grapheme_tables.zig"
|
||||
echo " 3. git commit -m 'Update uucode to <version>'"
|
||||
scripts/verify-baseline-cpu.sh (new executable file, 100 lines)
@@ -0,0 +1,100 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
# Verify that a Bun binary doesn't use CPU instructions beyond its baseline target.
|
||||
# Uses QEMU user-mode emulation with restricted CPU features.
|
||||
# Any illegal instruction (SIGILL) causes exit code 132 and fails the build.
|
||||
#
|
||||
# QEMU must be pre-installed in the CI image (see .buildkite/Dockerfile and
|
||||
# scripts/bootstrap.sh).
|
||||
|
||||
ARCH=""
|
||||
BINARY=""
|
||||
|
||||
while [[ $# -gt 0 ]]; do
|
||||
case $1 in
|
||||
--arch) ARCH="$2"; shift 2 ;;
|
||||
--binary) BINARY="$2"; shift 2 ;;
|
||||
*) echo "Unknown arg: $1"; exit 1 ;;
|
||||
esac
|
||||
done
|
||||
|
||||
if [ -z "$ARCH" ] || [ -z "$BINARY" ]; then
|
||||
echo "Usage: $0 --arch <x64|aarch64> --binary <path>"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ ! -f "$BINARY" ]; then
|
||||
echo "ERROR: Binary not found: $BINARY"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Select QEMU binary and CPU model
|
||||
HOST_ARCH=$(uname -m)
|
||||
if [ "$ARCH" = "x64" ]; then
|
||||
QEMU_BIN="qemu-x86_64"
|
||||
if [ -f "/usr/bin/qemu-x86_64-static" ]; then
|
||||
QEMU_BIN="qemu-x86_64-static"
|
||||
fi
|
||||
QEMU_CPU="Nehalem"
|
||||
CPU_DESC="Nehalem (SSE4.2, no AVX/AVX2/AVX512)"
|
||||
elif [ "$ARCH" = "aarch64" ]; then
|
||||
QEMU_BIN="qemu-aarch64"
|
||||
if [ -f "/usr/bin/qemu-aarch64-static" ]; then
|
||||
QEMU_BIN="qemu-aarch64-static"
|
||||
fi
|
||||
# cortex-a53 is ARMv8.0-A (no LSE atomics, no SVE). It's the most widely
|
||||
# supported ARMv8.0 model across QEMU versions.
|
||||
QEMU_CPU="cortex-a53"
|
||||
CPU_DESC="Cortex-A53 (ARMv8.0-A+CRC, no LSE/SVE)"
|
||||
else
|
||||
echo "ERROR: Unknown arch: $ARCH"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if ! command -v "$QEMU_BIN" &>/dev/null; then
|
||||
echo "ERROR: $QEMU_BIN not found. It must be pre-installed in the CI image."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
BINARY_NAME=$(basename "$BINARY")
|
||||
|
||||
echo "--- Verifying $BINARY_NAME on $CPU_DESC"
|
||||
echo " Binary: $BINARY"
|
||||
echo " QEMU: $QEMU_BIN -cpu $QEMU_CPU"
|
||||
echo " Host: $HOST_ARCH"
|
||||
echo ""
|
||||
|
||||
run_test() {
|
||||
local label="$1"
|
||||
shift
|
||||
echo "+++ $BINARY_NAME: $label"
|
||||
if "$QEMU_BIN" -cpu "$QEMU_CPU" "$@"; then
|
||||
echo " PASS"
|
||||
return 0
|
||||
else
|
||||
local exit_code=$?
|
||||
echo ""
|
||||
if [ $exit_code -eq 132 ]; then
|
||||
echo " FAIL: Illegal instruction (SIGILL)"
|
||||
echo ""
|
||||
echo " The $BINARY_NAME binary uses CPU instructions not available on $QEMU_CPU."
|
||||
if [ "$ARCH" = "x64" ]; then
|
||||
echo " The baseline x64 build targets Nehalem (SSE4.2)."
|
||||
echo " AVX, AVX2, and AVX512 instructions are not allowed."
|
||||
else
|
||||
echo " The aarch64 build targets Cortex-A53 (ARMv8.0-A+CRC)."
|
||||
echo " LSE atomics, SVE, and dotprod instructions are not allowed."
|
||||
fi
|
||||
else
|
||||
echo " FAIL: exit code $exit_code"
|
||||
fi
|
||||
exit $exit_code
|
||||
fi
|
||||
}
|
||||
|
||||
run_test "bun --version" "$BINARY" --version
|
||||
run_test "bun -e eval" "$BINARY" -e "console.log(JSON.stringify({ok:1+1}))"
|
||||
|
||||
echo ""
|
||||
echo " All checks passed for $BINARY_NAME on $QEMU_CPU."
|
||||
scripts/verify-jit-stress-qemu.sh (new executable file, 148 lines)
@@ -0,0 +1,148 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
# Run JSC JIT stress tests under QEMU to verify that JIT-compiled code
|
||||
# doesn't use CPU instructions beyond the baseline target.
|
||||
#
|
||||
# This script exercises all JIT tiers (DFG, FTL, Wasm BBQ/OMG) and catches
|
||||
# cases where JIT-generated code emits AVX instructions on x64 or LSE
|
||||
# atomics on aarch64.
|
||||
#
|
||||
# See: test/js/bun/jsc-stress/ for the test fixtures.
|
||||
|
||||
ARCH=""
|
||||
BINARY=""
|
||||
|
||||
while [[ $# -gt 0 ]]; do
|
||||
case $1 in
|
||||
--arch) ARCH="$2"; shift 2 ;;
|
||||
--binary) BINARY="$2"; shift 2 ;;
|
||||
*) echo "Unknown arg: $1"; exit 1 ;;
|
||||
esac
|
||||
done
|
||||
|
||||
if [ -z "$ARCH" ] || [ -z "$BINARY" ]; then
|
||||
echo "Usage: $0 --arch <x64|aarch64> --binary <path>"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ ! -f "$BINARY" ]; then
|
||||
echo "ERROR: Binary not found: $BINARY"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Convert to absolute path for use after pushd
|
||||
BINARY="$(cd "$(dirname "$BINARY")" && pwd)/$(basename "$BINARY")"
|
||||
|
||||
# Select QEMU binary and CPU model
|
||||
if [ "$ARCH" = "x64" ]; then
|
||||
QEMU_BIN="qemu-x86_64"
|
||||
if [ -f "/usr/bin/qemu-x86_64-static" ]; then
|
||||
QEMU_BIN="qemu-x86_64-static"
|
||||
fi
|
||||
QEMU_CPU="Nehalem"
|
||||
CPU_DESC="Nehalem (SSE4.2, no AVX/AVX2/AVX512)"
|
||||
elif [ "$ARCH" = "aarch64" ]; then
|
||||
QEMU_BIN="qemu-aarch64"
|
||||
if [ -f "/usr/bin/qemu-aarch64-static" ]; then
|
||||
QEMU_BIN="qemu-aarch64-static"
|
||||
fi
|
||||
QEMU_CPU="cortex-a53"
|
||||
CPU_DESC="Cortex-A53 (ARMv8.0-A+CRC, no LSE/SVE)"
|
||||
else
|
||||
echo "ERROR: Unknown arch: $ARCH"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if ! command -v "$QEMU_BIN" &>/dev/null; then
|
||||
echo "ERROR: $QEMU_BIN not found. It must be pre-installed in the CI image."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
BINARY_NAME=$(basename "$BINARY")
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
REPO_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
|
||||
FIXTURES_DIR="$REPO_ROOT/test/js/bun/jsc-stress/fixtures"
|
||||
WASM_FIXTURES_DIR="$FIXTURES_DIR/wasm"
|
||||
PRELOAD_PATH="$REPO_ROOT/test/js/bun/jsc-stress/preload.js"
|
||||
|
||||
echo "--- Running JSC JIT stress tests on $CPU_DESC"
|
||||
echo " Binary: $BINARY"
|
||||
echo " QEMU: $QEMU_BIN -cpu $QEMU_CPU"
|
||||
echo ""
|
||||
|
||||
SIGILL_FAILURES=0
|
||||
OTHER_FAILURES=0
|
||||
PASSED=0
|
||||
|
||||
run_fixture() {
|
||||
local fixture="$1"
|
||||
local fixture_name
|
||||
fixture_name=$(basename "$fixture")
|
||||
|
||||
echo "+++ $fixture_name"
|
||||
if "$QEMU_BIN" -cpu "$QEMU_CPU" "$BINARY" --preload "$PRELOAD_PATH" "$fixture" 2>&1; then
|
||||
echo " PASS"
|
||||
((PASSED++))
|
||||
return 0
|
||||
else
|
||||
local exit_code=$?
|
||||
if [ $exit_code -eq 132 ]; then
|
||||
echo " FAIL: Illegal instruction (SIGILL)"
|
||||
echo ""
|
||||
echo " JIT-compiled code in $fixture_name uses CPU instructions not available on $QEMU_CPU."
|
||||
if [ "$ARCH" = "x64" ]; then
|
||||
echo " The baseline x64 build targets Nehalem (SSE4.2)."
|
||||
echo " JIT must not emit AVX, AVX2, or AVX512 instructions."
|
||||
else
|
||||
echo " The aarch64 build targets Cortex-A53 (ARMv8.0-A+CRC)."
|
||||
echo " JIT must not emit LSE atomics, SVE, or dotprod instructions."
|
||||
fi
|
||||
((SIGILL_FAILURES++))
|
||||
else
|
||||
# Non-SIGILL failures are warnings (test issues, not CPU instruction issues)
|
||||
echo " WARN: exit code $exit_code (not a CPU instruction issue)"
|
||||
((OTHER_FAILURES++))
|
||||
fi
|
||||
return $exit_code
|
||||
fi
|
||||
}
|
||||
|
||||
# Run JS fixtures (DFG/FTL)
|
||||
echo "--- JS fixtures (DFG/FTL)"
|
||||
for fixture in "$FIXTURES_DIR"/*.js; do
|
||||
if [ -f "$fixture" ]; then
|
||||
run_fixture "$fixture" || true
|
||||
fi
|
||||
done
|
||||
|
||||
# Run Wasm fixtures (BBQ/OMG)
|
||||
echo "--- Wasm fixtures (BBQ/OMG)"
|
||||
for fixture in "$WASM_FIXTURES_DIR"/*.js; do
|
||||
if [ -f "$fixture" ]; then
|
||||
# Wasm tests need to run from the wasm fixtures directory
|
||||
# because they reference .wasm files relative to the script
|
||||
pushd "$WASM_FIXTURES_DIR" > /dev/null
|
||||
run_fixture "$fixture" || true
|
||||
popd > /dev/null
|
||||
fi
|
||||
done
|
||||
|
||||
echo ""
|
||||
echo "--- Summary"
|
||||
echo " Passed: $PASSED"
|
||||
echo " SIGILL failures: $SIGILL_FAILURES"
|
||||
echo " Other failures: $OTHER_FAILURES (warnings, not CPU instruction issues)"
|
||||
echo ""
|
||||
|
||||
if [ $SIGILL_FAILURES -gt 0 ]; then
|
||||
echo " FAILED: JIT-generated code uses unsupported CPU instructions."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ $OTHER_FAILURES -gt 0 ]; then
|
||||
echo " Some tests failed for reasons unrelated to CPU instructions."
|
||||
echo " These are warnings and do not indicate JIT instruction issues."
|
||||
fi
|
||||
|
||||
echo " All JIT stress tests passed on $QEMU_CPU (no SIGILL)."
|
||||
@@ -3,6 +3,10 @@
|
||||
|
||||
$ErrorActionPreference = "Stop"
|
||||
|
||||
# Detect system architecture
|
||||
$script:IsARM64 = [System.Runtime.InteropServices.RuntimeInformation]::OSArchitecture -eq [System.Runtime.InteropServices.Architecture]::Arm64
|
||||
$script:VsArch = if ($script:IsARM64) { "arm64" } else { "amd64" }
|
||||
|
||||
if($env:VSINSTALLDIR -eq $null) {
|
||||
Write-Host "Loading Visual Studio environment, this may take a second..."
|
||||
|
||||
@@ -23,14 +27,14 @@ if($env:VSINSTALLDIR -eq $null) {
|
||||
Push-Location $vsDir
|
||||
try {
|
||||
$vsShell = (Join-Path -Path $vsDir -ChildPath "Common7\Tools\Launch-VsDevShell.ps1")
|
||||
. $vsShell -Arch amd64 -HostArch amd64
|
||||
. $vsShell -Arch $script:VsArch -HostArch $script:VsArch
|
||||
} finally {
|
||||
Pop-Location
|
||||
}
|
||||
}
|
||||
|
||||
if($env:VSCMD_ARG_TGT_ARCH -eq "x86") {
|
||||
throw "Visual Studio environment is targeting 32 bit, but only 64 bit is supported."
|
||||
throw "Visual Studio environment is targeting 32 bit x86, but only 64-bit architectures (x64/arm64) are supported."
|
||||
}
|
||||
|
||||
if ($args.Count -gt 0) {
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.