mirror of https://github.com/oven-sh/bun (synced 2026-02-02 15:08:46 +00:00)

Compare commits: claude/fix...main (68 commits)
Commit SHA1s:
ddefa11070, 35f8154319, 9d68ec882a, 1337f5dba4, 56b5be4ba4, 6c119d608e, a14a89ca95, a5246344fa, f648483fe7, 01fa61045f,
71ce550cfa, 8f61adf494, b4b7cc6d78, 3feea91087, bb4d5b9af5, adc1a6b05c, 8a11a03297, baea21f0c7, 209923a65c, cd4d98338c,
b64edcb490, 4feede90f5, fc4624c672, 1bfe5c6b37, aded701d1d, 7ebfdf97a8, 4cd3b241bc, cae67a17e2, a394063a7d, c9ebb17921,
2f510724a9, 9a16f4c345, ba426210c2, bd63fb9ef6, 9d6ef0af1d, d08e4bae09, b59c77a6e7, 6130aa8168, a595fe1cca, 799907362f,
2c0721eabe, 02680b69bf, 7c50164987, a553fda32b, f87fa27fac, 4071624edd, bfe40e8760, bcaae48a95, 6b3403a2b4, 70fe76209b,
ab3df344b8, 4680e89a91, f88f60af5a, 232e0df956, 9f0e78fc42, 043fafeefa, ce173b1112, 0c3b5e501b, 5dc72bc1d8, dfc36a8255,
827c7091d9, 13f78a7044, d8d8182e8e, e8b2455f11, c4f6874960, c63415c9c9, 86d4d87beb, 3b1c3bfe97
```diff
@@ -26,7 +26,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
     wget curl git python3 python3-pip ninja-build \
     software-properties-common apt-transport-https \
     ca-certificates gnupg lsb-release unzip \
-    libxml2-dev ruby ruby-dev bison gawk perl make golang ccache \
+    libxml2-dev ruby ruby-dev bison gawk perl make golang ccache qemu-user-static \
     && add-apt-repository ppa:ubuntu-toolchain-r/test \
     && apt-get update \
     && apt-get install -y gcc-13 g++-13 libgcc-13-dev libstdc++-13-dev \
```
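The only Dockerfile change is adding qemu-user-static, which installs user-mode emulator binaries such as qemu-aarch64-static so the container can execute foreign-architecture binaries; the QEMU-based verification steps introduced below presumably depend on this. A hypothetical illustration of the mechanism (the binary path is made up):

```js
import { execFileSync } from "node:child_process";

// With qemu-user-static installed (and binfmt_misc handlers registered),
// an aarch64 binary runs on an x64 host through the user-mode emulator.
const out = execFileSync("qemu-aarch64-static", ["./bun-linux-aarch64/bun", "--version"]);
console.log(out.toString().trim());
```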
```diff
@@ -537,6 +537,109 @@ function getLinkBunStep(platform, options) {
   };
 }
 
+/**
+ * Returns the artifact triplet for a platform, e.g. "bun-linux-aarch64" or "bun-linux-x64-musl-baseline".
+ * Matches the naming convention in cmake/targets/BuildBun.cmake.
+ * @param {Platform} platform
+ * @returns {string}
+ */
+function getTargetTriplet(platform) {
+  const { os, arch, abi, baseline } = platform;
+  let triplet = `bun-${os}-${arch}`;
+  if (abi === "musl") {
+    triplet += "-musl";
+  }
+  if (baseline) {
+    triplet += "-baseline";
+  }
+  return triplet;
+}
+
```
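For reference, the triplets this helper produces for two hypothetical platform objects (field names follow the destructuring above):

```js
getTargetTriplet({ os: "linux", arch: "aarch64" });
// => "bun-linux-aarch64"
getTargetTriplet({ os: "linux", arch: "x64", abi: "musl", baseline: true });
// => "bun-linux-x64-musl-baseline"
```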
```diff
+/**
+ * Returns true if a platform needs QEMU-based baseline CPU verification.
+ * x64 baseline builds verify no AVX/AVX2 instructions snuck in.
+ * aarch64 builds verify no LSE/SVE instructions snuck in.
+ * @param {Platform} platform
+ * @returns {boolean}
+ */
+function needsBaselineVerification(platform) {
+  const { os, arch, baseline } = platform;
+  if (os !== "linux") return false;
+  return (arch === "x64" && baseline) || arch === "aarch64";
+}
+
```
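A quick illustration of the predicate, again with hypothetical platform objects:

```js
needsBaselineVerification({ os: "linux", arch: "x64", baseline: true });  // true
needsBaselineVerification({ os: "linux", arch: "aarch64" });              // true (baseline flag irrelevant)
needsBaselineVerification({ os: "linux", arch: "x64", baseline: false }); // false
needsBaselineVerification({ os: "darwin", arch: "aarch64" });             // false (linux only)
```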
```diff
+/**
+ * @param {Platform} platform
+ * @param {PipelineOptions} options
+ * @returns {Step}
+ */
+function getVerifyBaselineStep(platform, options) {
+  const { arch } = platform;
+  const targetKey = getTargetKey(platform);
+  const archArg = arch === "x64" ? "x64" : "aarch64";
+
+  return {
+    key: `${targetKey}-verify-baseline`,
+    label: `${getTargetLabel(platform)} - verify-baseline`,
+    depends_on: [`${targetKey}-build-bun`],
+    agents: getLinkBunAgent(platform, options),
+    retry: getRetry(),
+    cancel_on_build_failing: isMergeQueue(),
+    timeout_in_minutes: 5,
+    command: [
+      `buildkite-agent artifact download '*.zip' . --step ${targetKey}-build-bun`,
+      `unzip -o '${getTargetTriplet(platform)}.zip'`,
+      `unzip -o '${getTargetTriplet(platform)}-profile.zip'`,
+      `chmod +x ${getTargetTriplet(platform)}/bun ${getTargetTriplet(platform)}-profile/bun-profile`,
+      `./scripts/verify-baseline-cpu.sh --arch ${archArg} --binary ${getTargetTriplet(platform)}/bun`,
+      `./scripts/verify-baseline-cpu.sh --arch ${archArg} --binary ${getTargetTriplet(platform)}-profile/bun-profile`,
+    ],
+  };
+}
+
```
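scripts/verify-baseline-cpu.sh itself is not part of this diff. A minimal sketch of the underlying idea, disassembling the binary and failing on any mnemonic beyond the baseline instruction set, might look like this (the patterns and helper are hypothetical, not the real script's checks):

```js
import { execFileSync } from "node:child_process";

// Hypothetical sketch: scan objdump output for non-baseline instructions.
// x64 baseline: no AVX/AVX2, so no ymm-register operands should appear.
// aarch64 baseline: no LSE atomics (ldadd/ldclr/ldeor/ldset/swp/cas...).
const forbidden = {
  x64: /%ymm\d+/,
  aarch64: /\b(ldadd|ldclr|ldeor|ldset|swp|cas)[a-z]*\b/,
};

function assertBaseline(arch, binary) {
  const asm = execFileSync("objdump", ["-d", binary], {
    maxBuffer: 1024 * 1024 * 1024, // bun's disassembly is very large
  }).toString();
  for (const line of asm.split("\n")) {
    if (forbidden[arch].test(line)) {
      throw new Error(`non-baseline instruction in ${binary}: ${line.trim()}`);
    }
  }
}
```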
```diff
+/**
+ * Returns true if the PR modifies SetupWebKit.cmake (WebKit version changes).
+ * JIT stress tests under QEMU should run when WebKit is updated to catch
+ * JIT-generated code that uses unsupported CPU instructions.
+ * @param {PipelineOptions} options
+ * @returns {boolean}
+ */
+function hasWebKitChanges(options) {
+  const { changedFiles = [] } = options;
+  return changedFiles.some(file => file.includes("SetupWebKit.cmake"));
+}
+
```
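With the options populated in main() below, a WebKit bump is detected like so:

```js
hasWebKitChanges({ changedFiles: ["cmake/tools/SetupWebKit.cmake"] }); // true
hasWebKitChanges({ changedFiles: ["src/js/builtins.d.ts"] });          // false
hasWebKitChanges({});                                                  // false (changedFiles defaults to [])
```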
```diff
+/**
+ * Returns a step that runs JSC JIT stress tests under QEMU.
+ * This verifies that JIT-compiled code doesn't use CPU instructions
+ * beyond the baseline target (no AVX on x64, no LSE on aarch64).
+ * @param {Platform} platform
+ * @param {PipelineOptions} options
+ * @returns {Step}
+ */
+function getJitStressTestStep(platform, options) {
+  const { arch } = platform;
+  const targetKey = getTargetKey(platform);
+  const archArg = arch === "x64" ? "x64" : "aarch64";
+
+  return {
+    key: `${targetKey}-jit-stress-qemu`,
+    label: `${getTargetLabel(platform)} - jit-stress-qemu`,
+    depends_on: [`${targetKey}-build-bun`],
+    agents: getLinkBunAgent(platform, options),
+    retry: getRetry(),
+    cancel_on_build_failing: isMergeQueue(),
+    // JIT stress tests are slow under QEMU emulation
+    timeout_in_minutes: 30,
+    command: [
+      `buildkite-agent artifact download '*.zip' . --step ${targetKey}-build-bun`,
+      `unzip -o '${getTargetTriplet(platform)}.zip'`,
+      `chmod +x ${getTargetTriplet(platform)}/bun`,
+      `./scripts/verify-jit-stress-qemu.sh --arch ${archArg} --binary ${getTargetTriplet(platform)}/bun`,
+    ],
+  };
+}
+
```
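Putting the pieces together: for a hypothetical linux-x64-baseline platform, and assuming getTargetKey() returns "linux-x64-baseline" with a matching label (neither helper is shown in this hunk), the verify step above would serialize to roughly:

```js
({
  key: "linux-x64-baseline-verify-baseline",
  label: "linux-x64-baseline - verify-baseline",
  depends_on: ["linux-x64-baseline-build-bun"],
  timeout_in_minutes: 5,
  // agents/retry/cancel_on_build_failing omitted; their helpers are not in this hunk
  command: [
    "buildkite-agent artifact download '*.zip' . --step linux-x64-baseline-build-bun",
    "unzip -o 'bun-linux-x64-baseline.zip'",
    "unzip -o 'bun-linux-x64-baseline-profile.zip'",
    "chmod +x bun-linux-x64-baseline/bun bun-linux-x64-baseline-profile/bun-profile",
    "./scripts/verify-baseline-cpu.sh --arch x64 --binary bun-linux-x64-baseline/bun",
    "./scripts/verify-baseline-cpu.sh --arch x64 --binary bun-linux-x64-baseline-profile/bun-profile",
  ],
});
```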
```diff
 /**
  * @param {Platform} platform
  * @param {PipelineOptions} options
@@ -774,6 +877,7 @@ function getBenchmarkStep() {
  * @property {Platform[]} [buildPlatforms]
  * @property {Platform[]} [testPlatforms]
  * @property {string[]} [testFiles]
+ * @property {string[]} [changedFiles]
  */
 
 /**
```
```diff
@@ -1126,6 +1230,14 @@ async function getPipeline(options = {}) {
     steps.push(getBuildZigStep(target, options));
     steps.push(getLinkBunStep(target, options));
 
+    if (needsBaselineVerification(target)) {
+      steps.push(getVerifyBaselineStep(target, options));
+      // Run JIT stress tests under QEMU when WebKit is updated
+      if (hasWebKitChanges(options)) {
+        steps.push(getJitStressTestStep(target, options));
+      }
+    }
+
     return getStepWithDependsOn(
       {
         key: getTargetKey(target),
```
```diff
@@ -1223,6 +1335,7 @@ async function main() {
       console.log(`- PR is only docs, skipping tests!`);
       return;
     }
+    options.changedFiles = allFiles;
   }
 
   startGroup("Generating pipeline...");
```
.github/workflows/update-cares.yml (vendored, 2 lines changed):

```diff
@@ -88,7 +88,7 @@ jobs:
 commit-message: "deps: update c-ares to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
 title: "deps: update c-ares to ${{ steps.check-version.outputs.tag }}"
 delete-branch: true
-branch: deps/update-cares-${{ github.run_number }}
+branch: deps/update-cares
 body: |
   ## What does this PR do?
```

.github/workflows/update-hdrhistogram.yml (vendored, 2 lines changed):

```diff
@@ -91,7 +91,7 @@ jobs:
 commit-message: "deps: update hdrhistogram to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
 title: "deps: update hdrhistogram to ${{ steps.check-version.outputs.tag }}"
 delete-branch: true
-branch: deps/update-hdrhistogram-${{ github.run_number }}
+branch: deps/update-hdrhistogram
 body: |
   ## What does this PR do?
```

.github/workflows/update-highway.yml (vendored, 2 lines changed):

```diff
@@ -107,7 +107,7 @@ jobs:
 commit-message: "deps: update highway to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
 title: "deps: update highway to ${{ steps.check-version.outputs.tag }}"
 delete-branch: true
-branch: deps/update-highway-${{ github.run_number }}
+branch: deps/update-highway
 body: |
   ## What does this PR do?
```

.github/workflows/update-libarchive.yml (vendored, 2 lines changed):

```diff
@@ -88,7 +88,7 @@ jobs:
 commit-message: "deps: update libarchive to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
 title: "deps: update libarchive to ${{ steps.check-version.outputs.tag }}"
 delete-branch: true
-branch: deps/update-libarchive-${{ github.run_number }}
+branch: deps/update-libarchive
 body: |
   ## What does this PR do?
```

.github/workflows/update-libdeflate.yml (vendored, 2 lines changed):

```diff
@@ -88,7 +88,7 @@ jobs:
 commit-message: "deps: update libdeflate to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
 title: "deps: update libdeflate to ${{ steps.check-version.outputs.tag }}"
 delete-branch: true
-branch: deps/update-libdeflate-${{ github.run_number }}
+branch: deps/update-libdeflate
 body: |
   ## What does this PR do?
```

.github/workflows/update-lolhtml.yml (vendored, 2 lines changed):

```diff
@@ -100,7 +100,7 @@ jobs:
 commit-message: "deps: update lolhtml to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
 title: "deps: update lolhtml to ${{ steps.check-version.outputs.tag }}"
 delete-branch: true
-branch: deps/update-lolhtml-${{ github.run_number }}
+branch: deps/update-lolhtml
 body: |
   ## What does this PR do?
```

.github/workflows/update-lshpack.yml (vendored, 2 lines changed):

```diff
@@ -105,7 +105,7 @@ jobs:
 commit-message: "deps: update lshpack to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
 title: "deps: update lshpack to ${{ steps.check-version.outputs.tag }}"
 delete-branch: true
-branch: deps/update-lshpack-${{ github.run_number }}
+branch: deps/update-lshpack
 body: |
   ## What does this PR do?
```

.github/workflows/update-root-certs.yml (vendored, 2 lines changed):

````diff
@@ -74,7 +74,7 @@ jobs:
 ```
 ${{ env.changed_files }}
 ```
-branch: certs/update-root-certs-${{ github.run_number }}
+branch: certs/update-root-certs
 base: main
 delete-branch: true
 labels:
````

.github/workflows/update-sqlite3.yml (vendored, 2 lines changed):

```diff
@@ -83,7 +83,7 @@ jobs:
 commit-message: "deps: update sqlite to ${{ steps.check-version.outputs.latest }}"
 title: "deps: update sqlite to ${{ steps.check-version.outputs.latest }}"
 delete-branch: true
-branch: deps/update-sqlite-${{ steps.check-version.outputs.latest }}
+branch: deps/update-sqlite
 body: |
   ## What does this PR do?
```

.github/workflows/update-vendor.yml (vendored, 2 lines changed):

```diff
@@ -68,7 +68,7 @@ jobs:
 commit-message: "deps: update ${{ matrix.package }} to ${{ steps.check-version.outputs.latest }} (${{ steps.check-version.outputs.latest }})"
 title: "deps: update ${{ matrix.package }} to ${{ steps.check-version.outputs.latest }}"
 delete-branch: true
-branch: deps/update-${{ matrix.package }}-${{ github.run_number }}
+branch: deps/update-${{ matrix.package }}
 body: |
   ## What does this PR do?
```

.github/workflows/update-zstd.yml (vendored, 2 lines changed):

```diff
@@ -88,7 +88,7 @@ jobs:
 commit-message: "deps: update zstd to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
 title: "deps: update zstd to ${{ steps.check-version.outputs.tag }}"
 delete-branch: true
-branch: deps/update-zstd-${{ github.run_number }}
+branch: deps/update-zstd
 body: |
   ## What does this PR do?
```
````diff
@@ -259,18 +259,13 @@ $ git clone https://github.com/oven-sh/WebKit vendor/WebKit
 # Check out the commit hash specified in `set(WEBKIT_VERSION <commit_hash>)` in cmake/tools/SetupWebKit.cmake
 $ git -C vendor/WebKit checkout <commit_hash>
 
-# Make a debug build of JSC. This will output build artifacts in ./vendor/WebKit/WebKitBuild/Debug
-# Optionally, you can use `bun run jsc:build` for a release build
-$ bun run jsc:build:debug && rm vendor/WebKit/WebKitBuild/Debug/JavaScriptCore/DerivedSources/inspector/InspectorProtocolObjects.h
-
-# After an initial run of `make jsc-debug`, you can rebuild JSC with:
-$ cmake --build vendor/WebKit/WebKitBuild/Debug --target jsc && rm vendor/WebKit/WebKitBuild/Debug/JavaScriptCore/DerivedSources/inspector/InspectorProtocolObjects.h
-
-# Build bun with the local JSC build
+# Build bun with the local JSC build — this automatically configures and builds JSC
 $ bun run build:local
 ```
 
-Using `bun run build:local` will build Bun in the `./build/debug-local` directory (instead of `./build/debug`), you'll have to change a couple of places to use this new directory:
+`bun run build:local` handles everything: configuring JSC, building JSC, and building Bun. On subsequent runs, JSC will incrementally rebuild if any WebKit sources changed. `ninja -Cbuild/debug-local` also works after the first build, and will build Bun+JSC.
+
+The build output goes to `./build/debug-local` (instead of `./build/debug`), so you'll need to update a couple of places:
 
 - The first line in [`src/js/builtins.d.ts`](/src/js/builtins.d.ts)
 - The `CompilationDatabase` line in [`.clangd` config](/.clangd) should be `CompilationDatabase: build/debug-local`
@@ -281,7 +276,7 @@ Note that the WebKit folder, including build artifacts, is 8GB+ in size.
 
 If you are using a JSC debug build and using VScode, make sure to run the `C/C++: Select a Configuration` command to configure intellisense to find the debug headers.
 
-Note that if you change make changes to our [WebKit fork](https://github.com/oven-sh/WebKit), you will also have to change [`SetupWebKit.cmake`](/cmake/tools/SetupWebKit.cmake) to point to the commit hash.
+Note that if you make changes to our [WebKit fork](https://github.com/oven-sh/WebKit), you will also have to change [`SetupWebKit.cmake`](/cmake/tools/SetupWebKit.cmake) to point to the commit hash.
 
 ## Troubleshooting
````
```diff
@@ -36,6 +36,7 @@ Bun statically links these libraries:
 | [`libbase64`](https://github.com/aklomp/base64/blob/master/LICENSE) | BSD 2-Clause |
 | [`libuv`](https://github.com/libuv/libuv) (on Windows) | MIT |
 | [`libdeflate`](https://github.com/ebiggers/libdeflate) | MIT |
+| [`uucode`](https://github.com/jacobsandlund/uucode) | MIT |
 | A fork of [`uWebsockets`](https://github.com/jarred-sumner/uwebsockets) | Apache 2.0 licensed |
 | Parts of [Tigerbeetle's IO code](https://github.com/tigerbeetle/tigerbeetle/blob/532c8b70b9142c17e07737ab6d3da68d7500cbca/src/io/windows.zig#L1) | Apache 2.0 licensed |
```
bench/bun.lock (194 lines changed; the scrape lost the +/- markers, so hunks are shown as captured, with removed/added entry pairs adjacent):

```
@@ -18,9 +18,13 @@
"fast-glob": "3.3.1",
"fastify": "^5.0.0",
"fdir": "^6.1.0",
"mitata": "^1.0.25",
"react": "^18.3.1",
"react-dom": "^18.3.1",
"marked": "^17.0.1",
"mitata": "1.0.20",
"react": "^19",
"react-dom": "^19",
"react-markdown": "^9.0.3",
"remark": "^15.0.1",
"remark-html": "^16.0.1",
"string-width": "7.1.0",
"strip-ansi": "^7.1.0",
"tar": "^7.4.3",

@@ -150,18 +154,36 @@
"@swc/core-win32-x64-msvc": ["@swc/core-win32-x64-msvc@1.3.35", "", { "os": "win32", "cpu": "x64" }, "sha512-/RvphT4WfuGfIK84Ha0dovdPrKB1bW/mc+dtdmhv2E3EGkNc5FoueNwYmXWRimxnU7X0X7IkcRhyKB4G5DeAmg=="],
"@types/debug": ["@types/debug@4.1.12", "", { "dependencies": { "@types/ms": "*" } }, "sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ=="],
"@types/estree": ["@types/estree@1.0.8", "", {}, "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w=="],
"@types/estree-jsx": ["@types/estree-jsx@1.0.5", "", { "dependencies": { "@types/estree": "*" } }, "sha512-52CcUVNFyfb1A2ALocQw/Dd1BQFNmSdkuC3BkZ6iqhdMfQz7JWOFRuJFloOzjk+6WijU56m9oKXFAXc7o3Towg=="],
"@types/fs-extra": ["@types/fs-extra@11.0.4", "", { "dependencies": { "@types/jsonfile": "*", "@types/node": "*" } }, "sha512-yTbItCNreRooED33qjunPthRcSjERP1r4MqCZc7wv0u2sUkzTFp45tgUfS5+r7FrZPdmCCNflLhVSP/o+SemsQ=="],
"@types/hast": ["@types/hast@3.0.4", "", { "dependencies": { "@types/unist": "*" } }, "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ=="],
"@types/jsonfile": ["@types/jsonfile@6.1.4", "", { "dependencies": { "@types/node": "*" } }, "sha512-D5qGUYwjvnNNextdU59/+fI+spnwtTFmyQP0h+PfIOSkNfpU6AOICUOkm4i0OnSk+NyjdPJrxCDro0sJsWlRpQ=="],
"@types/mdast": ["@types/mdast@4.0.4", "", { "dependencies": { "@types/unist": "*" } }, "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA=="],
"@types/minimist": ["@types/minimist@1.2.5", "", {}, "sha512-hov8bUuiLiyFPGyFPE1lwWhmzYbirOXQNNo40+y3zow8aFVTeyn3VWL0VFFfdNddA8S4Vf0Tc062rzyNr7Paag=="],
"@types/ms": ["@types/ms@2.1.0", "", {}, "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA=="],
"@types/node": ["@types/node@18.19.8", "", { "dependencies": { "undici-types": "~5.26.4" } }, "sha512-g1pZtPhsvGVTwmeVoexWZLTQaOvXwoSq//pTL0DHeNzUDrFnir4fgETdhjhIxjVnN+hKOuh98+E1eMLnUXstFg=="],
"@types/ps-tree": ["@types/ps-tree@1.1.6", "", {}, "sha512-PtrlVaOaI44/3pl3cvnlK+GxOM3re2526TJvPvh7W+keHIXdV4TE0ylpPBAcvFQCbGitaTXwL9u+RF7qtVeazQ=="],
"@types/react": ["@types/react@19.2.10", "", { "dependencies": { "csstype": "^3.2.2" } }, "sha512-WPigyYuGhgZ/cTPRXB2EwUw+XvsRA3GqHlsP4qteqrnnjDrApbS7MxcGr/hke5iUoeB7E/gQtrs9I37zAJ0Vjw=="],
"@types/unist": ["@types/unist@3.0.3", "", {}, "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q=="],
"@types/which": ["@types/which@3.0.3", "", {}, "sha512-2C1+XoY0huExTbs8MQv1DuS5FS86+SEjdM9F/+GS61gg5Hqbtj8ZiDSx8MfWcyei907fIPbfPGCOrNUTnVHY1g=="],
"@ungap/structured-clone": ["@ungap/structured-clone@1.3.0", "", {}, "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g=="],
"abstract-logging": ["abstract-logging@2.0.1", "", {}, "sha512-2BjRTZxTPvheOvGbBslFSYOUkr+SjPtOnrLP33f+VIWLzezQpZcqVg7ja3L4dBXmzzgwT+a029jRx5PCi3JuiA=="],
"ajv": ["ajv@8.17.1", "", { "dependencies": { "fast-deep-equal": "^3.1.3", "fast-uri": "^3.0.1", "json-schema-traverse": "^1.0.0", "require-from-string": "^2.0.2" } }, "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g=="],

@@ -176,6 +198,8 @@
"avvio": ["avvio@9.1.0", "", { "dependencies": { "@fastify/error": "^4.0.0", "fastq": "^1.17.1" } }, "sha512-fYASnYi600CsH/j9EQov7lECAniYiBFiiAtBNuZYLA2leLe9qOvZzqYHFjtIj6gD2VMoMLP14834LFWvr4IfDw=="],
"bail": ["bail@2.0.2", "", {}, "sha512-0xO6mYd7JB2YesxDKplafRpsiOzPt9V02ddPCLbY1xYGPOX24NTyN50qnUxgCPcSoYMhKpAuBTjQoRZCAkUDRw=="],
"benchmark": ["benchmark@2.1.4", "", { "dependencies": { "lodash": "^4.17.4", "platform": "^1.3.3" } }, "sha512-l9MlfN4M1K/H2fbhfMy3B7vJd6AGKJVQn2h6Sg/Yx+KckoUA7ewS5Vv6TjSq18ooE1kS9hhAlQRH3AkXIh/aOQ=="],
"braces": ["braces@3.0.2", "", { "dependencies": { "fill-range": "^7.0.1" } }, "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A=="],

@@ -184,8 +208,18 @@
"caniuse-lite": ["caniuse-lite@1.0.30001456", "", {}, "sha512-XFHJY5dUgmpMV25UqaD4kVq2LsiaU5rS8fb0f17pCoXQiQslzmFgnfOxfvo1bTpTqf7dwG/N/05CnLCnOEKmzA=="],
"ccount": ["ccount@2.0.1", "", {}, "sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg=="],
"chalk": ["chalk@5.3.0", "", {}, "sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w=="],
"character-entities": ["character-entities@2.0.2", "", {}, "sha512-shx7oQ0Awen/BRIdkjkvz54PnEEI/EjwXDSIZp86/KKdbafHh1Df/RYGBhn4hbe2+uKC9FnT5UCEdyPz3ai9hQ=="],
"character-entities-html4": ["character-entities-html4@2.1.0", "", {}, "sha512-1v7fgQRj6hnSwFpq1Eu0ynr/CDEw0rXo2B61qXrLNdHZmPKgb7fqS1a2JwF0rISo9q77jDI8VMEHoApn8qDoZA=="],
"character-entities-legacy": ["character-entities-legacy@3.0.0", "", {}, "sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ=="],
"character-reference-invalid": ["character-reference-invalid@2.0.1", "", {}, "sha512-iBZ4F4wRbyORVsu0jPV7gXkOsGYjGHPmAyv+HiHG8gi5PtC9KI2j1+v8/tlibRvjoWX027ypmG/n0HtO5t7unw=="],
"chownr": ["chownr@3.0.0", "", {}, "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g=="],
"color": ["color@4.2.3", "", { "dependencies": { "color-convert": "^2.0.1", "color-string": "^1.9.0" } }, "sha512-1rXeuUUiGGrykh+CeBdu5Ie7OJwinCgQY0bc7GCRxy5xVHy+moaqkpL/jqQq0MtQOeYcrqEz4abc5f0KtU7W4A=="],

@@ -196,18 +230,26 @@
"color-string": ["color-string@1.9.1", "", { "dependencies": { "color-name": "^1.0.0", "simple-swizzle": "^0.2.2" } }, "sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg=="],
"comma-separated-tokens": ["comma-separated-tokens@2.0.3", "", {}, "sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg=="],
"convert-source-map": ["convert-source-map@1.9.0", "", {}, "sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A=="],
"cookie": ["cookie@1.0.2", "", {}, "sha512-9Kr/j4O16ISv8zBBhJoi4bXOYNTkFLOqSL3UDB0njXxCXNezjeyVrJyGOWtgfs/q2km1gwBcfH8q1yEGoMYunA=="],
"cross-spawn": ["cross-spawn@7.0.3", "", { "dependencies": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", "which": "^2.0.1" } }, "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w=="],
"csstype": ["csstype@3.2.3", "", {}, "sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ=="],
"data-uri-to-buffer": ["data-uri-to-buffer@4.0.1", "", {}, "sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A=="],
"debug": ["debug@4.3.4", "", { "dependencies": { "ms": "2.1.2" } }, "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ=="],
"decode-named-character-reference": ["decode-named-character-reference@1.3.0", "", { "dependencies": { "character-entities": "^2.0.0" } }, "sha512-GtpQYB283KrPp6nRw50q3U9/VfOutZOe103qlN7BPP6Ad27xYnOIWv4lPzo8HCAL+mMZofJ9KEy30fq6MfaK6Q=="],
"dequal": ["dequal@2.0.3", "", {}, "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA=="],
"devlop": ["devlop@1.1.0", "", { "dependencies": { "dequal": "^2.0.0" } }, "sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA=="],
"dir-glob": ["dir-glob@3.0.1", "", { "dependencies": { "path-type": "^4.0.0" } }, "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA=="],
"duplexer": ["duplexer@0.1.2", "", {}, "sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg=="],

@@ -262,12 +304,16 @@
"escape-string-regexp": ["escape-string-regexp@1.0.5", "", {}, "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg=="],
"estree-util-is-identifier-name": ["estree-util-is-identifier-name@3.0.0", "", {}, "sha512-hFtqIDZTIUZ9BXLb8y4pYGyk6+wekIivNVTcmvk8NoOh+VeRn5y6cEHzbURrWbfp1fIqdVipilzj+lfaadNZmg=="],
"event-stream": ["event-stream@3.3.4", "", { "dependencies": { "duplexer": "~0.1.1", "from": "~0", "map-stream": "~0.1.0", "pause-stream": "0.0.11", "split": "0.3", "stream-combiner": "~0.0.4", "through": "~2.3.1" } }, "sha512-QHpkERcGsR0T7Qm3HNJSyXKEEj8AHNxkY3PK8TS2KJvQ7NiSHe3DDpwVKKtoYprL/AreyzFBeIkBIWChAqn60g=="],
"eventemitter3": ["eventemitter3@5.0.0", "", {}, "sha512-riuVbElZZNXLeLEoprfNYoDSwTBRR44X3mnhdI1YcnENpWTCsTTVZ2zFuqQcpoyqPQIUXdiPEU0ECAq0KQRaHg=="],
"execa": ["execa@8.0.1", "", { "dependencies": { "cross-spawn": "^7.0.3", "get-stream": "^8.0.1", "human-signals": "^5.0.0", "is-stream": "^3.0.0", "merge-stream": "^2.0.0", "npm-run-path": "^5.1.0", "onetime": "^6.0.0", "signal-exit": "^4.1.0", "strip-final-newline": "^3.0.0" } }, "sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg=="],
"extend": ["extend@3.0.2", "", {}, "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g=="],
"fast-decode-uri-component": ["fast-decode-uri-component@1.0.1", "", {}, "sha512-WKgKWg5eUxvRZGwW8FvfbaH7AXSh2cL+3j5fMGzUMCxWBJ3dV3a7Wz8y2f/uQ0e3B6WmodD3oS54jTQ9HVTIIg=="],
"fast-deep-equal": ["fast-deep-equal@3.1.3", "", {}, "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="],

@@ -318,20 +364,44 @@
"has-flag": ["has-flag@3.0.0", "", {}, "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw=="],
"hast-util-sanitize": ["hast-util-sanitize@5.0.2", "", { "dependencies": { "@types/hast": "^3.0.0", "@ungap/structured-clone": "^1.0.0", "unist-util-position": "^5.0.0" } }, "sha512-3yTWghByc50aGS7JlGhk61SPenfE/p1oaFeNwkOOyrscaOkMGrcW9+Cy/QAIOBpZxP1yqDIzFMR0+Np0i0+usg=="],
"hast-util-to-html": ["hast-util-to-html@9.0.5", "", { "dependencies": { "@types/hast": "^3.0.0", "@types/unist": "^3.0.0", "ccount": "^2.0.0", "comma-separated-tokens": "^2.0.0", "hast-util-whitespace": "^3.0.0", "html-void-elements": "^3.0.0", "mdast-util-to-hast": "^13.0.0", "property-information": "^7.0.0", "space-separated-tokens": "^2.0.0", "stringify-entities": "^4.0.0", "zwitch": "^2.0.4" } }, "sha512-OguPdidb+fbHQSU4Q4ZiLKnzWo8Wwsf5bZfbvu7//a9oTYoqD/fWpe96NuHkoS9h0ccGOTe0C4NGXdtS0iObOw=="],
"hast-util-to-jsx-runtime": ["hast-util-to-jsx-runtime@2.3.6", "", { "dependencies": { "@types/estree": "^1.0.0", "@types/hast": "^3.0.0", "@types/unist": "^3.0.0", "comma-separated-tokens": "^2.0.0", "devlop": "^1.0.0", "estree-util-is-identifier-name": "^3.0.0", "hast-util-whitespace": "^3.0.0", "mdast-util-mdx-expression": "^2.0.0", "mdast-util-mdx-jsx": "^3.0.0", "mdast-util-mdxjs-esm": "^2.0.0", "property-information": "^7.0.0", "space-separated-tokens": "^2.0.0", "style-to-js": "^1.0.0", "unist-util-position": "^5.0.0", "vfile-message": "^4.0.0" } }, "sha512-zl6s8LwNyo1P9uw+XJGvZtdFF1GdAkOg8ujOw+4Pyb76874fLps4ueHXDhXWdk6YHQ6OgUtinliG7RsYvCbbBg=="],
"hast-util-whitespace": ["hast-util-whitespace@3.0.0", "", { "dependencies": { "@types/hast": "^3.0.0" } }, "sha512-88JUN06ipLwsnv+dVn+OIYOvAuvBMy/Qoi6O7mQHxdPXpjy+Cd6xRkWwux7DKO+4sYILtLBRIKgsdpS2gQc7qw=="],
"html-url-attributes": ["html-url-attributes@3.0.1", "", {}, "sha512-ol6UPyBWqsrO6EJySPz2O7ZSr856WDrEzM5zMqp+FJJLGMW35cLYmmZnl0vztAZxRUoNZJFTCohfjuIJ8I4QBQ=="],
"html-void-elements": ["html-void-elements@3.0.0", "", {}, "sha512-bEqo66MRXsUGxWHV5IP0PUiAWwoEjba4VCzg0LjFJBpchPaTfyfCKTG6bc5F8ucKec3q5y6qOdGyYTSBEvhCrg=="],
"human-signals": ["human-signals@5.0.0", "", {}, "sha512-AXcZb6vzzrFAUE61HnN4mpLqd/cSIwNQjtNWR0euPm6y0iqx3G4gOXaIDdtdDwZmhwe82LA6+zinmW4UBWVePQ=="],
"ignore": ["ignore@5.3.0", "", {}, "sha512-g7dmpshy+gD7mh88OC9NwSGTKoc3kyLAZQRU1mt53Aw/vnvfXnbC+F/7F7QoYVKbV+KNvJx8wArewKy1vXMtlg=="],
"inline-style-parser": ["inline-style-parser@0.2.7", "", {}, "sha512-Nb2ctOyNR8DqQoR0OwRG95uNWIC0C1lCgf5Naz5H6Ji72KZ8OcFZLz2P5sNgwlyoJ8Yif11oMuYs5pBQa86csA=="],
"ipaddr.js": ["ipaddr.js@2.2.0", "", {}, "sha512-Ag3wB2o37wslZS19hZqorUnrnzSkpOVy+IiiDEiTqNubEYpYuHWIf6K4psgN2ZWKExS4xhVCrRVfb/wfW8fWJA=="],
"is-alphabetical": ["is-alphabetical@2.0.1", "", {}, "sha512-FWyyY60MeTNyeSRpkM2Iry0G9hpr7/9kD40mD/cGQEuilcZYS4okz8SN2Q6rLCJ8gbCt6fN+rC+6tMGS99LaxQ=="],
"is-alphanumerical": ["is-alphanumerical@2.0.1", "", { "dependencies": { "is-alphabetical": "^2.0.0", "is-decimal": "^2.0.0" } }, "sha512-hmbYhX/9MUMF5uh7tOXyK/n0ZvWpad5caBA17GsC6vyuCqaWliRG5K1qS9inmUhEMaOBIW7/whAnSwveW/LtZw=="],
"is-arrayish": ["is-arrayish@0.3.2", "", {}, "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ=="],
"is-decimal": ["is-decimal@2.0.1", "", {}, "sha512-AAB9hiomQs5DXWcRB1rqsxGUstbRroFOPPVAomNk/3XHR5JyEZChOyTWe2oayKnsSsr/kcGqF+z6yuH6HHpN0A=="],
"is-extglob": ["is-extglob@2.1.1", "", {}, "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ=="],
"is-glob": ["is-glob@4.0.3", "", { "dependencies": { "is-extglob": "^2.1.1" } }, "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg=="],
"is-hexadecimal": ["is-hexadecimal@2.0.1", "", {}, "sha512-DgZQp241c8oO6cA1SbTEWiXeoxV42vlcJxgH+B3hi1AiqqKruZR3ZGF8In3fj4+/y/7rHvlOZLZtgJ/4ttYGZg=="],
"is-number": ["is-number@7.0.0", "", {}, "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng=="],
"is-plain-obj": ["is-plain-obj@4.1.0", "", {}, "sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg=="],
"is-stream": ["is-stream@3.0.0", "", {}, "sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA=="],
"isexe": ["isexe@2.0.0", "", {}, "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw=="],

@@ -352,16 +422,76 @@
"lodash": ["lodash@4.17.21", "", {}, "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="],
"loose-envify": ["loose-envify@1.4.0", "", { "dependencies": { "js-tokens": "^3.0.0 || ^4.0.0" }, "bin": { "loose-envify": "cli.js" } }, "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q=="],
"longest-streak": ["longest-streak@3.1.0", "", {}, "sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g=="],
"lru-cache": ["lru-cache@5.1.1", "", { "dependencies": { "yallist": "^3.0.2" } }, "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w=="],
"map-stream": ["map-stream@0.1.0", "", {}, "sha512-CkYQrPYZfWnu/DAmVCpTSX/xHpKZ80eKh2lAkyA6AJTef6bW+6JpbQZN5rofum7da+SyN1bi5ctTm+lTfcCW3g=="],
"marked": ["marked@17.0.1", "", { "bin": { "marked": "bin/marked.js" } }, "sha512-boeBdiS0ghpWcSwoNm/jJBwdpFaMnZWRzjA6SkUMYb40SVaN1x7mmfGKp0jvexGcx+7y2La5zRZsYFZI6Qpypg=="],
"mdast-util-from-markdown": ["mdast-util-from-markdown@2.0.2", "", { "dependencies": { "@types/mdast": "^4.0.0", "@types/unist": "^3.0.0", "decode-named-character-reference": "^1.0.0", "devlop": "^1.0.0", "mdast-util-to-string": "^4.0.0", "micromark": "^4.0.0", "micromark-util-decode-numeric-character-reference": "^2.0.0", "micromark-util-decode-string": "^2.0.0", "micromark-util-normalize-identifier": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0", "unist-util-stringify-position": "^4.0.0" } }, "sha512-uZhTV/8NBuw0WHkPTrCqDOl0zVe1BIng5ZtHoDk49ME1qqcjYmmLmOf0gELgcRMxN4w2iuIeVso5/6QymSrgmA=="],
"mdast-util-mdx-expression": ["mdast-util-mdx-expression@2.0.1", "", { "dependencies": { "@types/estree-jsx": "^1.0.0", "@types/hast": "^3.0.0", "@types/mdast": "^4.0.0", "devlop": "^1.0.0", "mdast-util-from-markdown": "^2.0.0", "mdast-util-to-markdown": "^2.0.0" } }, "sha512-J6f+9hUp+ldTZqKRSg7Vw5V6MqjATc+3E4gf3CFNcuZNWD8XdyI6zQ8GqH7f8169MM6P7hMBRDVGnn7oHB9kXQ=="],
"mdast-util-mdx-jsx": ["mdast-util-mdx-jsx@3.2.0", "", { "dependencies": { "@types/estree-jsx": "^1.0.0", "@types/hast": "^3.0.0", "@types/mdast": "^4.0.0", "@types/unist": "^3.0.0", "ccount": "^2.0.0", "devlop": "^1.1.0", "mdast-util-from-markdown": "^2.0.0", "mdast-util-to-markdown": "^2.0.0", "parse-entities": "^4.0.0", "stringify-entities": "^4.0.0", "unist-util-stringify-position": "^4.0.0", "vfile-message": "^4.0.0" } }, "sha512-lj/z8v0r6ZtsN/cGNNtemmmfoLAFZnjMbNyLzBafjzikOM+glrjNHPlf6lQDOTccj9n5b0PPihEBbhneMyGs1Q=="],
"mdast-util-mdxjs-esm": ["mdast-util-mdxjs-esm@2.0.1", "", { "dependencies": { "@types/estree-jsx": "^1.0.0", "@types/hast": "^3.0.0", "@types/mdast": "^4.0.0", "devlop": "^1.0.0", "mdast-util-from-markdown": "^2.0.0", "mdast-util-to-markdown": "^2.0.0" } }, "sha512-EcmOpxsZ96CvlP03NghtH1EsLtr0n9Tm4lPUJUBccV9RwUOneqSycg19n5HGzCf+10LozMRSObtVr3ee1WoHtg=="],
"mdast-util-phrasing": ["mdast-util-phrasing@4.1.0", "", { "dependencies": { "@types/mdast": "^4.0.0", "unist-util-is": "^6.0.0" } }, "sha512-TqICwyvJJpBwvGAMZjj4J2n0X8QWp21b9l0o7eXyVJ25YNWYbJDVIyD1bZXE6WtV6RmKJVYmQAKWa0zWOABz2w=="],
"mdast-util-to-hast": ["mdast-util-to-hast@13.2.1", "", { "dependencies": { "@types/hast": "^3.0.0", "@types/mdast": "^4.0.0", "@ungap/structured-clone": "^1.0.0", "devlop": "^1.0.0", "micromark-util-sanitize-uri": "^2.0.0", "trim-lines": "^3.0.0", "unist-util-position": "^5.0.0", "unist-util-visit": "^5.0.0", "vfile": "^6.0.0" } }, "sha512-cctsq2wp5vTsLIcaymblUriiTcZd0CwWtCbLvrOzYCDZoWyMNV8sZ7krj09FSnsiJi3WVsHLM4k6Dq/yaPyCXA=="],
"mdast-util-to-markdown": ["mdast-util-to-markdown@2.1.2", "", { "dependencies": { "@types/mdast": "^4.0.0", "@types/unist": "^3.0.0", "longest-streak": "^3.0.0", "mdast-util-phrasing": "^4.0.0", "mdast-util-to-string": "^4.0.0", "micromark-util-classify-character": "^2.0.0", "micromark-util-decode-string": "^2.0.0", "unist-util-visit": "^5.0.0", "zwitch": "^2.0.0" } }, "sha512-xj68wMTvGXVOKonmog6LwyJKrYXZPvlwabaryTjLh9LuvovB/KAH+kvi8Gjj+7rJjsFi23nkUxRQv1KqSroMqA=="],
"mdast-util-to-string": ["mdast-util-to-string@4.0.0", "", { "dependencies": { "@types/mdast": "^4.0.0" } }, "sha512-0H44vDimn51F0YwvxSJSm0eCDOJTRlmN0R1yBh4HLj9wiV1Dn0QoXGbvFAWj2hSItVTlCmBF1hqKlIyUBVFLPg=="],
"merge-stream": ["merge-stream@2.0.0", "", {}, "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w=="],
"merge2": ["merge2@1.4.1", "", {}, "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg=="],
"micromark": ["micromark@4.0.2", "", { "dependencies": { "@types/debug": "^4.0.0", "debug": "^4.0.0", "decode-named-character-reference": "^1.0.0", "devlop": "^1.0.0", "micromark-core-commonmark": "^2.0.0", "micromark-factory-space": "^2.0.0", "micromark-util-character": "^2.0.0", "micromark-util-chunked": "^2.0.0", "micromark-util-combine-extensions": "^2.0.0", "micromark-util-decode-numeric-character-reference": "^2.0.0", "micromark-util-encode": "^2.0.0", "micromark-util-normalize-identifier": "^2.0.0", "micromark-util-resolve-all": "^2.0.0", "micromark-util-sanitize-uri": "^2.0.0", "micromark-util-subtokenize": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-zpe98Q6kvavpCr1NPVSCMebCKfD7CA2NqZ+rykeNhONIJBpc1tFKt9hucLGwha3jNTNI8lHpctWJWoimVF4PfA=="],
"micromark-core-commonmark": ["micromark-core-commonmark@2.0.3", "", { "dependencies": { "decode-named-character-reference": "^1.0.0", "devlop": "^1.0.0", "micromark-factory-destination": "^2.0.0", "micromark-factory-label": "^2.0.0", "micromark-factory-space": "^2.0.0", "micromark-factory-title": "^2.0.0", "micromark-factory-whitespace": "^2.0.0", "micromark-util-character": "^2.0.0", "micromark-util-chunked": "^2.0.0", "micromark-util-classify-character": "^2.0.0", "micromark-util-html-tag-name": "^2.0.0", "micromark-util-normalize-identifier": "^2.0.0", "micromark-util-resolve-all": "^2.0.0", "micromark-util-subtokenize": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-RDBrHEMSxVFLg6xvnXmb1Ayr2WzLAWjeSATAoxwKYJV94TeNavgoIdA0a9ytzDSVzBy2YKFK+emCPOEibLeCrg=="],
"micromark-factory-destination": ["micromark-factory-destination@2.0.1", "", { "dependencies": { "micromark-util-character": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-Xe6rDdJlkmbFRExpTOmRj9N3MaWmbAgdpSrBQvCFqhezUn4AHqJHbaEnfbVYYiexVSs//tqOdY/DxhjdCiJnIA=="],
"micromark-factory-label": ["micromark-factory-label@2.0.1", "", { "dependencies": { "devlop": "^1.0.0", "micromark-util-character": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-VFMekyQExqIW7xIChcXn4ok29YE3rnuyveW3wZQWWqF4Nv9Wk5rgJ99KzPvHjkmPXF93FXIbBp6YdW3t71/7Vg=="],
"micromark-factory-space": ["micromark-factory-space@2.0.1", "", { "dependencies": { "micromark-util-character": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-zRkxjtBxxLd2Sc0d+fbnEunsTj46SWXgXciZmHq0kDYGnck/ZSGj9/wULTV95uoeYiK5hRXP2mJ98Uo4cq/LQg=="],
"micromark-factory-title": ["micromark-factory-title@2.0.1", "", { "dependencies": { "micromark-factory-space": "^2.0.0", "micromark-util-character": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-5bZ+3CjhAd9eChYTHsjy6TGxpOFSKgKKJPJxr293jTbfry2KDoWkhBb6TcPVB4NmzaPhMs1Frm9AZH7OD4Cjzw=="],
"micromark-factory-whitespace": ["micromark-factory-whitespace@2.0.1", "", { "dependencies": { "micromark-factory-space": "^2.0.0", "micromark-util-character": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-Ob0nuZ3PKt/n0hORHyvoD9uZhr+Za8sFoP+OnMcnWK5lngSzALgQYKMr9RJVOWLqQYuyn6ulqGWSXdwf6F80lQ=="],
"micromark-util-character": ["micromark-util-character@2.1.1", "", { "dependencies": { "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q=="],
"micromark-util-chunked": ["micromark-util-chunked@2.0.1", "", { "dependencies": { "micromark-util-symbol": "^2.0.0" } }, "sha512-QUNFEOPELfmvv+4xiNg2sRYeS/P84pTW0TCgP5zc9FpXetHY0ab7SxKyAQCNCc1eK0459uoLI1y5oO5Vc1dbhA=="],
"micromark-util-classify-character": ["micromark-util-classify-character@2.0.1", "", { "dependencies": { "micromark-util-character": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-K0kHzM6afW/MbeWYWLjoHQv1sgg2Q9EccHEDzSkxiP/EaagNzCm7T/WMKZ3rjMbvIpvBiZgwR3dKMygtA4mG1Q=="],
"micromark-util-combine-extensions": ["micromark-util-combine-extensions@2.0.1", "", { "dependencies": { "micromark-util-chunked": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-OnAnH8Ujmy59JcyZw8JSbK9cGpdVY44NKgSM7E9Eh7DiLS2E9RNQf0dONaGDzEG9yjEl5hcqeIsj4hfRkLH/Bg=="],
"micromark-util-decode-numeric-character-reference": ["micromark-util-decode-numeric-character-reference@2.0.2", "", { "dependencies": { "micromark-util-symbol": "^2.0.0" } }, "sha512-ccUbYk6CwVdkmCQMyr64dXz42EfHGkPQlBj5p7YVGzq8I7CtjXZJrubAYezf7Rp+bjPseiROqe7G6foFd+lEuw=="],
"micromark-util-decode-string": ["micromark-util-decode-string@2.0.1", "", { "dependencies": { "decode-named-character-reference": "^1.0.0", "micromark-util-character": "^2.0.0", "micromark-util-decode-numeric-character-reference": "^2.0.0", "micromark-util-symbol": "^2.0.0" } }, "sha512-nDV/77Fj6eH1ynwscYTOsbK7rR//Uj0bZXBwJZRfaLEJ1iGBR6kIfNmlNqaqJf649EP0F3NWNdeJi03elllNUQ=="],
"micromark-util-encode": ["micromark-util-encode@2.0.1", "", {}, "sha512-c3cVx2y4KqUnwopcO9b/SCdo2O67LwJJ/UyqGfbigahfegL9myoEFoDYZgkT7f36T0bLrM9hZTAaAyH+PCAXjw=="],
"micromark-util-html-tag-name": ["micromark-util-html-tag-name@2.0.1", "", {}, "sha512-2cNEiYDhCWKI+Gs9T0Tiysk136SnR13hhO8yW6BGNyhOC4qYFnwF1nKfD3HFAIXA5c45RrIG1ub11GiXeYd1xA=="],
"micromark-util-normalize-identifier": ["micromark-util-normalize-identifier@2.0.1", "", { "dependencies": { "micromark-util-symbol": "^2.0.0" } }, "sha512-sxPqmo70LyARJs0w2UclACPUUEqltCkJ6PhKdMIDuJ3gSf/Q+/GIe3WKl0Ijb/GyH9lOpUkRAO2wp0GVkLvS9Q=="],
"micromark-util-resolve-all": ["micromark-util-resolve-all@2.0.1", "", { "dependencies": { "micromark-util-types": "^2.0.0" } }, "sha512-VdQyxFWFT2/FGJgwQnJYbe1jjQoNTS4RjglmSjTUlpUMa95Htx9NHeYW4rGDJzbjvCsl9eLjMQwGeElsqmzcHg=="],
"micromark-util-sanitize-uri": ["micromark-util-sanitize-uri@2.0.1", "", { "dependencies": { "micromark-util-character": "^2.0.0", "micromark-util-encode": "^2.0.0", "micromark-util-symbol": "^2.0.0" } }, "sha512-9N9IomZ/YuGGZZmQec1MbgxtlgougxTodVwDzzEouPKo3qFWvymFHWcnDi2vzV1ff6kas9ucW+o3yzJK9YB1AQ=="],
"micromark-util-subtokenize": ["micromark-util-subtokenize@2.1.0", "", { "dependencies": { "devlop": "^1.0.0", "micromark-util-chunked": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-XQLu552iSctvnEcgXw6+Sx75GflAPNED1qx7eBJ+wydBb2KCbRZe+NwvIEEMM83uml1+2WSXpBAcp9IUCgCYWA=="],
"micromark-util-symbol": ["micromark-util-symbol@2.0.1", "", {}, "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q=="],
"micromark-util-types": ["micromark-util-types@2.0.2", "", {}, "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA=="],
"micromatch": ["micromatch@4.0.5", "", { "dependencies": { "braces": "^3.0.2", "picomatch": "^2.3.1" } }, "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA=="],
"mimic-fn": ["mimic-fn@4.0.0", "", {}, "sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw=="],

@@ -372,7 +502,7 @@
"minizlib": ["minizlib@3.1.0", "", { "dependencies": { "minipass": "^7.1.2" } }, "sha512-KZxYo1BUkWD2TVFLr0MQoM8vUUigWD3LlD83a/75BqC+4qE0Hb1Vo5v1FgcfaNXvfXzr+5EhQ6ing/CaBijTlw=="],
"mitata": ["mitata@1.0.25", "", {}, "sha512-0v5qZtVW5vwj9FDvYfraR31BMDcRLkhSFWPTLaxx/Z3/EvScfVtAAWtMI2ArIbBcwh7P86dXh0lQWKiXQPlwYA=="],
"mitata": ["mitata@1.0.20", "", {}, "sha512-oHWYGX5bi4wGT/1zrhiZAEzqTV14Vq6/PUTW8WK0b3YHBBQcZz2QFm+InHhjnD0I7B6CMtwdGt2K0938r7YTdQ=="],
"ms": ["ms@2.1.2", "", {}, "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="],

@@ -388,6 +518,8 @@
"onetime": ["onetime@6.0.0", "", { "dependencies": { "mimic-fn": "^4.0.0" } }, "sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ=="],
"parse-entities": ["parse-entities@4.0.2", "", { "dependencies": { "@types/unist": "^2.0.0", "character-entities-legacy": "^3.0.0", "character-reference-invalid": "^2.0.0", "decode-named-character-reference": "^1.0.0", "is-alphanumerical": "^2.0.0", "is-decimal": "^2.0.0", "is-hexadecimal": "^2.0.0" } }, "sha512-GG2AQYWoLgL877gQIKeRPGO1xF9+eG1ujIb5soS5gPvLQ1y2o8FL90w2QWNdf9I361Mpp7726c+lj3U0qK1uGw=="],
"path-key": ["path-key@3.1.1", "", {}, "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q=="],
"path-type": ["path-type@4.0.0", "", {}, "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw=="],

@@ -408,18 +540,32 @@
"process-warning": ["process-warning@5.0.0", "", {}, "sha512-a39t9ApHNx2L4+HBnQKqxxHNs1r7KF+Intd8Q/g1bUh6q0WIp9voPXJ/x0j+ZL45KF1pJd9+q2jLIRMfvEshkA=="],
"property-information": ["property-information@7.1.0", "", {}, "sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ=="],
"ps-tree": ["ps-tree@1.2.0", "", { "dependencies": { "event-stream": "=3.3.4" }, "bin": { "ps-tree": "./bin/ps-tree.js" } }, "sha512-0VnamPPYHl4uaU/nSFeZZpR21QAWRz+sRv4iW9+v/GS/J5U5iZB5BNN6J0RMoOvdx2gWM2+ZFMIm58q24e4UYA=="],
"queue-microtask": ["queue-microtask@1.2.3", "", {}, "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A=="],
"quick-format-unescaped": ["quick-format-unescaped@4.0.4", "", {}, "sha512-tYC1Q1hgyRuHgloV/YXs2w15unPVh8qfu/qCTfhTYamaw7fyhumKa2yGpdSo87vY32rIclj+4fWYQXUMs9EHvg=="],
"react": ["react@18.3.1", "", { "dependencies": { "loose-envify": "^1.1.0" } }, "sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ=="],
"react": ["react@19.2.4", "", {}, "sha512-9nfp2hYpCwOjAN+8TZFGhtWEwgvWHXqESH8qT89AT/lWklpLON22Lc8pEtnpsZz7VmawabSU0gCjnj8aC0euHQ=="],
"react-dom": ["react-dom@18.3.1", "", { "dependencies": { "loose-envify": "^1.1.0", "scheduler": "^0.23.2" }, "peerDependencies": { "react": "^18.3.1" } }, "sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw=="],
"react-dom": ["react-dom@19.2.4", "", { "dependencies": { "scheduler": "^0.27.0" }, "peerDependencies": { "react": "^19.2.4" } }, "sha512-AXJdLo8kgMbimY95O2aKQqsz2iWi9jMgKJhRBAxECE4IFxfcazB2LmzloIoibJI3C12IlY20+KFaLv+71bUJeQ=="],
"react-markdown": ["react-markdown@9.1.0", "", { "dependencies": { "@types/hast": "^3.0.0", "@types/mdast": "^4.0.0", "devlop": "^1.0.0", "hast-util-to-jsx-runtime": "^2.0.0", "html-url-attributes": "^3.0.0", "mdast-util-to-hast": "^13.0.0", "remark-parse": "^11.0.0", "remark-rehype": "^11.0.0", "unified": "^11.0.0", "unist-util-visit": "^5.0.0", "vfile": "^6.0.0" }, "peerDependencies": { "@types/react": ">=18", "react": ">=18" } }, "sha512-xaijuJB0kzGiUdG7nc2MOMDUDBWPyGAjZtUrow9XxUeua8IqeP+VlIfAZ3bphpcLTnSZXz6z9jcVC/TCwbfgdw=="],
"real-require": ["real-require@0.2.0", "", {}, "sha512-57frrGM/OCTLqLOAh0mhVA9VBMHd+9U7Zb2THMGdBUoZVOtGbJzjxsYGDJ3A9AYYCP4hn6y1TVbaOfzWtm5GFg=="],
"remark": ["remark@15.0.1", "", { "dependencies": { "@types/mdast": "^4.0.0", "remark-parse": "^11.0.0", "remark-stringify": "^11.0.0", "unified": "^11.0.0" } }, "sha512-Eht5w30ruCXgFmxVUSlNWQ9iiimq07URKeFS3hNc8cUWy1llX4KDWfyEDZRycMc+znsN9Ux5/tJ/BFdgdOwA3A=="],
"remark-html": ["remark-html@16.0.1", "", { "dependencies": { "@types/mdast": "^4.0.0", "hast-util-sanitize": "^5.0.0", "hast-util-to-html": "^9.0.0", "mdast-util-to-hast": "^13.0.0", "unified": "^11.0.0" } }, "sha512-B9JqA5i0qZe0Nsf49q3OXyGvyXuZFDzAP2iOFLEumymuYJITVpiH1IgsTEwTpdptDmZlMDMWeDmSawdaJIGCXQ=="],
"remark-parse": ["remark-parse@11.0.0", "", { "dependencies": { "@types/mdast": "^4.0.0", "mdast-util-from-markdown": "^2.0.0", "micromark-util-types": "^2.0.0", "unified": "^11.0.0" } }, "sha512-FCxlKLNGknS5ba/1lmpYijMUzX2esxW5xQqjWxw2eHFfS2MSdaHVINFmhjo+qN1WhZhNimq0dZATN9pH0IDrpA=="],
"remark-rehype": ["remark-rehype@11.1.2", "", { "dependencies": { "@types/hast": "^3.0.0", "@types/mdast": "^4.0.0", "mdast-util-to-hast": "^13.0.0", "unified": "^11.0.0", "vfile": "^6.0.0" } }, "sha512-Dh7l57ianaEoIpzbp0PC9UKAdCSVklD8E5Rpw7ETfbTl3FqcOOgq5q2LVDhgGCkaBv7p24JXikPdvhhmHvKMsw=="],
"remark-stringify": ["remark-stringify@11.0.0", "", { "dependencies": { "@types/mdast": "^4.0.0", "mdast-util-to-markdown": "^2.0.0", "unified": "^11.0.0" } }, "sha512-1OSmLd3awB/t8qdoEOMazZkNsfVTeY4fTsgzcQFdXNq8ToTN4ZGwrMnlda4K6smTFKD+GRV6O48i6Z4iKgPPpw=="],
"require-from-string": ["require-from-string@2.0.2", "", {}, "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw=="],
"ret": ["ret@0.5.0", "", {}, "sha512-I1XxrZSQ+oErkRR4jYbAyEEu2I0avBvvMM5JN+6EBprOGRCs63ENqZ3vjavq8fBw2+62G5LF5XelKwuJpcvcxw=="],

@@ -434,7 +580,7 @@
"safe-stable-stringify": ["safe-stable-stringify@2.5.0", "", {}, "sha512-b3rppTKm9T+PsVCBEOUR46GWI7fdOs00VKZ1+9c1EWDaDMvjQc6tUwuFyIprgGgTcWoVHSKrU8H31ZHA2e0RHA=="],
"scheduler": ["scheduler@0.23.2", "", { "dependencies": { "loose-envify": "^1.1.0" } }, "sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ=="],
"scheduler": ["scheduler@0.27.0", "", {}, "sha512-eNv+WrVbKu1f3vbYJT/xtiF5syA5HPIMtf9IgY/nKg0sWqzAUEvqY/xm7OcZc/qafLx/iO9FgOmeSAp4v5ti/Q=="],
"secure-json-parse": ["secure-json-parse@4.0.0", "", {}, "sha512-dxtLJO6sc35jWidmLxo7ij+Eg48PM/kleBsxpC8QJE0qJICe+KawkDQmvCMZUr9u7WKVHgMW6vy3fQ7zMiFZMA=="],

@@ -454,6 +600,8 @@
"sonic-boom": ["sonic-boom@4.2.0", "", { "dependencies": { "atomic-sleep": "^1.0.0" } }, "sha512-INb7TM37/mAcsGmc9hyyI6+QR3rR1zVRu36B0NeGXKnOOLiZOfER5SA+N7X7k3yUYRzLWafduTDvJAfDswwEww=="],
"space-separated-tokens": ["space-separated-tokens@2.0.2", "", {}, "sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q=="],
"split": ["split@0.3.3", "", { "dependencies": { "through": "2" } }, "sha512-wD2AeVmxXRBoX44wAycgjVpMhvbwdI2aZjCkvfNcH1YqHQvJVa1duWc73OyVGJUc05fhFaTZeQ/PYsrmyH0JVA=="],
"split2": ["split2@4.2.0", "", {}, "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg=="],

@@ -462,10 +610,16 @@
"string-width": ["string-width@7.1.0", "", { "dependencies": { "emoji-regex": "^10.3.0", "get-east-asian-width": "^1.0.0", "strip-ansi": "^7.1.0" } }, "sha512-SEIJCWiX7Kg4c129n48aDRwLbFb2LJmXXFrWBG4NGaRtMQ3myKPKbwrD1BKqQn74oCoNMBVrfDEr5M9YxCsrkw=="],
"stringify-entities": ["stringify-entities@4.0.4", "", { "dependencies": { "character-entities-html4": "^2.0.0", "character-entities-legacy": "^3.0.0" } }, "sha512-IwfBptatlO+QCJUo19AqvrPNqlVMpW9YEL2LIVY+Rpv2qsjCGxaDLNRgeGsQWJhfItebuJhsGSLjaBbNSQ+ieg=="],
"strip-ansi": ["strip-ansi@7.1.0", "", { "dependencies": { "ansi-regex": "^6.0.1" } }, "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ=="],
"strip-final-newline": ["strip-final-newline@3.0.0", "", {}, "sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw=="],
"style-to-js": ["style-to-js@1.1.21", "", { "dependencies": { "style-to-object": "1.0.14" } }, "sha512-RjQetxJrrUJLQPHbLku6U/ocGtzyjbJMP9lCNK7Ag0CNh690nSH8woqWH9u16nMjYBAok+i7JO1NP2pOy8IsPQ=="],
"style-to-object": ["style-to-object@1.0.14", "", { "dependencies": { "inline-style-parser": "0.2.7" } }, "sha512-LIN7rULI0jBscWQYaSswptyderlarFkjQ+t79nzty8tcIAceVomEVlLzH5VP4Cmsv6MtKhs7qaAiwlcp+Mgaxw=="],
"supports-color": ["supports-color@5.5.0", "", { "dependencies": { "has-flag": "^3.0.0" } }, "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow=="],
"tar": ["tar@7.5.2", "", { "dependencies": { "@isaacs/fs-minipass": "^4.0.0", "chownr": "^3.0.0", "minipass": "^7.1.2", "minizlib": "^3.1.0", "yallist": "^5.0.0" } }, "sha512-7NyxrTE4Anh8km8iEy7o0QYPs+0JKBTj5ZaqHg6B39erLg0qYXN3BijtShwbsNSvQ+LN75+KV+C4QR/f6Gwnpg=="],

@@ -482,12 +636,32 @@
"toad-cache": ["toad-cache@3.7.0", "", {}, "sha512-/m8M+2BJUpoJdgAHoG+baCwBT+tf2VraSfkBgl0Y00qIWt41DJ8R5B8nsEw0I58YwF5IZH6z24/2TobDKnqSWw=="],
"trim-lines": ["trim-lines@3.0.1", "", {}, "sha512-kRj8B+YHZCc9kQYdWfJB2/oUl9rA99qbowYYBtr4ui4mZyAQ2JpvVBd/6U2YloATfqBhBTSMhTpgBHtU0Mf3Rg=="],
"trough": ["trough@2.2.0", "", {}, "sha512-tmMpK00BjZiUyVyvrBK7knerNgmgvcV/KLVyuma/SC+TQN167GrMRciANTz09+k3zW8L8t60jWO1GpfkZdjTaw=="],
"undici-types": ["undici-types@5.26.5", "", {}, "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="],
"unified": ["unified@11.0.5", "", { "dependencies": { "@types/unist": "^3.0.0", "bail": "^2.0.0", "devlop": "^1.0.0", "extend": "^3.0.0", "is-plain-obj": "^4.0.0", "trough": "^2.0.0", "vfile": "^6.0.0" } }, "sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA=="],
"unist-util-is": ["unist-util-is@6.0.1", "", { "dependencies": { "@types/unist": "^3.0.0" } }, "sha512-LsiILbtBETkDz8I9p1dQ0uyRUWuaQzd/cuEeS1hoRSyW5E5XGmTzlwY1OrNzzakGowI9Dr/I8HVaw4hTtnxy8g=="],
"unist-util-position": ["unist-util-position@5.0.0", "", { "dependencies": { "@types/unist": "^3.0.0" } }, "sha512-fucsC7HjXvkB5R3kTCO7kUjRdrS0BJt3M/FPxmHMBOm8JQi2BsHAHFsy27E0EolP8rp0NzXsJ+jNPyDWvOJZPA=="],
"unist-util-stringify-position": ["unist-util-stringify-position@4.0.0", "", { "dependencies": { "@types/unist": "^3.0.0" } }, "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ=="],
"unist-util-visit": ["unist-util-visit@5.1.0", "", { "dependencies": { "@types/unist": "^3.0.0", "unist-util-is": "^6.0.0", "unist-util-visit-parents": "^6.0.0" } }, "sha512-m+vIdyeCOpdr/QeQCu2EzxX/ohgS8KbnPDgFni4dQsfSCtpz8UqDyY5GjRru8PDKuYn7Fq19j1CQ+nJSsGKOzg=="],
"unist-util-visit-parents": ["unist-util-visit-parents@6.0.2", "", { "dependencies": { "@types/unist": "^3.0.0", "unist-util-is": "^6.0.0" } }, "sha512-goh1s1TBrqSqukSc8wrjwWhL0hiJxgA8m4kFxGlQ+8FYQ3C/m11FcTs4YYem7V664AhHVvgoQLk890Ssdsr2IQ=="],
"universalify": ["universalify@2.0.1", "", {}, "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw=="],
"update-browserslist-db": ["update-browserslist-db@1.0.10", "", { "dependencies": { "escalade": "^3.1.1", "picocolors": "^1.0.0" }, "peerDependencies": { "browserslist": ">= 4.21.0" }, "bin": { "browserslist-lint": "cli.js" } }, "sha512-OztqDenkfFkbSG+tRxBeAnCVPckDBcvibKd35yDONx6OU8N7sqgwc7rCbkJ/WcYtVRZ4ba68d6byhC21GFh7sQ=="],
"vfile": ["vfile@6.0.3", "", { "dependencies": { "@types/unist": "^3.0.0", "vfile-message": "^4.0.0" } }, "sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q=="],
"vfile-message": ["vfile-message@4.0.3", "", { "dependencies": { "@types/unist": "^3.0.0", "unist-util-stringify-position": "^4.0.0" } }, "sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw=="],
"web-streams-polyfill": ["web-streams-polyfill@3.3.2", "", {}, "sha512-3pRGuxRF5gpuZc0W+EpwQRmCD7gRqcDOMt688KmdlDAgAyaB1XlN0zq2njfDNm44XVdIouE7pZ6GzbdyH47uIQ=="],
"webpod": ["webpod@0.0.2", "", { "bin": { "webpod": "dist/index.js" } }, "sha512-cSwwQIeg8v4i3p4ajHhwgR7N6VyxAf+KYSSsY6Pd3aETE+xEU4vbitz7qQkB0I321xnhDdgtxuiSfk5r/FVtjg=="],

@@ -500,6 +674,8 @@
"yaml": ["yaml@2.3.4", "", {}, "sha512-8aAvwVUSHpfEqTQ4w/KMlf3HcRdt50E5ODIQJBw1fQ5RL34xabzxtUlzTXVqc4rkZsPbvrXKWnABCD7kWSmocA=="],
"zwitch": ["zwitch@2.0.4", "", {}, "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A=="],
"zx": ["zx@7.2.3", "", { "dependencies": { "@types/fs-extra": "^11.0.1", "@types/minimist": "^1.2.2", "@types/node": "^18.16.3", "@types/ps-tree": "^1.1.2", "@types/which": "^3.0.0", "chalk": "^5.2.0", "fs-extra": "^11.1.1", "fx": "*", "globby": "^13.1.4", "minimist": "^1.2.8", "node-fetch": "3.3.1", "ps-tree": "^1.2.0", "webpod": "^0", "which": "^3.0.0", "yaml": "^2.2.2" }, "bin": { "zx": "build/cli.js" } }, "sha512-QODu38nLlYXg/B/Gw7ZKiZrvPkEsjPN3LQ5JFXM7h0JvwhEdPNNl+4Ao1y4+o3CLNiDUNcwzQYZ4/Ko7kKzCMA=="],
"@babel/generator/@jridgewell/gen-mapping": ["@jridgewell/gen-mapping@0.3.2", "", { "dependencies": { "@jridgewell/set-array": "^1.0.1", "@jridgewell/sourcemap-codec": "^1.4.10", "@jridgewell/trace-mapping": "^0.3.9" } }, "sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A=="],

@@ -518,6 +694,8 @@
"npm-run-path/path-key": ["path-key@4.0.0", "", {}, "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ=="],
"parse-entities/@types/unist": ["@types/unist@2.0.11", "", {}, "sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA=="],
"@babel/highlight/chalk/ansi-styles": ["ansi-styles@3.2.1", "", { "dependencies": { "color-convert": "^1.9.0" } }, "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA=="],
"@babel/highlight/chalk/ansi-styles/color-convert": ["color-convert@1.9.3", "", { "dependencies": { "color-name": "1.1.3" } }, "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg=="],
```
15 bench/json5/bun.lock Normal file
@@ -0,0 +1,15 @@
{
  "lockfileVersion": 1,
  "configVersion": 1,
  "workspaces": {
    "": {
      "name": "json5-benchmark",
      "dependencies": {
        "json5": "^2.2.3",
      },
    },
  },
  "packages": {
    "json5": ["json5@2.2.3", "", { "bin": { "json5": "lib/cli.js" } }, "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg=="],
  }
}
88 bench/json5/json5.mjs Normal file
@@ -0,0 +1,88 @@
import JSON5 from "json5";
import { bench, group, run } from "../runner.mjs";

// Prefer the native implementation when running under Bun; otherwise fall back to the npm json5 package.
const isBun = typeof Bun !== "undefined" && Bun.JSON5;

function sizeLabel(n) {
  if (n >= 1024 * 1024) return `${(n / 1024 / 1024).toFixed(1)}MB`;
  if (n >= 1024) return `${(n / 1024).toFixed(0)}KB`;
  return `${n}B`;
}

// -- parse inputs --

const smallJson5 = `{
  // User profile
  name: "John Doe",
  age: 30,
  email: 'john@example.com',
  active: true,
}`;

function generateLargeJson5(count) {
  const lines = ["{\n // Auto-generated dataset\n items: [\n"];
  for (let i = 0; i < count; i++) {
    lines.push(`  {
    id: ${i},
    name: 'item_${i}',
    value: ${(Math.random() * 1000).toFixed(2)},
    hex: 0x${i.toString(16).toUpperCase()},
    active: ${i % 2 === 0},
    tags: ['tag_${i % 10}', 'category_${i % 5}',],
    // entry ${i}
  },\n`);
  }
  lines.push(" ],\n total: " + count + ",\n status: 'complete',\n}\n");
  return lines.join("");
}

const largeJson5 = generateLargeJson5(6500);

// -- stringify inputs --

const smallObject = {
  name: "John Doe",
  age: 30,
  email: "john@example.com",
  active: true,
};

const largeObject = {
  items: Array.from({ length: 10000 }, (_, i) => ({
    id: i,
    name: `item_${i}`,
    value: +(Math.random() * 1000).toFixed(2),
    active: i % 2 === 0,
    tags: [`tag_${i % 10}`, `category_${i % 5}`],
  })),
  total: 10000,
  status: "complete",
};

const stringify = isBun ? Bun.JSON5.stringify : JSON5.stringify;

// -- parse benchmarks --

group(`parse small (${sizeLabel(smallJson5.length)})`, () => {
  if (isBun) bench("Bun.JSON5.parse", () => Bun.JSON5.parse(smallJson5));
  bench("json5.parse", () => JSON5.parse(smallJson5));
});

group(`parse large (${sizeLabel(largeJson5.length)})`, () => {
  if (isBun) bench("Bun.JSON5.parse", () => Bun.JSON5.parse(largeJson5));
  bench("json5.parse", () => JSON5.parse(largeJson5));
});

// -- stringify benchmarks --

group(`stringify small (${sizeLabel(stringify(smallObject).length)})`, () => {
  if (isBun) bench("Bun.JSON5.stringify", () => Bun.JSON5.stringify(smallObject));
  bench("json5.stringify", () => JSON5.stringify(smallObject));
});

group(`stringify large (${sizeLabel(stringify(largeObject).length)})`, () => {
  if (isBun) bench("Bun.JSON5.stringify", () => Bun.JSON5.stringify(largeObject));
  bench("json5.stringify", () => JSON5.stringify(largeObject));
});

await run();
7 bench/json5/package.json Normal file
@@ -0,0 +1,7 @@
{
  "name": "json5-benchmark",
  "version": "1.0.0",
  "dependencies": {
    "json5": "^2.2.3"
  }
}
@@ -14,14 +14,18 @@
    "fast-glob": "3.3.1",
    "fastify": "^5.0.0",
    "fdir": "^6.1.0",
    "mitata": "^1.0.25",
    "react": "^18.3.1",
    "react-dom": "^18.3.1",
    "marked": "^17.0.1",
    "mitata": "1.0.20",
    "react": "^19",
    "react-dom": "^19",
    "react-markdown": "^9.0.3",
    "remark": "^15.0.1",
    "remark-html": "^16.0.1",
    "string-width": "7.1.0",
    "wrap-ansi": "^9.0.0",
    "strip-ansi": "^7.1.0",
    "tar": "^7.4.3",
    "tinycolor2": "^1.6.0",
    "wrap-ansi": "^9.0.0",
    "zx": "^7.2.3"
  },
  "scripts": {
92 bench/snippets/markdown-react.mjs Normal file
@@ -0,0 +1,92 @@
import React from "react";
import { renderToString } from "react-dom/server";
import ReactMarkdown from "react-markdown";

const markdown = `# Project README

## Introduction

This is a medium-sized markdown document that includes **bold text**, *italic text*,
and \`inline code\`. It also has [links](https://example.com) and various formatting.

## Features

- Feature one with **bold**
- Feature two with *emphasis*
- Feature three with \`code\`
- Feature four with [a link](https://example.com)

## Code Example

\`\`\`javascript
function hello() {
  console.log("Hello, world!");
  return 42;
}

const result = hello();
\`\`\`

## Table

| Name | Value | Description |
|------|-------|-------------|
| foo | 1 | First item |
| bar | 2 | Second item |
| baz | 3 | Third item |

## Blockquote

> This is a blockquote with **bold** and *italic* text.
> It spans multiple lines and contains a [link](https://example.com).

---

### Nested Lists

1. First ordered item
   - Nested unordered
   - Another nested
2. Second ordered item
   1. Nested ordered
   2. Another nested
3. Third ordered item

Some final paragraph with ~~strikethrough~~ text and more **formatting**.
`;

// Verify outputs are roughly the same
const bunHtml = renderToString(Bun.markdown.react(markdown));
const reactMarkdownHtml = renderToString(React.createElement(ReactMarkdown, { children: markdown }));

console.log("=== Bun.markdown.react output ===");
console.log(bunHtml.slice(0, 500));
console.log(`... (${bunHtml.length} chars total)\n`);

console.log("=== react-markdown output ===");
console.log(reactMarkdownHtml.slice(0, 500));
console.log(`... (${reactMarkdownHtml.length} chars total)\n`);

const server = Bun.serve({
  port: 0,
  routes: {
    "/bun-markdown": () => {
      return new Response(renderToString(Bun.markdown.react(markdown)), {
        headers: { "Content-Type": "text/html" },
      });
    },
    "/react-markdown": () => {
      return new Response(renderToString(React.createElement(ReactMarkdown, { children: markdown })), {
        headers: { "Content-Type": "text/html" },
      });
    },
  },
});

console.log(`Server listening on ${server.url}`);
console.log(`  ${server.url}bun-markdown`);
console.log(`  ${server.url}react-markdown`);
console.log();
console.log("Run:");
console.log(`  oha -c 20 -z 5s ${server.url}bun-markdown`);
console.log(`  oha -c 20 -z 5s ${server.url}react-markdown`);
159 bench/snippets/markdown.mjs Normal file
@@ -0,0 +1,159 @@
import { marked } from "marked";
import { remark } from "remark";
import remarkHtml from "remark-html";
import { bench, run, summary } from "../runner.mjs";

const remarkProcessor = remark().use(remarkHtml);

const small = `# Hello World

This is a **bold** and *italic* paragraph with a [link](https://example.com).

- Item 1
- Item 2
- Item 3
`;

const medium = `# Project README

## Introduction

This is a medium-sized markdown document that includes **bold text**, *italic text*,
and \`inline code\`. It also has [links](https://example.com) and various formatting.

## Features

- Feature one with **bold**
- Feature two with *emphasis*
- Feature three with \`code\`
- Feature four with [a link](https://example.com)

## Code Example

\`\`\`javascript
function hello() {
  console.log("Hello, world!");
  return 42;
}

const result = hello();
\`\`\`

## Table

| Name | Value | Description |
|------|-------|-------------|
| foo | 1 | First item |
| bar | 2 | Second item |
| baz | 3 | Third item |

## Blockquote

> This is a blockquote with **bold** and *italic* text.
> It spans multiple lines and contains a [link](https://example.com).

---

### Nested Lists

1. First ordered item
   - Nested unordered
   - Another nested
2. Second ordered item
   1. Nested ordered
   2. Another nested
3. Third ordered item

Some final paragraph with ~~strikethrough~~ text and more **formatting**.
`;

const large = medium.repeat(20);

const renderCallbacks = {
  heading: (children, { level }) => `<h${level}>${children}</h${level}>`,
  paragraph: children => `<p>${children}</p>`,
  strong: children => `<strong>${children}</strong>`,
  emphasis: children => `<em>${children}</em>`,
  codespan: children => `<code>${children}</code>`,
  code: (children, { language }) =>
    language
      ? `<pre><code class="language-${language}">${children}</code></pre>`
      : `<pre><code>${children}</code></pre>`,
  link: (children, { href, title }) =>
    title ? `<a href="${href}" title="${title}">${children}</a>` : `<a href="${href}">${children}</a>`,
  image: (children, { src, title }) =>
    title ? `<img src="${src}" alt="${children}" title="${title}" />` : `<img src="${src}" alt="${children}" />`,
  list: (children, { ordered, start }) => (ordered ? `<ol start="${start}">${children}</ol>` : `<ul>${children}</ul>`),
  listItem: children => `<li>${children}</li>`,
  blockquote: children => `<blockquote>${children}</blockquote>`,
  hr: () => `<hr />`,
  strikethrough: children => `<del>${children}</del>`,
  table: children => `<table>${children}</table>`,
  thead: children => `<thead>${children}</thead>`,
  tbody: children => `<tbody>${children}</tbody>`,
  tr: children => `<tr>${children}</tr>`,
  th: children => `<th>${children}</th>`,
  td: children => `<td>${children}</td>`,
};

summary(() => {
  if (typeof Bun !== "undefined" && Bun.markdown) {
    bench(`small (${small.length} chars) - Bun.markdown.html`, () => {
      return Bun.markdown.html(small);
    });

    bench(`small (${small.length} chars) - Bun.markdown.render`, () => {
      return Bun.markdown.render(small, renderCallbacks);
    });
  }

  bench(`small (${small.length} chars) - marked`, () => {
    return marked(small);
  });

  bench(`small (${small.length} chars) - remark`, () => {
    return remarkProcessor.processSync(small).toString();
  });
});

summary(() => {
  if (typeof Bun !== "undefined" && Bun.markdown) {
    bench(`medium (${medium.length} chars) - Bun.markdown.html`, () => {
      return Bun.markdown.html(medium);
    });

    bench(`medium (${medium.length} chars) - Bun.markdown.render`, () => {
      return Bun.markdown.render(medium, renderCallbacks);
    });
  }

  bench(`medium (${medium.length} chars) - marked`, () => {
    return marked(medium);
  });

  bench(`medium (${medium.length} chars) - remark`, () => {
    return remarkProcessor.processSync(medium).toString();
  });
});

summary(() => {
  if (typeof Bun !== "undefined" && Bun.markdown) {
    bench(`large (${large.length} chars) - Bun.markdown.html`, () => {
      return Bun.markdown.html(large);
    });

    bench(`large (${large.length} chars) - Bun.markdown.render`, () => {
      return Bun.markdown.render(large, renderCallbacks);
    });
  }

  bench(`large (${large.length} chars) - marked`, () => {
    return marked(large);
  });

  bench(`large (${large.length} chars) - remark`, () => {
    return remarkProcessor.processSync(large).toString();
  });
});

await run();
20 bench/snippets/spread-set.mjs Normal file
@@ -0,0 +1,20 @@
// Benchmark for [...set] optimization (WebKit#56539)
// https://github.com/WebKit/WebKit/pull/56539
import { bench, run } from "../runner.mjs";

const intSet10 = new Set([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]);
const intSet100 = new Set(Array.from({ length: 100 }, (_, i) => i));
const strSet10 = new Set(Array.from({ length: 10 }, (_, i) => `key-${i}`));
const strSet100 = new Set(Array.from({ length: 100 }, (_, i) => `key-${i}`));

const objSet10 = new Set(Array.from({ length: 10 }, (_, i) => ({ id: i })));
const objSet100 = new Set(Array.from({ length: 100 }, (_, i) => ({ id: i })));

bench("[...set] - integers (10)", () => [...intSet10]);
bench("[...set] - integers (100)", () => [...intSet100]);
bench("[...set] - strings (10)", () => [...strSet10]);
bench("[...set] - strings (100)", () => [...strSet100]);
bench("[...set] - objects (10)", () => [...objSet10]);
bench("[...set] - objects (100)", () => [...objSet100]);

await run();
140 build.zig
@@ -459,6 +459,146 @@ pub fn build(b: *Build) !void {
        // const run = b.addRunArtifact(exe);
        // step.dependOn(&run.step);
    }

    // zig build generate-grapheme-tables
    // Regenerates src/string/immutable/grapheme_tables.zig from the vendored uucode.
    // Run this when updating src/deps/uucode. Normal builds use the committed file.
    {
        const step = b.step("generate-grapheme-tables", "Regenerate grapheme property tables from vendored uucode");

        // --- Phase 1: Build uucode tables (separate module graph, no tables dependency) ---
        const bt_config_mod = b.createModule(.{
            .root_source_file = b.path("src/deps/uucode/src/config.zig"),
            .target = b.graph.host,
        });
        const bt_types_mod = b.createModule(.{
            .root_source_file = b.path("src/deps/uucode/src/types.zig"),
            .target = b.graph.host,
        });
        bt_types_mod.addImport("config.zig", bt_config_mod);
        bt_config_mod.addImport("types.zig", bt_types_mod);

        const bt_config_x_mod = b.createModule(.{
            .root_source_file = b.path("src/deps/uucode/src/x/config.x.zig"),
            .target = b.graph.host,
        });
        const bt_types_x_mod = b.createModule(.{
            .root_source_file = b.path("src/deps/uucode/src/x/types.x.zig"),
            .target = b.graph.host,
        });
        bt_types_x_mod.addImport("config.x.zig", bt_config_x_mod);
        bt_config_x_mod.addImport("types.x.zig", bt_types_x_mod);
        bt_config_x_mod.addImport("types.zig", bt_types_mod);
        bt_config_x_mod.addImport("config.zig", bt_config_mod);

        const bt_build_config_mod = b.createModule(.{
            .root_source_file = b.path("src/unicode/uucode/uucode_config.zig"),
            .target = b.graph.host,
        });
        bt_build_config_mod.addImport("types.zig", bt_types_mod);
        bt_build_config_mod.addImport("config.zig", bt_config_mod);
        bt_build_config_mod.addImport("types.x.zig", bt_types_x_mod);
        bt_build_config_mod.addImport("config.x.zig", bt_config_x_mod);

        const build_tables_mod = b.createModule(.{
            .root_source_file = b.path("src/deps/uucode/src/build/tables.zig"),
            .target = b.graph.host,
            .optimize = .Debug,
        });
        build_tables_mod.addImport("config.zig", bt_config_mod);
        build_tables_mod.addImport("build_config", bt_build_config_mod);
        build_tables_mod.addImport("types.zig", bt_types_mod);

        const build_tables_exe = b.addExecutable(.{
            .name = "uucode_build_tables",
            .root_module = build_tables_mod,
            .use_llvm = true,
        });
        const run_build_tables = b.addRunArtifact(build_tables_exe);
        run_build_tables.setCwd(b.path("src/deps/uucode"));
        const tables_path = run_build_tables.addOutputFileArg("tables.zig");

        // --- Phase 2: Build grapheme-gen with full uucode (separate module graph) ---
        const rt_config_mod = b.createModule(.{
            .root_source_file = b.path("src/deps/uucode/src/config.zig"),
            .target = b.graph.host,
        });
        const rt_types_mod = b.createModule(.{
            .root_source_file = b.path("src/deps/uucode/src/types.zig"),
            .target = b.graph.host,
        });
        rt_types_mod.addImport("config.zig", rt_config_mod);
        rt_config_mod.addImport("types.zig", rt_types_mod);

        const rt_config_x_mod = b.createModule(.{
            .root_source_file = b.path("src/deps/uucode/src/x/config.x.zig"),
            .target = b.graph.host,
        });
        const rt_types_x_mod = b.createModule(.{
            .root_source_file = b.path("src/deps/uucode/src/x/types.x.zig"),
            .target = b.graph.host,
        });
        rt_types_x_mod.addImport("config.x.zig", rt_config_x_mod);
        rt_config_x_mod.addImport("types.x.zig", rt_types_x_mod);
        rt_config_x_mod.addImport("types.zig", rt_types_mod);
        rt_config_x_mod.addImport("config.zig", rt_config_mod);

        const rt_build_config_mod = b.createModule(.{
            .root_source_file = b.path("src/unicode/uucode/uucode_config.zig"),
            .target = b.graph.host,
        });
        rt_build_config_mod.addImport("types.zig", rt_types_mod);
        rt_build_config_mod.addImport("config.zig", rt_config_mod);
        rt_build_config_mod.addImport("types.x.zig", rt_types_x_mod);
        rt_build_config_mod.addImport("config.x.zig", rt_config_x_mod);

        const rt_tables_mod = b.createModule(.{
            .root_source_file = tables_path,
            .target = b.graph.host,
        });
        rt_tables_mod.addImport("types.zig", rt_types_mod);
        rt_tables_mod.addImport("types.x.zig", rt_types_x_mod);
        rt_tables_mod.addImport("config.zig", rt_config_mod);
        rt_tables_mod.addImport("build_config", rt_build_config_mod);

        const rt_get_mod = b.createModule(.{
            .root_source_file = b.path("src/deps/uucode/src/get.zig"),
            .target = b.graph.host,
        });
        rt_get_mod.addImport("types.zig", rt_types_mod);
        rt_get_mod.addImport("tables", rt_tables_mod);
        rt_types_mod.addImport("get.zig", rt_get_mod);

        const uucode_mod = b.createModule(.{
            .root_source_file = b.path("src/deps/uucode/src/root.zig"),
            .target = b.graph.host,
        });
        uucode_mod.addImport("types.zig", rt_types_mod);
        uucode_mod.addImport("config.zig", rt_config_mod);
        uucode_mod.addImport("types.x.zig", rt_types_x_mod);
        uucode_mod.addImport("tables", rt_tables_mod);
        uucode_mod.addImport("get.zig", rt_get_mod);

        // grapheme_gen executable
        const gen_exe = b.addExecutable(.{
            .name = "grapheme-gen",
            .root_module = b.createModule(.{
                .root_source_file = b.path("src/unicode/uucode/grapheme_gen.zig"),
                .target = b.graph.host,
                .optimize = .Debug,
                .imports = &.{
                    .{ .name = "uucode", .module = uucode_mod },
                },
            }),
            .use_llvm = true,
        });

        const run_gen = b.addRunArtifact(gen_exe);
        const gen_output = run_gen.captureStdOut();

        const install = b.addInstallFile(gen_output, "../src/string/immutable/grapheme_tables.zig");
        step.dependOn(&install.step);
    }
}

const TargetDescription = struct {
@@ -13,10 +13,7 @@
  },
  {
    "output": "JavaScriptSources.txt",
    "paths": [
      "src/js/**/*.{js,ts}",
      "src/install/PackageManager/scanner-entry.ts"
    ]
    "paths": ["src/js/**/*.{js,ts}", "src/install/PackageManager/scanner-entry.ts"]
  },
  {
    "output": "JavaScriptCodegenSources.txt",
@@ -939,7 +939,7 @@ if(WIN32)
endif()

if(USE_MIMALLOC_AS_DEFAULT_ALLOCATOR)
  target_compile_definitions(${bun} PRIVATE USE_MIMALLOC=1)
  target_compile_definitions(${bun} PRIVATE USE_BUN_MIMALLOC=1)
endif()

target_compile_definitions(${bun} PRIVATE
@@ -1253,6 +1253,7 @@ if(WIN32)
  target_link_libraries(${bun} PRIVATE
    ${WEBKIT_LIB_PATH}/WTF.lib
    ${WEBKIT_LIB_PATH}/JavaScriptCore.lib
    ${WEBKIT_LIB_PATH}/bmalloc.lib
    ${WEBKIT_LIB_PATH}/sicudtd.lib
    ${WEBKIT_LIB_PATH}/sicuind.lib
    ${WEBKIT_LIB_PATH}/sicuucd.lib
@@ -1261,6 +1262,7 @@ if(WIN32)
  target_link_libraries(${bun} PRIVATE
    ${WEBKIT_LIB_PATH}/WTF.lib
    ${WEBKIT_LIB_PATH}/JavaScriptCore.lib
    ${WEBKIT_LIB_PATH}/bmalloc.lib
    ${WEBKIT_LIB_PATH}/sicudt.lib
    ${WEBKIT_LIB_PATH}/sicuin.lib
    ${WEBKIT_LIB_PATH}/sicuuc.lib
@@ -1271,13 +1273,18 @@ else()
    ${WEBKIT_LIB_PATH}/libWTF.a
    ${WEBKIT_LIB_PATH}/libJavaScriptCore.a
  )
  if(NOT APPLE OR EXISTS ${WEBKIT_LIB_PATH}/libbmalloc.a)
  if(WEBKIT_LOCAL OR NOT APPLE OR EXISTS ${WEBKIT_LIB_PATH}/libbmalloc.a)
    target_link_libraries(${bun} PRIVATE ${WEBKIT_LIB_PATH}/libbmalloc.a)
  endif()
endif()

include_directories(${WEBKIT_INCLUDE_PATH})

# When building with a local WebKit, ensure JSC is built before compiling Bun's C++ sources.
if(WEBKIT_LOCAL AND TARGET jsc)
  add_dependencies(${bun} jsc)
endif()

# Include the generated dependency versions header
include_directories(${CMAKE_BINARY_DIR})

@@ -1322,9 +1329,14 @@ if(LINUX)
  target_link_libraries(${bun} PUBLIC libatomic.so)
endif()

target_link_libraries(${bun} PRIVATE ${WEBKIT_LIB_PATH}/libicudata.a)
target_link_libraries(${bun} PRIVATE ${WEBKIT_LIB_PATH}/libicui18n.a)
target_link_libraries(${bun} PRIVATE ${WEBKIT_LIB_PATH}/libicuuc.a)
if(WEBKIT_LOCAL)
  find_package(ICU REQUIRED COMPONENTS data i18n uc)
  target_link_libraries(${bun} PRIVATE ICU::data ICU::i18n ICU::uc)
else()
  target_link_libraries(${bun} PRIVATE ${WEBKIT_LIB_PATH}/libicudata.a)
  target_link_libraries(${bun} PRIVATE ${WEBKIT_LIB_PATH}/libicui18n.a)
  target_link_libraries(${bun} PRIVATE ${WEBKIT_LIB_PATH}/libicuuc.a)
endif()

if(WIN32)
@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
  cloudflare/lol-html
COMMIT
  d64457d9ff0143deef025d5df7e8586092b9afb7
  e9e16dca48dd4a8ffbc77642bc4be60407585f11
)

set(LOLHTML_CWD ${VENDOR_PATH}/lolhtml/c-api)

@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
  oven-sh/mimalloc
COMMIT
  1beadf9651a7bfdec6b5367c380ecc3fe1c40d1a
  ffa38ab8ac914f9eb7af75c1f8ad457643dc14f2
)

set(MIMALLOC_CMAKE_ARGS

@@ -14,7 +14,7 @@ set(MIMALLOC_CMAKE_ARGS
  -DMI_BUILD_TESTS=OFF
  -DMI_USE_CXX=ON
  -DMI_SKIP_COLLECT_ON_EXIT=ON

# ```
# ❯ mimalloc_allow_large_os_pages=0 BUN_PORT=3004 mem bun http-hello.js
# Started development server: http://localhost:3004

@@ -51,7 +51,7 @@ if(ENABLE_ASAN)
  list(APPEND MIMALLOC_CMAKE_ARGS -DMI_DEBUG_UBSAN=ON)
elseif(APPLE OR LINUX)
  if(APPLE)
  list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OVERRIDE=OFF)
    list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OVERRIDE=OFF)
    list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OSX_ZONE=OFF)
    list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OSX_INTERPOSE=OFF)
  else()

@@ -69,17 +69,27 @@ if(ENABLE_VALGRIND)
  list(APPEND MIMALLOC_CMAKE_ARGS -DMI_VALGRIND=ON)
endif()

# Enable SIMD optimizations when not building for baseline (older CPUs)
if(NOT ENABLE_BASELINE)
# Enable architecture-specific optimizations when not building for baseline.
# On Linux aarch64, upstream mimalloc force-enables MI_OPT_ARCH which adds
# -march=armv8.1-a (LSE atomics). This crashes on ARMv8.0 CPUs
# (Cortex-A53, Raspberry Pi 4, AWS a1 instances). Use MI_NO_OPT_ARCH
# to prevent that, but keep SIMD enabled. -moutline-atomics for runtime
# dispatch to LSE/LL-SC. macOS arm64 always has LSE (Apple Silicon) so
# MI_OPT_ARCH is safe there.
if(CMAKE_SYSTEM_PROCESSOR MATCHES "aarch64|arm64|ARM64|AARCH64" AND NOT APPLE)
  list(APPEND MIMALLOC_CMAKE_ARGS -DMI_NO_OPT_ARCH=ON)
  list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OPT_SIMD=ON)
  list(APPEND MIMALLOC_CMAKE_ARGS "-DCMAKE_C_FLAGS=-moutline-atomics")
elseif(NOT ENABLE_BASELINE)
  list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OPT_ARCH=ON)
  list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OPT_SIMD=ON)
endif()

if(WIN32)
  if(DEBUG)
    set(MIMALLOC_LIBRARY mimalloc-static-debug)
    set(MIMALLOC_LIBRARY mimalloc-debug)
  else()
    set(MIMALLOC_LIBRARY mimalloc-static)
    set(MIMALLOC_LIBRARY mimalloc)
  endif()
elseif(DEBUG)
  if (ENABLE_ASAN)
@@ -6,7 +6,8 @@ endif()

optionx(BUILDKITE_ORGANIZATION_SLUG STRING "The organization slug to use on Buildkite" DEFAULT "bun")
optionx(BUILDKITE_PIPELINE_SLUG STRING "The pipeline slug to use on Buildkite" DEFAULT "bun")
optionx(BUILDKITE_BUILD_ID STRING "The build ID to use on Buildkite")
optionx(BUILDKITE_BUILD_ID STRING "The build ID (UUID) to use on Buildkite")
optionx(BUILDKITE_BUILD_NUMBER STRING "The build number to use on Buildkite")
optionx(BUILDKITE_GROUP_ID STRING "The group ID to use on Buildkite")

if(ENABLE_BASELINE)
@@ -32,7 +33,13 @@ if(NOT BUILDKITE_BUILD_ID)
  return()
endif()

setx(BUILDKITE_BUILD_URL https://buildkite.com/${BUILDKITE_ORGANIZATION_SLUG}/${BUILDKITE_PIPELINE_SLUG}/builds/${BUILDKITE_BUILD_ID})
# Use BUILDKITE_BUILD_NUMBER for the URL if available, as the UUID format causes a 302 redirect
# that CMake's file(DOWNLOAD) doesn't follow, resulting in empty response.
if(BUILDKITE_BUILD_NUMBER)
  setx(BUILDKITE_BUILD_URL https://buildkite.com/${BUILDKITE_ORGANIZATION_SLUG}/${BUILDKITE_PIPELINE_SLUG}/builds/${BUILDKITE_BUILD_NUMBER})
else()
  setx(BUILDKITE_BUILD_URL https://buildkite.com/${BUILDKITE_ORGANIZATION_SLUG}/${BUILDKITE_PIPELINE_SLUG}/builds/${BUILDKITE_BUILD_ID})
endif()
setx(BUILDKITE_BUILD_PATH ${BUILDKITE_BUILDS_PATH}/builds/${BUILDKITE_BUILD_ID})

file(
@@ -31,13 +31,6 @@ execute_process(
  ERROR_QUIET
)

if(MACOS_VERSION VERSION_LESS ${CMAKE_OSX_DEPLOYMENT_TARGET})
  message(FATAL_ERROR "Your computer is running macOS ${MACOS_VERSION}, which is older than the target macOS SDK ${CMAKE_OSX_DEPLOYMENT_TARGET}. To fix this, either:\n"
    " - Upgrade your computer to macOS ${CMAKE_OSX_DEPLOYMENT_TARGET} or newer\n"
    " - Download a newer version of the macOS SDK from Apple: https://developer.apple.com/download/all/?q=xcode\n"
    " - Set -DCMAKE_OSX_DEPLOYMENT_TARGET=${MACOS_VERSION}\n")
endif()

execute_process(
  COMMAND xcrun --sdk macosx --show-sdk-path
  OUTPUT_VARIABLE DEFAULT_CMAKE_OSX_SYSROOT
@@ -1,8 +1,12 @@
# NOTE: Changes to this file trigger QEMU JIT stress tests in CI.
# See scripts/verify-jit-stress-qemu.sh for details.

option(WEBKIT_VERSION "The version of WebKit to use")
option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading")
option(WEBKIT_BUILD_TYPE "The build type for local WebKit (defaults to CMAKE_BUILD_TYPE)")

if(NOT WEBKIT_VERSION)
  set(WEBKIT_VERSION 87c6cde57dd1d2a82bbc9caf500f70f8a7c1f249)
  set(WEBKIT_VERSION 515344bc5d65aa2d4f9ff277b5fb944f0e051dcd)
endif()

# Use preview build URL for Windows ARM64 until the fix is merged to main

@@ -12,7 +16,10 @@ string(SUBSTRING ${WEBKIT_VERSION} 0 16 WEBKIT_VERSION_PREFIX)
string(SUBSTRING ${WEBKIT_VERSION} 0 8 WEBKIT_VERSION_SHORT)

if(WEBKIT_LOCAL)
  set(DEFAULT_WEBKIT_PATH ${VENDOR_PATH}/WebKit/WebKitBuild/${CMAKE_BUILD_TYPE})
  if(NOT WEBKIT_BUILD_TYPE)
    set(WEBKIT_BUILD_TYPE ${CMAKE_BUILD_TYPE})
  endif()
  set(DEFAULT_WEBKIT_PATH ${VENDOR_PATH}/WebKit/WebKitBuild/${WEBKIT_BUILD_TYPE})
else()
  set(DEFAULT_WEBKIT_PATH ${CACHE_PATH}/webkit-${WEBKIT_VERSION_PREFIX})
endif()

@@ -27,35 +34,153 @@ set(WEBKIT_INCLUDE_PATH ${WEBKIT_PATH}/include)
set(WEBKIT_LIB_PATH ${WEBKIT_PATH}/lib)

if(WEBKIT_LOCAL)
  if(EXISTS ${WEBKIT_PATH}/cmakeconfig.h)
  # You may need to run:
  # make jsc-compile-debug jsc-copy-headers
  include_directories(
    ${WEBKIT_PATH}
    ${WEBKIT_PATH}/JavaScriptCore/Headers
    ${WEBKIT_PATH}/JavaScriptCore/Headers/JavaScriptCore
    ${WEBKIT_PATH}/JavaScriptCore/PrivateHeaders
    ${WEBKIT_PATH}/bmalloc/Headers
    ${WEBKIT_PATH}/WTF/Headers
    ${WEBKIT_PATH}/JavaScriptCore/PrivateHeaders/JavaScriptCore
    ${WEBKIT_PATH}/JavaScriptCore/DerivedSources/inspector
  )
  set(WEBKIT_SOURCE_DIR ${VENDOR_PATH}/WebKit)

  # On Windows, add ICU include path from vcpkg
  if(WIN32)
    # Auto-detect vcpkg triplet
    set(VCPKG_ARM64_PATH ${VENDOR_PATH}/WebKit/vcpkg_installed/arm64-windows-static)
    set(VCPKG_X64_PATH ${VENDOR_PATH}/WebKit/vcpkg_installed/x64-windows-static)
    if(EXISTS ${VCPKG_ARM64_PATH})
      set(VCPKG_ICU_PATH ${VCPKG_ARM64_PATH})
  if(WIN32)
    # --- Build ICU from source (Windows only) ---
    # On macOS, ICU is found automatically (Homebrew icu4c for headers, system for libs).
    # On Linux, ICU is found automatically from system packages (e.g. libicu-dev).
    # On Windows, there is no system ICU, so we build it from source.
    set(ICU_LOCAL_ROOT ${VENDOR_PATH}/WebKit/WebKitBuild/icu)
    if(NOT EXISTS ${ICU_LOCAL_ROOT}/lib/sicudt.lib)
      message(STATUS "Building ICU from source...")
      if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm64|ARM64|aarch64|AARCH64")
        set(ICU_PLATFORM "ARM64")
      else()
        set(VCPKG_ICU_PATH ${VCPKG_X64_PATH})
        set(ICU_PLATFORM "x64")
      endif()
      if(EXISTS ${VCPKG_ICU_PATH}/include)
        include_directories(${VCPKG_ICU_PATH}/include)
        message(STATUS "Using ICU from vcpkg: ${VCPKG_ICU_PATH}/include")
      execute_process(
        COMMAND powershell -ExecutionPolicy Bypass -File
          ${WEBKIT_SOURCE_DIR}/build-icu.ps1
          -Platform ${ICU_PLATFORM}
          -BuildType ${WEBKIT_BUILD_TYPE}
          -OutputDir ${ICU_LOCAL_ROOT}
        RESULT_VARIABLE ICU_BUILD_RESULT
      )
      if(NOT ICU_BUILD_RESULT EQUAL 0)
        message(FATAL_ERROR "Failed to build ICU (exit code: ${ICU_BUILD_RESULT}).")
      endif()
    endif()

    # Copy ICU libs to WEBKIT_LIB_PATH with the names BuildBun.cmake expects.
    # Prebuilt WebKit uses 's' prefix (static) and 'd' suffix (debug).
    file(MAKE_DIRECTORY ${WEBKIT_LIB_PATH})
    if(WEBKIT_BUILD_TYPE STREQUAL "Debug")
      set(ICU_SUFFIX "d")
    else()
      set(ICU_SUFFIX "")
    endif()
    file(COPY_FILE ${ICU_LOCAL_ROOT}/lib/sicudt.lib ${WEBKIT_LIB_PATH}/sicudt${ICU_SUFFIX}.lib ONLY_IF_DIFFERENT)
    file(COPY_FILE ${ICU_LOCAL_ROOT}/lib/icuin.lib ${WEBKIT_LIB_PATH}/sicuin${ICU_SUFFIX}.lib ONLY_IF_DIFFERENT)
    file(COPY_FILE ${ICU_LOCAL_ROOT}/lib/icuuc.lib ${WEBKIT_LIB_PATH}/sicuuc${ICU_SUFFIX}.lib ONLY_IF_DIFFERENT)
  endif()

  # --- Configure JSC ---
  message(STATUS "Configuring JSC from local WebKit source at ${WEBKIT_SOURCE_DIR}...")

  set(JSC_CMAKE_ARGS
    -S ${WEBKIT_SOURCE_DIR}
    -B ${WEBKIT_PATH}
    -G ${CMAKE_GENERATOR}
    -DPORT=JSCOnly
    -DENABLE_STATIC_JSC=ON
    -DUSE_THIN_ARCHIVES=OFF
    -DENABLE_FTL_JIT=ON
    -DCMAKE_EXPORT_COMPILE_COMMANDS=ON
    -DUSE_BUN_JSC_ADDITIONS=ON
    -DUSE_BUN_EVENT_LOOP=ON
    -DENABLE_BUN_SKIP_FAILING_ASSERTIONS=ON
    -DALLOW_LINE_AND_COLUMN_NUMBER_IN_BUILTINS=ON
    -DCMAKE_BUILD_TYPE=${WEBKIT_BUILD_TYPE}
    -DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
    -DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
    -DENABLE_REMOTE_INSPECTOR=ON
  )

  if(WIN32)
    # ICU paths and Windows-specific compiler/linker settings
    list(APPEND JSC_CMAKE_ARGS
      -DICU_ROOT=${ICU_LOCAL_ROOT}
      -DICU_LIBRARY=${ICU_LOCAL_ROOT}/lib
      -DICU_INCLUDE_DIR=${ICU_LOCAL_ROOT}/include
      -DCMAKE_LINKER=lld-link
    )
    # Static CRT and U_STATIC_IMPLEMENTATION
    if(WEBKIT_BUILD_TYPE STREQUAL "Debug")
      set(JSC_MSVC_RUNTIME "MultiThreadedDebug")
    else()
      set(JSC_MSVC_RUNTIME "MultiThreaded")
    endif()
    list(APPEND JSC_CMAKE_ARGS
      -DCMAKE_MSVC_RUNTIME_LIBRARY=${JSC_MSVC_RUNTIME}
      "-DCMAKE_C_FLAGS=/DU_STATIC_IMPLEMENTATION"
      "-DCMAKE_CXX_FLAGS=/DU_STATIC_IMPLEMENTATION /clang:-fno-c++-static-destructors"
    )
  endif()

  if(ENABLE_ASAN)
    list(APPEND JSC_CMAKE_ARGS -DENABLE_SANITIZERS=address)
  endif()

  # Pass through ccache if available
  if(CMAKE_C_COMPILER_LAUNCHER)
    list(APPEND JSC_CMAKE_ARGS -DCMAKE_C_COMPILER_LAUNCHER=${CMAKE_C_COMPILER_LAUNCHER})
  endif()
  if(CMAKE_CXX_COMPILER_LAUNCHER)
    list(APPEND JSC_CMAKE_ARGS -DCMAKE_CXX_COMPILER_LAUNCHER=${CMAKE_CXX_COMPILER_LAUNCHER})
  endif()

  execute_process(
    COMMAND ${CMAKE_COMMAND} ${JSC_CMAKE_ARGS}
    RESULT_VARIABLE JSC_CONFIGURE_RESULT
  )
  if(NOT JSC_CONFIGURE_RESULT EQUAL 0)
    message(FATAL_ERROR "Failed to configure JSC (exit code: ${JSC_CONFIGURE_RESULT}). "
      "Check the output above for errors.")
  endif()

  if(WIN32)
    set(JSC_BYPRODUCTS
      ${WEBKIT_LIB_PATH}/JavaScriptCore.lib
      ${WEBKIT_LIB_PATH}/WTF.lib
      ${WEBKIT_LIB_PATH}/bmalloc.lib
    )
  else()
    set(JSC_BYPRODUCTS
      ${WEBKIT_LIB_PATH}/libJavaScriptCore.a
      ${WEBKIT_LIB_PATH}/libWTF.a
      ${WEBKIT_LIB_PATH}/libbmalloc.a
    )
  endif()

  if(WIN32)
    add_custom_target(jsc ALL
      COMMAND ${CMAKE_COMMAND} --build ${WEBKIT_PATH} --config ${WEBKIT_BUILD_TYPE} --target jsc
      BYPRODUCTS ${JSC_BYPRODUCTS}
      COMMENT "Building JSC (${WEBKIT_PATH})"
    )
  else()
    add_custom_target(jsc ALL
      COMMAND ${CMAKE_COMMAND} --build ${WEBKIT_PATH} --config ${WEBKIT_BUILD_TYPE} --target jsc
      BYPRODUCTS ${JSC_BYPRODUCTS}
      COMMENT "Building JSC (${WEBKIT_PATH})"
      USES_TERMINAL
    )
  endif()

  include_directories(
    ${WEBKIT_PATH}
    ${WEBKIT_PATH}/JavaScriptCore/Headers
    ${WEBKIT_PATH}/JavaScriptCore/Headers/JavaScriptCore
    ${WEBKIT_PATH}/JavaScriptCore/PrivateHeaders
    ${WEBKIT_PATH}/bmalloc/Headers
    ${WEBKIT_PATH}/WTF/Headers
    ${WEBKIT_PATH}/JavaScriptCore/PrivateHeaders/JavaScriptCore
  )

  # On Windows, add ICU headers from the local ICU build
  if(WIN32)
    include_directories(${ICU_LOCAL_ROOT}/include)
  endif()

# After this point, only prebuilt WebKit is supported
@@ -7,9 +7,9 @@ Bytecode caching is a build-time optimization that dramatically improves applica

## Usage

### Basic usage
### Basic usage (CommonJS)

Enable bytecode caching with the `--bytecode` flag:
Enable bytecode caching with the `--bytecode` flag. Without `--format`, this defaults to CommonJS:

```bash terminal icon="terminal"
bun build ./index.ts --target=bun --bytecode --outdir=./dist
@@ -17,7 +17,7 @@ bun build ./index.ts --target=bun --bytecode --outdir=./dist

This generates two files:

- `dist/index.js` - Your bundled JavaScript
- `dist/index.js` - Your bundled JavaScript (CommonJS)
- `dist/index.jsc` - The bytecode cache file

At runtime, Bun automatically detects and uses the `.jsc` file:
@@ -28,14 +28,24 @@ bun ./dist/index.js # Automatically uses index.jsc

### With standalone executables

When creating executables with `--compile`, bytecode is embedded into the binary:
When creating executables with `--compile`, bytecode is embedded into the binary. Both ESM and CommonJS formats are supported:

```bash terminal icon="terminal"
# ESM (requires --compile)
bun build ./cli.ts --compile --bytecode --format=esm --outfile=mycli

# CommonJS (works with or without --compile)
bun build ./cli.ts --compile --bytecode --outfile=mycli
```

The resulting executable contains both the code and bytecode, giving you maximum performance in a single file.

### ESM bytecode

ESM bytecode requires `--compile` because Bun embeds module metadata (import/export information) in the compiled binary. This metadata allows the JavaScript engine to skip parsing entirely at runtime.

Without `--compile`, ESM bytecode would still require parsing the source to analyze module dependencies—defeating the purpose of bytecode caching.
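
For reference, the same build can be expressed through the JavaScript API; a minimal sketch mirroring the CLI flags above (the entrypoint and output names are illustrative):

```ts title="build.ts" icon="/icons/typescript.svg"
// ESM bytecode must be paired with compile: true, since the
// module metadata lives in the standalone binary.
await Bun.build({
  entrypoints: ["./cli.ts"], // illustrative entrypoint
  outfile: "./mycli",
  format: "esm",
  bytecode: true,
  compile: true,
});
```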

### Combining with other optimizations

Bytecode works great with minification and source maps:
@@ -90,35 +100,9 @@ Larger applications benefit more because they have more code to parse.
- ❌ **Code that runs once**
- ❌ **Development builds**
- ❌ **Size-constrained environments**
- ❌ **Code with top-level await** (not supported)

## Limitations

### CommonJS only

Bytecode caching currently works with CommonJS output format. Bun's bundler automatically converts most ESM code to CommonJS, but **top-level await** is the exception:

```js
// This prevents bytecode caching
const data = await fetch("https://api.example.com");
export default data;
```

**Why**: Top-level await requires async module evaluation, which can't be represented in CommonJS. The module graph becomes asynchronous, and the CommonJS wrapper function model breaks down.

**Workaround**: Move async initialization into a function:

```js
async function init() {
  const data = await fetch("https://api.example.com");
  return data;
}

export default init;
```

Now the module exports a function that the consumer can await when needed.
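
As a quick usage sketch (the module path here is hypothetical), the consumer awaits the exported function at the point where the data is actually needed:

```ts
// Hypothetical consumer of the module above.
import init from "./api.js";

const data = await init(); // async work happens here, not at module load
```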

### Version compatibility

Bytecode is **not portable across Bun versions**. The bytecode format is tied to JavaScriptCore's internal representation, which changes between versions.
@@ -236,8 +220,6 @@ It's normal for it to log a cache miss multiple times since Bun doesn't curre
- Compressing `.jsc` files for network transfer (gzip/brotli)
- Evaluating if the startup performance gain is worth the size increase

**Top-level await**: Not supported. Refactor to use async initialization functions.

## What is bytecode?

When you run JavaScript, the JavaScript engine doesn't execute your source code directly. Instead, it goes through several steps:

@@ -322,10 +322,7 @@ Using bytecode compilation, `tsc` starts 2x faster:

Bytecode compilation moves parsing overhead for large input files from runtime to bundle time. Your app starts faster, in exchange for making the `bun build` command a little slower. It doesn't obscure source code.

<Warning>
  **Experimental:** Bytecode compilation is an experimental feature. Only `cjs` format is supported (which means no
  top-level-await). Let us know if you run into any issues!
</Warning>
<Note>Bytecode compilation supports both `cjs` and `esm` formats when used with `--compile`.</Note>

### What do these flags do?

@@ -365,6 +362,23 @@ The `--bytecode` argument enables bytecode compilation. Every time you run JavaS
console.log(process.execArgv); // ["--smol", "--user-agent=MyBot"]
```

### Runtime arguments via `BUN_OPTIONS`

The `BUN_OPTIONS` environment variable is applied to standalone executables, allowing you to pass runtime flags without recompiling:

```bash terminal icon="terminal"
# Enable CPU profiling on a compiled executable
BUN_OPTIONS="--cpu-prof" ./myapp

# Enable heap profiling with markdown output
BUN_OPTIONS="--heap-prof-md" ./myapp

# Combine multiple flags
BUN_OPTIONS="--smol --cpu-prof-md" ./myapp
```

This is useful for debugging or profiling production executables without rebuilding them.

---

## Automatic config loading

@@ -1333,6 +1333,50 @@ Generate metadata about the build in a structured format. The metafile contains
</Tab>
</Tabs>

#### Markdown metafile

Use `--metafile-md` to generate a markdown metafile, which is LLM-friendly and easy to read in the terminal:

```bash terminal icon="terminal"
bun build ./src/index.ts --outdir ./dist --metafile-md ./dist/meta.md
```

Both `--metafile` and `--metafile-md` can be used together:

```bash terminal icon="terminal"
bun build ./src/index.ts --outdir ./dist --metafile ./dist/meta.json --metafile-md ./dist/meta.md
```

#### `metafile` option formats

In the JavaScript API, `metafile` accepts several forms:

```ts title="build.ts" icon="/icons/typescript.svg"
// Boolean — include metafile in the result object
await Bun.build({
  entrypoints: ["./src/index.ts"],
  outdir: "./dist",
  metafile: true,
});

// String — write JSON metafile to a specific path
await Bun.build({
  entrypoints: ["./src/index.ts"],
  outdir: "./dist",
  metafile: "./dist/meta.json",
});

// Object — specify separate paths for JSON and markdown output
await Bun.build({
  entrypoints: ["./src/index.ts"],
  outdir: "./dist",
  metafile: {
    json: "./dist/meta.json",
    markdown: "./dist/meta.md",
  },
});
```
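
With the boolean form, the metafile is attached to the build result; a minimal sketch of reading it back (the `result.metafile` property name follows the "include metafile in the result object" behavior described above):

```ts title="build.ts" icon="/icons/typescript.svg"
const result = await Bun.build({
  entrypoints: ["./src/index.ts"],
  outdir: "./dist",
  metafile: true,
});

// Inspect the generated metadata, e.g. to log it or write it elsewhere.
console.log(JSON.stringify(result.metafile, null, 2));
```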

The metafile structure contains:

```ts
@@ -1464,22 +1508,43 @@ BuildArtifact (entry-point) {

## Bytecode

The `bytecode: boolean` option can be used to generate bytecode for any JavaScript/TypeScript entrypoints. This can greatly improve startup times for large applications. Only supported for `"cjs"` format, only supports `"target": "bun"` and dependent on a matching version of Bun. This adds a corresponding `.jsc` file for each entrypoint.
The `bytecode: boolean` option can be used to generate bytecode for any JavaScript/TypeScript entrypoints. This can greatly improve startup times for large applications. Requires `"target": "bun"` and is dependent on a matching version of Bun.

- **CommonJS**: Works with or without `compile: true`. Generates a `.jsc` file alongside each entrypoint.
- **ESM**: Requires `compile: true`. Bytecode and module metadata are embedded in the standalone executable.

Without an explicit `format`, bytecode defaults to CommonJS.

<Tabs>
  <Tab title="JavaScript">
    ```ts title="build.ts" icon="/icons/typescript.svg"
    // CommonJS bytecode (generates .jsc files)
    await Bun.build({
      entrypoints: ["./index.tsx"],
      outdir: "./out",
      bytecode: true,
    })

    // ESM bytecode (requires compile)
    await Bun.build({
      entrypoints: ["./index.tsx"],
      outfile: "./mycli",
      bytecode: true,
      format: "esm",
      compile: true,
    })
    ```

  </Tab>
  <Tab title="CLI">
    ```bash terminal icon="terminal"
    # CommonJS bytecode
    bun build ./index.tsx --outdir ./out --bytecode

    # ESM bytecode (requires --compile)
    bun build ./index.tsx --outfile ./mycli --bytecode --format=esm --compile
    ```

  </Tab>
</Tabs>

@@ -1646,7 +1711,10 @@ interface BuildConfig {
   * start times, but will make the final output larger and slightly increase
   * memory usage.
   *
   * Bytecode is currently only supported for CommonJS (`format: "cjs"`).
   * - CommonJS: works with or without `compile: true`
   * - ESM: requires `compile: true`
   *
   * Without an explicit `format`, defaults to CommonJS.
   *
   * Must be `target: "bun"`
   * @default false

@@ -150,6 +150,9 @@
      "/runtime/secrets",
      "/runtime/console",
      "/runtime/yaml",
      "/runtime/markdown",
      "/runtime/json5",
      "/runtime/jsonl",
      "/runtime/html-rewriter",
      "/runtime/hashing",
      "/runtime/glob",
@@ -497,6 +500,7 @@
      "/guides/runtime/import-json",
      "/guides/runtime/import-toml",
      "/guides/runtime/import-yaml",
      "/guides/runtime/import-json5",
      "/guides/runtime/import-html",
      "/guides/util/import-meta-dir",
      "/guides/util/import-meta-file",

74 docs/guides/runtime/import-json5.mdx Normal file
@@ -0,0 +1,74 @@
---
title: Import a JSON5 file
sidebarTitle: Import JSON5
mode: center
---

Bun natively supports `.json5` imports.

```json5 config.json5 icon="file-code"
{
  // Comments are allowed
  database: {
    host: "localhost",
    port: 5432,
    name: "myapp",
  },

  server: {
    port: 3000,
    timeout: 30,
  },

  features: {
    auth: true,
    rateLimit: true,
  },
}
```

---

Import the file like any other source file.

```ts config.ts icon="/icons/typescript.svg"
import config from "./config.json5";

config.database.host; // => "localhost"
config.server.port; // => 3000
config.features.auth; // => true
```

---

You can also use named imports to destructure top-level properties:

```ts config.ts icon="/icons/typescript.svg"
import { database, server, features } from "./config.json5";

console.log(database.name); // => "myapp"
console.log(server.timeout); // => 30
console.log(features.rateLimit); // => true
```

---

For parsing JSON5 strings at runtime, use `Bun.JSON5.parse()`:

```ts config.ts icon="/icons/typescript.svg"
const data = Bun.JSON5.parse(`{
  name: 'John Doe',
  age: 30,
  hobbies: [
    'reading',
    'coding',
  ],
}`);

console.log(data.name); // => "John Doe"
console.log(data.hobbies); // => ["reading", "coding"]
```
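
`Bun.JSON5` also exposes `stringify()` (it is exercised in Bun's own JSON5 benchmarks); a minimal round-trip sketch:

```ts config.ts icon="/icons/typescript.svg"
// Serialize to JSON5, then parse it back.
const text = Bun.JSON5.stringify({ name: "John Doe", age: 30 });
const parsed = Bun.JSON5.parse(text);

console.log(parsed.age); // => 30
```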

---

See [Docs > API > JSON5](/runtime/json5) for complete documentation on JSON5 support in Bun.
@@ -97,6 +97,31 @@ Filters respect your [workspace configuration](/pm/workspaces): If you have a `p
bun run --filter foo myscript
```

### Parallel and sequential mode

Combine `--filter` or `--workspaces` with `--parallel` or `--sequential` to run scripts across workspace packages with Foreman-style prefixed output:

```bash terminal icon="terminal"
# Run "build" in all matching packages concurrently
bun run --parallel --filter '*' build

# Run "build" in all workspace packages sequentially
bun run --sequential --workspaces build

# Run glob-matched scripts across all packages
bun run --parallel --filter '*' "build:*"

# Continue running even if one package's script fails
bun run --parallel --no-exit-on-error --filter '*' test

# Run multiple scripts across all packages
bun run --parallel --filter '*' build lint
```

Each line of output is prefixed with the package and script name (e.g. `pkg-a:build | ...`). Without `--filter`/`--workspaces`, the prefix is just the script name (e.g. `build | ...`). When a package's `package.json` has no `name` field, the relative path from the workspace root is used instead.

Use `--if-present` with `--workspaces` to skip packages that don't have the requested script instead of erroring.

### Dependency Order

Bun will respect package dependency order when running scripts. Say you have a package `foo` that depends on another package `bar` in your workspace, and both packages have a `build` script. When you run `bun --filter '*' build`, you will notice that `foo` will only start running once `bar` is done.

@@ -227,6 +227,26 @@ bun --cpu-prof script.js

This generates a `.cpuprofile` file you can open in Chrome DevTools (Performance tab → Load profile) or VS Code's CPU profiler.

### Markdown output

Use `--cpu-prof-md` to generate a markdown CPU profile, which is grep-friendly and designed for LLM analysis:

```sh terminal icon="terminal"
bun --cpu-prof-md script.js
```

Both `--cpu-prof` and `--cpu-prof-md` can be used together to generate both formats at once:

```sh terminal icon="terminal"
bun --cpu-prof --cpu-prof-md script.js
```

You can also trigger profiling via the `BUN_OPTIONS` environment variable:

```sh terminal icon="terminal"
BUN_OPTIONS="--cpu-prof-md" bun script.js
```

### Options

```sh terminal icon="terminal"
@@ -234,8 +254,43 @@ bun --cpu-prof --cpu-prof-name my-profile.cpuprofile script.js
bun --cpu-prof --cpu-prof-dir ./profiles script.js
```

| Flag | Description |
| ---------------------------- | -------------------- |
| `--cpu-prof` | Enable profiling |
| `--cpu-prof-name <filename>` | Set output filename |
| `--cpu-prof-dir <dir>` | Set output directory |
| Flag | Description |
| ---------------------------- | ----------------------------------------------------------- |
| `--cpu-prof` | Generate a `.cpuprofile` JSON file (Chrome DevTools format) |
| `--cpu-prof-md` | Generate a markdown CPU profile (grep/LLM-friendly) |
| `--cpu-prof-name <filename>` | Set output filename |
| `--cpu-prof-dir <dir>` | Set output directory |

## Heap profiling

Generate heap snapshots on exit to analyze memory usage and find memory leaks.

```sh terminal icon="terminal"
bun --heap-prof script.js
```

This generates a V8 `.heapsnapshot` file that can be loaded in Chrome DevTools (Memory tab → Load).

### Markdown output

Use `--heap-prof-md` to generate a markdown heap profile for CLI analysis:

```sh terminal icon="terminal"
bun --heap-prof-md script.js
```

<Note>If both `--heap-prof` and `--heap-prof-md` are specified, the markdown format is used.</Note>

### Options

```sh terminal icon="terminal"
bun --heap-prof --heap-prof-name my-snapshot.heapsnapshot script.js
bun --heap-prof --heap-prof-dir ./profiles script.js
```

| Flag | Description |
| ----------------------------- | ------------------------------------------ |
| `--heap-prof` | Generate a V8 `.heapsnapshot` file on exit |
| `--heap-prof-md` | Generate a markdown heap profile on exit |
| `--heap-prof-name <filename>` | Set output filename |
| `--heap-prof-dir <dir>` | Set output directory |

@@ -266,18 +266,13 @@ git clone https://github.com/oven-sh/WebKit vendor/WebKit
# Check out the commit hash specified in `set(WEBKIT_VERSION <commit_hash>)` in cmake/tools/SetupWebKit.cmake
git -C vendor/WebKit checkout <commit_hash>

# Make a debug build of JSC. This will output build artifacts in ./vendor/WebKit/WebKitBuild/Debug
# Optionally, you can use `bun run jsc:build` for a release build
bun run jsc:build:debug && rm vendor/WebKit/WebKitBuild/Debug/JavaScriptCore/DerivedSources/inspector/InspectorProtocolObjects.h

# After an initial run of `make jsc-debug`, you can rebuild JSC with:
cmake --build vendor/WebKit/WebKitBuild/Debug --target jsc && rm vendor/WebKit/WebKitBuild/Debug/JavaScriptCore/DerivedSources/inspector/InspectorProtocolObjects.h

# Build bun with the local JSC build
# Build bun with the local JSC build — this automatically configures and builds JSC
bun run build:local
```

Using `bun run build:local` will build Bun in the `./build/debug-local` directory (instead of `./build/debug`), you'll have to change a couple of places to use this new directory:
`bun run build:local` handles everything: configuring JSC, building JSC, and building Bun. On subsequent runs, JSC will incrementally rebuild if any WebKit sources changed. `ninja -Cbuild/debug-local` also works after the first build, and will build Bun+JSC.

The build output goes to `./build/debug-local` (instead of `./build/debug`), so you'll need to update a couple of places:

- The first line in `src/js/builtins.d.ts`
- The `CompilationDatabase` line in `.clangd` config should be `CompilationDatabase: build/debug-local`
@@ -288,7 +283,7 @@ Note that the WebKit folder, including build artifacts, is 8GB+ in size.

If you are using a JSC debug build and using VScode, make sure to run the `C/C++: Select a Configuration` command to configure intellisense to find the debug headers.

Note that if you change make changes to our [WebKit fork](https://github.com/oven-sh/WebKit), you will also have to change `SetupWebKit.cmake` to point to the commit hash.
Note that if you make changes to our [WebKit fork](https://github.com/oven-sh/WebKit), you will also have to change `SetupWebKit.cmake` to point to the commit hash.

## Troubleshooting

@@ -55,5 +55,5 @@ Click the link in the right column to jump to the associated documentation.

| Stream Processing          | [`Bun.readableStreamTo*()`](/runtime/utils#bun-readablestreamto), `Bun.readableStreamToBytes()`, `Bun.readableStreamToBlob()`, `Bun.readableStreamToFormData()`, `Bun.readableStreamToJSON()`, `Bun.readableStreamToArray()` |
| Memory & Buffer Management | `Bun.ArrayBufferSink`, `Bun.allocUnsafe`, `Bun.concatArrayBuffers` |
| Module Resolution          | [`Bun.resolveSync()`](/runtime/utils#bun-resolvesync) |
| Parsing & Formatting       | [`Bun.semver`](/runtime/semver), `Bun.TOML.parse`, [`Bun.markdown`](/runtime/markdown), [`Bun.color`](/runtime/color) |
| Low-level / Internals      | `Bun.mmap`, `Bun.gc`, `Bun.generateHeapSnapshot`, [`bun:jsc`](https://bun.com/reference/bun/jsc) |

@@ -5,7 +5,7 @@ description: "File types and loaders supported by Bun's bundler and runtime"

The Bun bundler implements a set of default loaders out of the box. As a rule of thumb, the bundler and the runtime both support the same set of file types out of the box.

`.js` `.cjs` `.mjs` `.mts` `.cts` `.ts` `.tsx` `.jsx` `.css` `.json` `.jsonc` `.json5` `.toml` `.yaml` `.yml` `.txt` `.wasm` `.node` `.html` `.sh`

Bun uses the file extension to determine which built-in _loader_ should be used to parse the file. Every loader has a name, such as `js`, `tsx`, or `json`. These names are used when building [plugins](/bundler/plugins) that extend Bun with custom loaders.

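As an illustration of how loader names come into play when writing plugins, here is a minimal runtime-plugin sketch; `parseIni` and the `.ini` extension are hypothetical stand-ins, not Bun APIs:

```ts
import { plugin } from "bun";

plugin({
  name: "ini-loader",
  setup(build) {
    // Intercept .ini imports and hand back a ready-made module object.
    build.onLoad({ filter: /\.ini$/ }, async ({ path }) => {
      const text = await Bun.file(path).text();
      return {
        exports: { default: parseIni(text) }, // parseIni is a hypothetical helper
        loader: "object", // one of the built-in loader names
      };
    });
  },
});

declare function parseIni(text: string): Record<string, unknown>;
```
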
@@ -197,6 +197,53 @@ export default {

</CodeGroup>

### `json5`

**JSON5 loader**. Default for `.json5`.

JSON5 files can be directly imported. Bun will parse them with its fast native JSON5 parser. JSON5 is a superset of JSON that supports comments, trailing commas, unquoted keys, single-quoted strings, and more.

```ts
import config from "./config.json5";
console.log(config);

// via import attribute:
import data from "./data.txt" with { type: "json5" };
```

During bundling, the parsed JSON5 is inlined into the bundle as a JavaScript object.

```ts
var config = {
  name: "my-app",
  version: "1.0.0",
  // ...other fields
};
```

If a `.json5` file is passed as an entrypoint, it will be converted to a `.js` module that `export default`s the parsed object.

<CodeGroup>

```json5 Input
{
  // Configuration
  name: "John Doe",
  age: 35,
  email: "johndoe@example.com",
}
```

```ts Output
export default {
  name: "John Doe",
  age: 35,
  email: "johndoe@example.com",
};
```

</CodeGroup>

### `text`

**Text loader**. Default for `.txt`.

docs/runtime/json5.mdx (new file, 271 lines)
@@ -0,0 +1,271 @@
---
title: JSON5
description: Use Bun's built-in support for JSON5 files through both runtime APIs and bundler integration
---

In Bun, JSON5 is a first-class citizen alongside JSON, TOML, and YAML. You can:

- Parse and stringify JSON5 with `Bun.JSON5.parse` and `Bun.JSON5.stringify`
- `import` & `require` JSON5 files as modules at runtime (including hot reloading & watch mode support)
- `import` & `require` JSON5 files in frontend apps via Bun's bundler

---

## Conformance

Bun's JSON5 parser passes 100% of the [official JSON5 test suite](https://github.com/json5/json5-tests). The parser is written in Zig for optimal performance. You can view our [translated test suite](https://github.com/oven-sh/bun/blob/main/test/js/bun/json5/json5-test-suite.test.ts) to see every test case.

---

## Runtime API

### `Bun.JSON5.parse()`

Parse a JSON5 string into a JavaScript value.

```ts
import { JSON5 } from "bun";

const data = JSON5.parse(`{
  // JSON5 supports comments
  name: 'my-app',
  version: '1.0.0',
  debug: true,

  // trailing commas are allowed
  tags: ['web', 'api',],
}`);

console.log(data);
// {
//   name: "my-app",
//   version: "1.0.0",
//   debug: true,
//   tags: ["web", "api"]
// }
```

#### Supported JSON5 Features

JSON5 is a superset of JSON based on ECMAScript 5.1 syntax. It supports:

- **Comments**: single-line (`//`) and multi-line (`/* */`)
- **Trailing commas**: in objects and arrays
- **Unquoted keys**: valid ECMAScript 5.1 identifiers can be used as keys
- **Single-quoted strings**: in addition to double-quoted strings
- **Multi-line strings**: using backslash line continuations
- **Hex numbers**: `0xFF`
- **Leading & trailing decimal points**: `.5` and `5.`
- **Infinity and NaN**: positive and negative
- **Explicit plus sign**: `+42`

```ts
const data = JSON5.parse(`{
  // Unquoted keys
  unquoted: 'keys work',

  // Single and double quotes
  single: 'single-quoted',
  double: "double-quoted",

  // Trailing commas
  trailing: 'comma',

  // Special numbers
  hex: 0xDEADbeef,
  half: .5,
  to: Infinity,
  nan: NaN,

  // Multi-line strings
  multiline: 'line 1 \
line 2',
}`);
```

#### Error Handling

`Bun.JSON5.parse()` throws a `SyntaxError` if the input is invalid JSON5:

```ts
try {
  JSON5.parse("{invalid}");
} catch (error) {
  console.error("Failed to parse JSON5:", error.message);
}
```

### `Bun.JSON5.stringify()`

Stringify a JavaScript value to a JSON5 string.

```ts
import { JSON5 } from "bun";

const str = JSON5.stringify({ name: "my-app", version: "1.0.0" });
console.log(str);
// {name:'my-app',version:'1.0.0'}
```

#### Pretty Printing

Pass a `space` argument to format the output with indentation:

```ts
const pretty = JSON5.stringify(
  {
    name: "my-app",
    debug: true,
    tags: ["web", "api"],
  },
  null,
  2,
);

console.log(pretty);
// {
//   name: 'my-app',
//   debug: true,
//   tags: [
//     'web',
//     'api',
//   ],
// }
```

The `space` argument can be a number (number of spaces) or a string (used as the indent character):

```ts
// Tab indentation
JSON5.stringify(data, null, "\t");
```

#### Special Values

Unlike `JSON.stringify`, `JSON5.stringify` preserves special numeric values:

```ts
JSON5.stringify({ inf: Infinity, ninf: -Infinity, nan: NaN });
// {inf:Infinity,ninf:-Infinity,nan:NaN}
```

---

## Module Import

### ES Modules

You can import JSON5 files directly as ES modules:

```json5 config.json5
{
  // Database configuration
  database: {
    host: "localhost",
    port: 5432,
    name: "myapp",
  },

  features: {
    auth: true,
    rateLimit: true,
    analytics: false,
  },
}
```

#### Default Import

```ts app.ts icon="/icons/typescript.svg"
import config from "./config.json5";

console.log(config.database.host); // "localhost"
console.log(config.features.auth); // true
```

#### Named Imports

You can destructure top-level properties as named imports:

```ts app.ts icon="/icons/typescript.svg"
import { database, features } from "./config.json5";

console.log(database.host); // "localhost"
console.log(features.rateLimit); // true
```

### CommonJS

JSON5 files can also be required in CommonJS:

```ts app.ts icon="/icons/typescript.svg"
const config = require("./config.json5");
console.log(config.database.name); // "myapp"

// Destructuring also works
const { database, features } = require("./config.json5");
```

---

## Hot Reloading with JSON5

When you run your application with `bun --hot`, changes to JSON5 files are automatically detected and reloaded:

```json5 config.json5
{
  server: {
    port: 3000,
    host: "localhost",
  },
  features: {
    debug: true,
    verbose: false,
  },
}
```

```ts server.ts icon="/icons/typescript.svg"
import { server, features } from "./config.json5";

Bun.serve({
  port: server.port,
  hostname: server.host,
  fetch(req) {
    if (features.verbose) {
      console.log(`${req.method} ${req.url}`);
    }
    return new Response("Hello World");
  },
});
```

Run with hot reloading:

```bash terminal icon="terminal"
bun --hot server.ts
```

---

## Bundler Integration

When you import JSON5 files and bundle with Bun, the JSON5 is parsed at build time and included as a JavaScript module:

```bash terminal icon="terminal"
bun build app.ts --outdir=dist
```

This means:

- Zero runtime JSON5 parsing overhead in production
- Smaller bundle sizes
- Tree-shaking support for unused properties (named imports), as the sketch after this list shows

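A minimal sketch of what that tree shaking enables, reusing the `config.json5` from above:

```ts
// Only `database` is imported, so a bundler can drop the unused
// top-level properties of config.json5 (such as `features`) from the output.
import { database } from "./config.json5";

console.log(database.port); // 5432
```
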
### Dynamic Imports

JSON5 files can be dynamically imported:

```ts
const config = await import("./config.json5");
```
docs/runtime/jsonl.mdx (new file, 188 lines)
@@ -0,0 +1,188 @@
---
title: JSONL
description: Parse newline-delimited JSON (JSONL) with Bun's built-in streaming parser
---

Bun has built-in support for parsing [JSONL](https://jsonlines.org/) (newline-delimited JSON), where each line is a separate JSON value. The parser is implemented in C++ using JavaScriptCore's optimized JSON parser and supports streaming use cases.

```ts
const results = Bun.JSONL.parse('{"name":"Alice"}\n{"name":"Bob"}\n');
// [{ name: "Alice" }, { name: "Bob" }]
```

---

## `Bun.JSONL.parse()`

Parse a complete JSONL input and return an array of all parsed values.

```ts
import { JSONL } from "bun";

const input = '{"id":1,"name":"Alice"}\n{"id":2,"name":"Bob"}\n{"id":3,"name":"Charlie"}\n';
const records = JSONL.parse(input);
console.log(records);
// [
//   { id: 1, name: "Alice" },
//   { id: 2, name: "Bob" },
//   { id: 3, name: "Charlie" }
// ]
```

Input can be a string or a `Uint8Array`:

```ts
const buffer = new TextEncoder().encode('{"a":1}\n{"b":2}\n');
const results = Bun.JSONL.parse(buffer);
// [{ a: 1 }, { b: 2 }]
```

When passed a `Uint8Array`, a UTF-8 BOM at the start of the buffer is automatically skipped.

### Error handling

If the input contains invalid JSON, `Bun.JSONL.parse()` throws a `SyntaxError`:

```ts
try {
  Bun.JSONL.parse('{"valid":true}\n{invalid}\n');
} catch (error) {
  console.error(error); // SyntaxError: Failed to parse JSONL
}
```

---

## `Bun.JSONL.parseChunk()`

For streaming scenarios, `parseChunk` parses as many complete values as possible from the input and reports how far it got. This is useful when receiving data incrementally (e.g., from a network stream) and you need to know where to resume parsing.

```ts
const chunk = '{"id":1}\n{"id":2}\n{"id":3';

const result = Bun.JSONL.parseChunk(chunk);
console.log(result.values); // [{ id: 1 }, { id: 2 }]
console.log(result.read); // 17 — characters consumed
console.log(result.done); // false — incomplete value remains
console.log(result.error); // null — no parse error
```

### Return value

`parseChunk` returns an object with four properties:

| Property | Type                  | Description                                                             |
| -------- | --------------------- | ----------------------------------------------------------------------- |
| `values` | `any[]`               | Array of successfully parsed JSON values                                |
| `read`   | `number`              | Number of bytes (for `Uint8Array`) or characters (for strings) consumed |
| `done`   | `boolean`             | `true` if the entire input was consumed with no remaining data          |
| `error`  | `SyntaxError \| null` | Parse error, or `null` if no error occurred                             |

### Streaming example

Use `read` to slice off consumed input and carry forward the remainder:

```ts
let buffer = "";

async function processStream(stream: ReadableStream<string>) {
  for await (const chunk of stream) {
    buffer += chunk;
    const result = Bun.JSONL.parseChunk(buffer);

    for (const value of result.values) {
      handleRecord(value);
    }

    // Keep only the unconsumed portion
    buffer = buffer.slice(result.read);
  }

  // Handle any remaining data
  if (buffer.length > 0) {
    const final = Bun.JSONL.parseChunk(buffer);
    for (const value of final.values) {
      handleRecord(value);
    }
    if (final.error) {
      console.error("Parse error in final chunk:", final.error.message);
    }
  }
}
```

### Byte offsets with `Uint8Array`

When the input is a `Uint8Array`, you can pass optional `start` and `end` byte offsets:

```ts
const buf = new TextEncoder().encode('{"a":1}\n{"b":2}\n{"c":3}\n');

// Parse starting from byte 8
const result = Bun.JSONL.parseChunk(buf, 8);
console.log(result.values); // [{ b: 2 }, { c: 3 }]
console.log(result.read); // 24

// Parse a specific range
const partial = Bun.JSONL.parseChunk(buf, 0, 8);
console.log(partial.values); // [{ a: 1 }]
```

The `read` value is always a byte offset into the original buffer, making it easy to use with `TypedArray.subarray()` for zero-copy streaming:

```ts
let buf = new Uint8Array(0);

async function processBinaryStream(stream: ReadableStream<Uint8Array>) {
  for await (const chunk of stream) {
    // Append chunk to buffer
    const newBuf = new Uint8Array(buf.length + chunk.length);
    newBuf.set(buf);
    newBuf.set(chunk, buf.length);
    buf = newBuf;

    const result = Bun.JSONL.parseChunk(buf);

    for (const value of result.values) {
      handleRecord(value);
    }

    // Keep unconsumed bytes (a zero-copy view into the same buffer)
    buf = buf.subarray(result.read);
  }
}
```

### Error recovery

Unlike `parse()`, `parseChunk()` does not throw on invalid JSON. Instead, it returns the error in the `error` property, along with any values that were successfully parsed before the error:

```ts
const input = '{"a":1}\n{invalid}\n{"b":2}\n';
const result = Bun.JSONL.parseChunk(input);

console.log(result.values); // [{ a: 1 }] — values parsed before the error
console.log(result.error); // SyntaxError
console.log(result.read); // 7 — position up to last successful parse
```

---

## Supported value types

Each line can be any valid JSON value, not just objects:

```ts
const input = '42\n"hello"\ntrue\nnull\n[1,2,3]\n{"key":"value"}\n';
const values = Bun.JSONL.parse(input);
// [42, "hello", true, null, [1, 2, 3], { key: "value" }]
```

---

## Performance notes

- **ASCII fast path**: Pure ASCII input is parsed directly without copying, using a zero-allocation `StringView`.
- **UTF-8 support**: Non-ASCII `Uint8Array` input is decoded to UTF-16 using SIMD-accelerated conversion.
- **BOM handling**: UTF-8 BOM (`0xEF 0xBB 0xBF`) at the start of a `Uint8Array` is automatically skipped.
- **Pre-built object shape**: The result object from `parseChunk` uses a cached structure for fast property access.
docs/runtime/markdown.mdx (new file, 344 lines)
@@ -0,0 +1,344 @@
---
title: Markdown
description: Parse and render Markdown with Bun's built-in Markdown API, supporting GFM extensions and custom rendering callbacks
---

{% callout type="note" %}
**Unstable API** — This API is under active development and may change in future versions of Bun.
{% /callout %}

Bun includes a fast, built-in Markdown parser written in Zig. It supports GitHub Flavored Markdown (GFM) extensions and provides three APIs:

- `Bun.markdown.html()` — render Markdown to an HTML string
- `Bun.markdown.render()` — render Markdown with custom callbacks for each element
- `Bun.markdown.react()` — render Markdown to React JSX elements

---

## `Bun.markdown.html()`

Convert a Markdown string to HTML.

```ts
const html = Bun.markdown.html("# Hello **world**");
// "<h1>Hello <strong>world</strong></h1>\n"
```

GFM extensions like tables, strikethrough, and task lists are enabled by default:

```ts
const html = Bun.markdown.html(`
| Feature       | Status   |
| ------------- | -------- |
| Tables        | ~~done~~ |
| Strikethrough | ~~done~~ |
| Task lists    | done     |
`);
```

### Options

Pass an options object as the second argument to configure the parser:

```ts
const html = Bun.markdown.html("some markdown", {
  tables: true, // GFM tables (default: true)
  strikethrough: true, // GFM strikethrough (default: true)
  tasklists: true, // GFM task lists (default: true)
  tagFilter: true, // GFM tag filter for disallowed HTML tags
  autolinks: true, // Autolink URLs, emails, and www. links
});
```

All available options:

| Option                 | Default | Description                                                 |
| ---------------------- | ------- | ----------------------------------------------------------- |
| `tables`               | `true`  | GFM tables                                                  |
| `strikethrough`        | `true`  | GFM strikethrough (`~~text~~`)                              |
| `tasklists`            | `true`  | GFM task lists (`- [x] item`)                               |
| `autolinks`            | `false` | Enable autolinks — see [Autolinks](#autolinks)              |
| `headings`             | `false` | Heading IDs and autolinks — see [Heading IDs](#heading-ids) |
| `hardSoftBreaks`       | `false` | Treat soft line breaks as hard breaks                       |
| `wikiLinks`            | `false` | Enable `[[wiki links]]`                                     |
| `underline`            | `false` | `__text__` renders as `<u>` instead of `<strong>`           |
| `latexMath`            | `false` | Enable `$inline$` and `$$display$$` math                    |
| `collapseWhitespace`   | `false` | Collapse whitespace in text                                 |
| `permissiveAtxHeaders` | `false` | ATX headers without space after `#`                         |
| `noIndentedCodeBlocks` | `false` | Disable indented code blocks                                |
| `noHtmlBlocks`         | `false` | Disable HTML blocks                                         |
| `noHtmlSpans`          | `false` | Disable inline HTML                                         |
| `tagFilter`            | `false` | GFM tag filter for disallowed HTML tags                     |

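The opt-in extensions are enabled the same way. A minimal sketch turning two of them on (the exact HTML emitted for wiki links and math is renderer-defined, so no output is shown):

```ts
const html = Bun.markdown.html("Euler: $e^{i\\pi} + 1 = 0$, see [[Math Notes]]", {
  latexMath: true,
  wikiLinks: true,
});
```
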
#### Autolinks

Pass `true` to enable all autolink types, or an object for granular control:

```ts
// Enable all autolinks (URL, WWW, email)
Bun.markdown.html("Visit www.example.com", { autolinks: true });

// Enable only specific types
Bun.markdown.html("Visit www.example.com", {
  autolinks: { url: true, www: true },
});
```

#### Heading IDs

Pass `true` to enable both heading IDs and autolink headings, or an object for granular control:

```ts
// Enable heading IDs and autolink headings
Bun.markdown.html("## Hello World", { headings: true });
// '<h2 id="hello-world"><a href="#hello-world">Hello World</a></h2>\n'

// Enable only heading IDs (no autolink)
Bun.markdown.html("## Hello World", { headings: { ids: true } });
// '<h2 id="hello-world">Hello World</h2>\n'
```

---

## `Bun.markdown.render()`

Parse Markdown and render it using custom JavaScript callbacks. This gives you full control over the output format — you can generate HTML with custom classes, React elements, ANSI terminal output, or any other string format.

```ts
const result = Bun.markdown.render("# Hello **world**", {
  heading: (children, { level }) => `<h${level} class="title">${children}</h${level}>`,
  strong: children => `<b>${children}</b>`,
  paragraph: children => `<p>${children}</p>`,
});
// '<h1 class="title">Hello <b>world</b></h1>'
```

### Callback signature

Each callback receives:

1. **`children`** — the accumulated content of the element as a string
2. **`meta`** (optional) — an object with element-specific metadata

Return a string to replace the element's rendering. Return `null` or `undefined` to omit the element from the output entirely. If no callback is registered for an element, its children pass through unchanged.

### Block callbacks

| Callback     | Meta                                        | Description                                                                              |
| ------------ | ------------------------------------------- | ---------------------------------------------------------------------------------------- |
| `heading`    | `{ level: number, id?: string }`            | Heading level 1–6. `id` is set when `headings: { ids: true }` is enabled                  |
| `paragraph`  | —                                           | Paragraph block                                                                           |
| `blockquote` | —                                           | Blockquote block                                                                          |
| `code`       | `{ language?: string }`                     | Fenced or indented code block. `language` is the info-string when specified on the fence  |
| `list`       | `{ ordered: boolean, start?: number }`      | Ordered or unordered list. `start` is the start number for ordered lists                  |
| `listItem`   | `{ checked?: boolean }`                     | List item. `checked` is set for task list items (`- [x]` / `- [ ]`)                       |
| `hr`         | —                                           | Horizontal rule                                                                           |
| `table`      | —                                           | Table block                                                                               |
| `thead`      | —                                           | Table head                                                                                |
| `tbody`      | —                                           | Table body                                                                                |
| `tr`         | —                                           | Table row                                                                                 |
| `th`         | `{ align?: "left" \| "center" \| "right" }` | Table header cell. `align` is set when alignment is specified                             |
| `td`         | `{ align?: "left" \| "center" \| "right" }` | Table data cell. `align` is set when alignment is specified                               |
| `html`       | —                                           | Raw HTML content                                                                          |

### Inline callbacks

| Callback        | Meta                               | Description                  |
| --------------- | ---------------------------------- | ---------------------------- |
| `strong`        | —                                  | Strong emphasis (`**text**`) |
| `emphasis`      | —                                  | Emphasis (`*text*`)          |
| `link`          | `{ href: string, title?: string }` | Link                         |
| `image`         | `{ src: string, title?: string }`  | Image                        |
| `codespan`      | —                                  | Inline code (`` `code` ``)   |
| `strikethrough` | —                                  | Strikethrough (`~~text~~`)   |
| `text`          | —                                  | Plain text content           |

### Examples

#### Custom HTML with classes

```ts
const html = Bun.markdown.render("# Title\n\nHello **world**", {
  heading: (children, { level }) => `<h${level} class="heading heading-${level}">${children}</h${level}>`,
  paragraph: children => `<p class="body">${children}</p>`,
  strong: children => `<strong class="bold">${children}</strong>`,
});
```

#### Stripping all formatting

```ts
const plaintext = Bun.markdown.render("# Hello **world**", {
  heading: children => children,
  paragraph: children => children,
  strong: children => children,
  emphasis: children => children,
  link: children => children,
  image: () => "",
  code: children => children,
  codespan: children => children,
});
// "Hello world"
```

#### Omitting elements

Return `null` or `undefined` to remove an element from the output:

```ts
const result = Bun.markdown.render("# Title\n\n\n\nHello", {
  image: () => null, // Remove all images
  heading: children => children,
  paragraph: children => children + "\n",
});
// "Title\nHello\n"
```

#### ANSI terminal output

```ts
const ansi = Bun.markdown.render("# Hello\n\nThis is **bold** and *italic*", {
  heading: (children, { level }) => `\x1b[1;4m${children}\x1b[0m\n`,
  paragraph: children => children + "\n",
  strong: children => `\x1b[1m${children}\x1b[22m`,
  emphasis: children => `\x1b[3m${children}\x1b[23m`,
});
```

#### Code block syntax highlighting

````ts
const result = Bun.markdown.render("```js\nconsole.log('hi')\n```", {
  code: (children, meta) => {
    const lang = meta?.language ?? "";
    return `<pre><code class="language-${lang}">${children}</code></pre>`;
  },
});
````

### Parser options

Parser options are passed as a separate third argument:

```ts
const result = Bun.markdown.render(
  "Visit www.example.com",
  {
    link: (children, { href }) => `[${children}](${href})`,
    paragraph: children => children,
  },
  { autolinks: true },
);
```

---

## `Bun.markdown.react()`

Render Markdown directly to React elements. Returns a `<Fragment>` that you can use as a component return value.

```tsx
function Markdown({ text }: { text: string }) {
  return Bun.markdown.react(text);
}
```

### Server-side rendering

Works with `renderToString()` and React Server Components:

```tsx
import { renderToString } from "react-dom/server";

const html = renderToString(Bun.markdown.react("# Hello **world**"));
// "<h1>Hello <strong>world</strong></h1>"
```

### Component overrides

Replace any HTML element with a custom React component by passing it in the second argument, keyed by tag name:

```tsx
function Code({ language, children }) {
  return (
    <pre data-language={language}>
      <code>{children}</code>
    </pre>
  );
}

function Link({ href, title, children }) {
  return (
    <a href={href} title={title} target="_blank" rel="noopener noreferrer">
      {children}
    </a>
  );
}

function Heading({ id, children }) {
  return (
    <h2 id={id}>
      <a href={`#${id}`}>{children}</a>
    </h2>
  );
}

const el = Bun.markdown.react(
  content,
  {
    pre: Code,
    a: Link,
    h2: Heading,
  },
  { headings: { ids: true } },
);
```

#### Available overrides

Every HTML tag produced by the parser can be overridden:

| Option       | Props                        | Description                                                     |
| ------------ | ---------------------------- | --------------------------------------------------------------- |
| `h1`–`h6`    | `{ id?, children }`          | Headings. `id` is set when `headings: { ids: true }` is enabled |
| `p`          | `{ children }`               | Paragraph                                                       |
| `blockquote` | `{ children }`               | Blockquote                                                      |
| `pre`        | `{ language?, children }`    | Code block. `language` is the info string (e.g. `"js"`)         |
| `hr`         | `{}`                         | Horizontal rule (no children)                                   |
| `ul`         | `{ children }`               | Unordered list                                                  |
| `ol`         | `{ start, children }`        | Ordered list. `start` is the first item number                  |
| `li`         | `{ checked?, children }`     | List item. `checked` is set for task list items                 |
| `table`      | `{ children }`               | Table                                                           |
| `thead`      | `{ children }`               | Table head                                                      |
| `tbody`      | `{ children }`               | Table body                                                      |
| `tr`         | `{ children }`               | Table row                                                       |
| `th`         | `{ align?, children }`       | Table header cell                                               |
| `td`         | `{ align?, children }`       | Table data cell                                                 |
| `em`         | `{ children }`               | Emphasis (`*text*`)                                             |
| `strong`     | `{ children }`               | Strong (`**text**`)                                             |
| `a`          | `{ href, title?, children }` | Link                                                            |
| `img`        | `{ src, alt?, title? }`      | Image (no children)                                             |
| `code`       | `{ children }`               | Inline code                                                     |
| `del`        | `{ children }`               | Strikethrough (`~~text~~`)                                      |
| `br`         | `{}`                         | Hard line break (no children)                                   |

### React 18 and older

By default, elements use `Symbol.for('react.transitional.element')` as the `$$typeof` symbol. For React 18 and older, pass `reactVersion: 18` in the options (third argument):

```tsx
function Markdown({ text }: { text: string }) {
  return Bun.markdown.react(text, undefined, { reactVersion: 18 });
}
```

### Parser options

All [parser options](#options) are passed as the third argument:

```tsx
const el = Bun.markdown.react("## Hello World", undefined, {
  headings: { ids: true },
  autolinks: true,
});
```
@@ -165,7 +165,7 @@ This page is updated regularly to reflect compatibility status of the latest ver

### [`node:inspector`](https://nodejs.org/api/inspector.html)

🟡 Partially implemented. The `Profiler` API is supported (`Profiler.enable`, `Profiler.disable`, `Profiler.start`, `Profiler.stop`, `Profiler.setSamplingInterval`). Other inspector APIs are not yet implemented.

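A minimal sketch of driving the supported `Profiler` methods through an inspector session. This assumes the promises-based `Session` from `node:inspector/promises`; treat it as illustrative rather than a guaranteed-supported recipe:

```ts
import { Session } from "node:inspector/promises";

// Collect a CPU profile using only the Profiler methods listed above.
const session = new Session();
session.connect();

await session.post("Profiler.enable");
await session.post("Profiler.setSamplingInterval", { interval: 100 });
await session.post("Profiler.start");

// ... run the code you want to profile ...

const { profile } = await session.post("Profiler.stop");
await session.post("Profiler.disable");
session.disconnect();

await Bun.write("profile.cpuprofile", JSON.stringify(profile));
```
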
### [`node:repl`](https://nodejs.org/api/repl.html)

@@ -135,6 +135,18 @@ await s3file.write(JSON.stringify({ name: "John", age: 30 }), {
  type: "application/json",
});

// Write with content encoding (e.g. for pre-compressed data)
await s3file.write(compressedData, {
  type: "application/json",
  contentEncoding: "gzip",
});

// Write with content disposition
await s3file.write(pdfData, {
  type: "application/pdf",
  contentDisposition: 'attachment; filename="report.pdf"',
});

// Write using a writer (streaming)
const writer = s3file.writer({ type: "application/json" });
writer.write("Hello");
@@ -188,7 +200,13 @@ const download = s3.presign("my-file.txt"); // GET, text/plain, expires in 24 ho
const upload = s3.presign("my-file", {
  expiresIn: 3600, // 1 hour
  method: "PUT",
  type: "application/json", // Sets response-content-type in the presigned URL
});

// Presign with content disposition (e.g. force download with a specific filename)
const downloadUrl = s3.presign("report.pdf", {
  expiresIn: 3600,
  contentDisposition: 'attachment; filename="quarterly-report.pdf"',
});

// You can call .presign() on a file reference, but avoid doing so

@@ -460,7 +460,7 @@ console.log(result); // Blob(13) { size: 13, type: "text/plain" }
For cross-platform compatibility, Bun Shell implements a set of builtin commands, in addition to reading commands from the PATH environment variable (a usage sketch follows the list):

- `cd`: change the working directory
- `ls`: list files in a directory (supports `-l` for long listing format)
- `rm`: remove files and directories
- `echo`: print text
- `pwd`: print the working directory

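A minimal sketch exercising a few of these builtins with Bun Shell's `$` template tag:

```ts
import { $ } from "bun";

// Builtins run the same on every platform; no /bin/ls or cmd.exe required.
await $`pwd`;
await $`echo "hello from Bun Shell"`;
const listing = await $`ls -l`.text();
console.log(listing);
```
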
@@ -880,6 +880,94 @@ npm/strip-ansi 212,992 chars long-ansi 1.36 ms/iter 1.38 ms

---

## `Bun.wrapAnsi()`

<Note>Drop-in replacement for `wrap-ansi` npm package</Note>

`Bun.wrapAnsi(input: string, columns: number, options?: WrapAnsiOptions): string`

Wrap text to a specified column width while preserving ANSI escape codes and hyperlinks, and handling Unicode/emoji width correctly. This is a native, high-performance alternative to the popular [`wrap-ansi`](https://www.npmjs.com/package/wrap-ansi) npm package.

```ts
// Basic wrapping at 20 columns
Bun.wrapAnsi("The quick brown fox jumps over the lazy dog", 20);
// => "The quick brown fox\njumps over the lazy\ndog"

// Preserves ANSI escape codes
Bun.wrapAnsi("\u001b[31mThe quick brown fox jumps over the lazy dog\u001b[0m", 20);
// => "\u001b[31mThe quick brown fox\njumps over the lazy\ndog\u001b[0m"
```

### Options

```ts
Bun.wrapAnsi("Hello World", 5, {
  hard: true, // Break words that exceed column width (default: false)
  wordWrap: true, // Wrap at word boundaries (default: true)
  trim: true, // Trim leading/trailing whitespace per line (default: true)
  ambiguousIsNarrow: true, // Treat ambiguous-width characters as narrow (default: true)
});
```

| Option              | Default | Description                                                                                                     |
| ------------------- | ------- | ---------------------------------------------------------------------------------------------------------------- |
| `hard`              | `false` | If `true`, break words in the middle if they exceed the column width.                                             |
| `wordWrap`          | `true`  | If `true`, wrap at word boundaries. If `false`, only break at explicit newlines.                                  |
| `trim`              | `true`  | If `true`, trim leading and trailing whitespace from each line.                                                   |
| `ambiguousIsNarrow` | `true`  | If `true`, treat ambiguous-width Unicode characters as 1 column wide. If `false`, treat them as 2 columns wide.   |

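A quick sketch of what `hard` changes, assuming the same word-breaking semantics as the `wrap-ansi` package it replaces:

```ts
// Soft wrapping (the default) leaves a too-long word unbroken on its own line
Bun.wrapAnsi("supercalifragilistic", 10);
// => "supercalifragilistic"

// hard: true breaks the word at exactly 10 columns
Bun.wrapAnsi("supercalifragilistic", 10, { hard: true });
// => "supercalif\nragilistic"
```
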
TypeScript definition:

```ts expandable
namespace Bun {
  export function wrapAnsi(
    /**
     * The string to wrap
     */
    input: string,
    /**
     * The maximum column width
     */
    columns: number,
    /**
     * Wrapping options
     */
    options?: {
      /**
       * If `true`, break words in the middle if they don't fit on a line.
       * If `false`, only break at word boundaries.
       *
       * @default false
       */
      hard?: boolean;
      /**
       * If `true`, wrap at word boundaries when possible.
       * If `false`, don't perform word wrapping (only wrap at explicit newlines).
       *
       * @default true
       */
      wordWrap?: boolean;
      /**
       * If `true`, trim leading and trailing whitespace from each line.
       * If `false`, preserve whitespace.
       *
       * @default true
       */
      trim?: boolean;
      /**
       * When it's ambiguous and `true`, count ambiguous-width characters as 1 character wide.
       * If `false`, count them as 2 characters wide.
       *
       * @default true
       */
      ambiguousIsNarrow?: boolean;
    },
  ): string;
}
```

---

## `serialize` & `deserialize` in `bun:jsc`

To save a JavaScript value into an ArrayBuffer & back, use `serialize` and `deserialize` from the `"bun:jsc"` module.

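For example:

```ts
import { serialize, deserialize } from "bun:jsc";

// Round-trip a value through an ArrayBuffer and back.
const buf = serialize({ name: "bun", tags: ["fast", "zig"] });
const value = deserialize(buf);
console.log(value); // { name: "bun", tags: ["fast", "zig"] }
```
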
@@ -50,7 +50,8 @@ bun build <entry points>
</ParamField>

<ParamField path="--format" type="string" default="esm">
  Module format of the output bundle. One of <code>esm</code>, <code>cjs</code>, or <code>iife</code>. Defaults to{" "}
  <code>cjs</code> when <code>--bytecode</code> is used.
</ParamField>

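The same pairing is available from the JavaScript API. A minimal sketch with `Bun.build()` (`./cli.ts` is a hypothetical entrypoint):

```ts
// --format=cjs and --bytecode on the CLI correspond to these options;
// bytecode compilation requires target "bun" and the cjs format.
await Bun.build({
  entrypoints: ["./cli.ts"],
  outdir: "./dist",
  target: "bun",
  format: "cjs",
  bytecode: true,
});
```
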
### File Naming

@@ -40,6 +40,18 @@ bun run <file or script>
  Run a script in all workspace packages (from the <code>workspaces</code> field in <code>package.json</code>)
</ParamField>

<ParamField path="--parallel" type="boolean">
  Run multiple scripts or workspace scripts concurrently with prefixed output
</ParamField>

<ParamField path="--sequential" type="boolean">
  Run multiple scripts or workspace scripts one after another with prefixed output
</ParamField>

<ParamField path="--no-exit-on-error" type="boolean">
  When using <code>--parallel</code> or <code>--sequential</code>, continue running other scripts when one fails
</ParamField>

### Runtime & Process Control

<ParamField path="--bun" type="boolean">

meta.json (new file, 24 lines)
@@ -0,0 +1,24 @@
{
  "inputs": {
    "../../tmp/test-entry.js": {
      "bytes": 21,
      "imports": [
      ],
      "format": "esm"
    }
  },
  "outputs": {
    "./test-entry.js": {
      "bytes": 49,
      "inputs": {
        "../../tmp/test-entry.js": {
          "bytesInOutput": 22
        }
      },
      "imports": [
      ],
      "exports": [],
      "entryPoint": "../../tmp/test-entry.js"
    }
  }
}
@@ -35,7 +35,7 @@ JSC_DEFINE_HOST_FUNCTION(jsFunctionWrite, (JSC::JSGlobalObject * globalObject,
    JSValue arg1 = callframe->argument(0);
    JSValue toWriteArg = callframe->argument(1);
    auto &vm = globalObject->vm();
    auto scope = DECLARE_TOP_EXCEPTION_SCOPE(vm);

    int32_t fd = STDOUT_FILENO;
    if (callframe->argumentCount() > 1) {

@@ -1,7 +1,7 @@
{
  "private": true,
  "name": "bun",
  "version": "1.3.9",
  "workspaces": [
    "./packages/bun-types",
    "./packages/@types/bun"

@@ -73,9 +73,11 @@ async function buildRootModule(dryRun?: boolean) {
  });
  // Create placeholder scripts that print an error message if postinstall hasn't run.
  // Do NOT add a shebang (#!/bin/sh) here — npm's cmd-shim reads shebangs to generate
  // .ps1/.cmd wrappers BEFORE postinstall runs, and bakes the interpreter path in.
  // A #!/bin/sh shebang breaks Windows because the wrappers reference /bin/sh which
  // doesn't exist, even after postinstall replaces the placeholder with the real binary.
  const placeholderScript = `echo "Error: Bun's postinstall script was not run." >&2
echo "" >&2
echo "This occurs when using --ignore-scripts during installation, or when using a" >&2
echo "package manager like pnpm that does not run postinstall scripts by default." >&2

packages/bun-types/bun.d.ts (vendored, 585 lines)
@@ -743,6 +743,101 @@ declare module "bun" {
    export function parse(input: string): unknown;
  }

  /**
   * JSONL (JSON Lines) related APIs.
   *
   * Each line in the input is expected to be a valid JSON value separated by newlines.
   */
  namespace JSONL {
    /**
     * The result of `Bun.JSONL.parseChunk`.
     */
    interface ParseChunkResult {
      /** The successfully parsed JSON values. */
      values: unknown[];
      /** How far into the input was consumed. When the input is a string, this is a character offset. When the input is a `TypedArray`, this is a byte offset. Use `input.slice(read)` or `input.subarray(read)` to get the unconsumed remainder. */
      read: number;
      /** `true` if all input was consumed successfully. `false` if the input ends with an incomplete value or a parse error occurred. */
      done: boolean;
      /** A `SyntaxError` if a parse error occurred, otherwise `null`. Values parsed before the error are still available in `values`. */
      error: SyntaxError | null;
    }

    /**
     * Parse a JSONL (JSON Lines) string into an array of JavaScript values.
     *
     * If a parse error occurs and no values were successfully parsed, throws
     * a `SyntaxError`. If values were parsed before the error, returns the
     * successfully parsed values without throwing.
     *
     * Incomplete trailing values (e.g. from a partial chunk) are silently
     * ignored and not included in the result.
     *
     * When a `TypedArray` is passed, the bytes are parsed directly without
     * copying if the content is ASCII.
     *
     * @param input The JSONL string or typed array to parse
     * @returns An array of parsed values
     * @throws {SyntaxError} If the input starts with invalid JSON and no values could be parsed
     *
     * @example
     * ```js
     * const items = Bun.JSONL.parse('{"a":1}\n{"b":2}\n');
     * // [{ a: 1 }, { b: 2 }]
     *
     * // From a Uint8Array (zero-copy for ASCII):
     * const buf = new TextEncoder().encode('{"a":1}\n{"b":2}\n');
     * const items = Bun.JSONL.parse(buf);
     * // [{ a: 1 }, { b: 2 }]
     *
     * // Partial results on error after valid values:
     * const partial = Bun.JSONL.parse('{"a":1}\n{bad}\n');
     * // [{ a: 1 }]
     *
     * // Throws when no valid values precede the error:
     * Bun.JSONL.parse('{bad}\n'); // throws SyntaxError
     * ```
     */
    export function parse(input: string | NodeJS.TypedArray | DataView<ArrayBuffer> | ArrayBufferLike): unknown[];

    /**
     * Parse a JSONL chunk, designed for streaming use.
     *
     * Never throws on parse errors. Instead, returns whatever values were
     * successfully parsed along with an `error` property containing the
     * `SyntaxError` (or `null` on success). Use `read` to determine how
     * much input was consumed and `done` to check if all input was parsed.
     *
     * When a `TypedArray` is passed, the bytes are parsed directly without
     * copying if the content is ASCII. Optional `start` and `end` parameters
     * allow slicing without copying, and `read` will be a byte offset into
     * the original typed array.
     *
     * @param input The JSONL string or typed array to parse
     * @param start Byte offset to start parsing from (typed array only, default: 0)
     * @param end Byte offset to stop parsing at (typed array only, default: input.byteLength)
     * @returns An object with `values`, `read`, `done`, and `error` properties
     *
     * @example
     * ```js
     * let buffer = new Uint8Array(0);
     * for await (const chunk of stream) {
     *   buffer = Buffer.concat([buffer, chunk]);
     *   const { values, read, error } = Bun.JSONL.parseChunk(buffer);
     *   if (error) throw error;
     *   for (const value of values) handle(value);
     *   buffer = buffer.subarray(read);
     * }
     * ```
     */
    export function parseChunk(input: string): ParseChunkResult;
    export function parseChunk(
      input: NodeJS.TypedArray | DataView<ArrayBuffer> | ArrayBufferLike,
      start?: number,
      end?: number,
    ): ParseChunkResult;
  }

  /**
   * YAML related APIs
   */
@@ -810,6 +905,480 @@ declare module "bun" {
|
||||
export function stringify(input: unknown, replacer?: undefined | null, space?: string | number): string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Markdown related APIs.
|
||||
*
|
||||
* Provides fast markdown parsing and rendering with three output modes:
|
||||
* - `html()` — render to an HTML string
|
||||
* - `render()` — render with custom callbacks for each element
|
||||
* - `react()` — parse to React-compatible JSX elements
|
||||
*
|
||||
* Supports GFM extensions (tables, strikethrough, task lists, autolinks) and
|
||||
* component overrides to replace default HTML tags with custom components.
|
||||
*
|
||||
* @example
|
||||
* ```tsx
|
||||
* // Render markdown to HTML
|
||||
* const html = Bun.markdown.html("# Hello **world**");
|
||||
* // "<h1>Hello <strong>world</strong></h1>\n"
|
||||
*
|
||||
* // Render with custom callbacks
|
||||
* const ansi = Bun.markdown.render("# Hello **world**", {
|
||||
* heading: (children, { level }) => `\x1b[1m${children}\x1b[0m\n`,
|
||||
* strong: (children) => `\x1b[1m${children}\x1b[22m`,
|
||||
* paragraph: (children) => children + "\n",
|
||||
* });
|
||||
*
|
||||
* // Render as a React component
|
||||
* function Markdown({ text }: { text: string }) {
|
||||
* return Bun.markdown.react(text);
|
||||
* }
|
||||
*
|
||||
* // With component overrides
|
||||
* const element = Bun.markdown.react("# Hello", { h1: MyHeadingComponent });
|
||||
* ```
|
||||
*/
|
||||
namespace markdown {
|
||||
/**
|
||||
* Options for configuring the markdown parser.
|
||||
*
|
||||
* By default, GFM extensions (tables, strikethrough, task lists) are enabled.
|
||||
*/
|
||||
interface Options {
|
||||
/** Enable GFM tables. Default: `true`. */
|
||||
tables?: boolean;
|
||||
/** Enable GFM strikethrough (`~~text~~`). Default: `true`. */
|
||||
strikethrough?: boolean;
|
||||
/** Enable GFM task lists (`- [x] item`). Default: `true`. */
|
||||
tasklists?: boolean;
|
||||
/** Treat soft line breaks as hard line breaks. Default: `false`. */
|
||||
hardSoftBreaks?: boolean;
|
||||
/** Enable wiki-style links (`[[target]]` or `[[target|label]]`). Default: `false`. */
|
||||
wikiLinks?: boolean;
|
||||
/** Enable underline syntax (`__text__` renders as `<u>` instead of `<strong>`). Default: `false`. */
|
||||
underline?: boolean;
|
||||
/** Enable LaTeX math (`$inline$` and `$$display$$`). Default: `false`. */
|
||||
latexMath?: boolean;
|
||||
/** Collapse whitespace in text content. Default: `false`. */
|
||||
collapseWhitespace?: boolean;
|
||||
/** Allow ATX headers without a space after `#`. Default: `false`. */
|
||||
permissiveAtxHeaders?: boolean;
|
||||
/** Disable indented code blocks. Default: `false`. */
|
||||
noIndentedCodeBlocks?: boolean;
|
||||
/** Disable HTML blocks. Default: `false`. */
|
||||
noHtmlBlocks?: boolean;
|
||||
/** Disable inline HTML spans. Default: `false`. */
|
||||
noHtmlSpans?: boolean;
|
||||
/**
|
||||
* Enable the GFM tag filter, which replaces `<` with `<` for disallowed
|
||||
* HTML tags (e.g. `<script>`, `<style>`, `<iframe>`). Default: `false`.
|
||||
*/
|
||||
tagFilter?: boolean;
|
||||
/**
|
||||
* Enable autolinks. Pass `true` to enable all autolink types (URL, WWW, email),
|
||||
* or an object to enable individually.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* // Enable all autolinks
|
||||
* { autolinks: true }
|
||||
* // Enable only URL and email autolinks
|
||||
* { autolinks: { url: true, email: true } }
|
||||
* ```
|
||||
*/
|
||||
autolinks?: boolean | { url?: boolean; www?: boolean; email?: boolean };
|
||||
/**
|
||||
* Configure heading IDs and autolink headings. Pass `true` to enable both
|
||||
* heading IDs and autolink headings, or an object to configure individually.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* // Enable both heading IDs and autolink headings
|
||||
* { headings: true }
|
||||
* // Enable only heading IDs
|
||||
* { headings: { ids: true } }
|
||||
* ```
|
||||
*/
|
||||
headings?: boolean | { ids?: boolean; autolink?: boolean };
|
||||
}
|
||||
|
||||
/** A component that accepts props `P`: a function, class, or HTML tag name. */
|
||||
type Component<P = {}> = string | ((props: P) => any) | (new (props: P) => any);
|
||||
|
||||
interface ChildrenProps {
|
||||
children: import("./jsx.d.ts").JSX.Element[];
|
||||
}
|
||||
interface HeadingProps extends ChildrenProps {
|
||||
/** Heading ID slug. Set when `headings: { ids: true }` is enabled. */
|
||||
id?: string;
|
||||
}
|
||||
interface OrderedListProps extends ChildrenProps {
|
||||
/** The start number. */
|
||||
start: number;
|
||||
}
|
||||
interface ListItemProps extends ChildrenProps {
|
||||
/** Task list checked state. Set for `- [x]` / `- [ ]` items. */
|
||||
checked?: boolean;
|
||||
}
|
||||
interface CodeBlockProps extends ChildrenProps {
|
||||
/** The info-string language (e.g. `"js"`). */
|
||||
language?: string;
|
||||
}
|
||||
interface CellProps extends ChildrenProps {
|
||||
/** Column alignment. */
|
||||
align?: "left" | "center" | "right";
|
||||
}
|
||||
interface LinkProps extends ChildrenProps {
|
||||
/** Link URL. */
|
||||
href: string;
|
||||
/** Link title attribute. */
|
||||
title?: string;
|
||||
}
|
||||
interface ImageProps {
|
||||
/** Image URL. */
|
||||
src: string;
|
||||
/** Alt text. */
|
||||
alt?: string;
|
||||
/** Image title attribute. */
|
||||
title?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Component overrides for `react()`.
|
||||
*
|
||||
* Replace default HTML tags with custom React components. Each override
|
||||
* receives the same props the default element would get.
|
||||
*
|
||||
* @example
|
||||
* ```tsx
|
||||
* function Code({ language, children }: { language?: string; children: React.ReactNode }) {
|
||||
* return <pre data-language={language}><code>{children}</code></pre>;
|
||||
* }
|
||||
* Bun.markdown.react(text, { pre: Code });
|
||||
* ```
|
||||
*/
|
||||
interface ComponentOverrides {
|
||||
h1?: Component<HeadingProps>;
|
||||
h2?: Component<HeadingProps>;
|
||||
h3?: Component<HeadingProps>;
|
||||
h4?: Component<HeadingProps>;
|
||||
h5?: Component<HeadingProps>;
|
||||
h6?: Component<HeadingProps>;
|
||||
p?: Component<ChildrenProps>;
|
||||
blockquote?: Component<ChildrenProps>;
|
||||
ul?: Component<ChildrenProps>;
|
||||
ol?: Component<OrderedListProps>;
|
||||
li?: Component<ListItemProps>;
|
||||
pre?: Component<CodeBlockProps>;
|
||||
hr?: Component<{}>;
|
||||
html?: Component<ChildrenProps>;
|
||||
table?: Component<ChildrenProps>;
|
||||
thead?: Component<ChildrenProps>;
|
||||
tbody?: Component<ChildrenProps>;
|
||||
tr?: Component<ChildrenProps>;
|
||||
th?: Component<CellProps>;
|
||||
td?: Component<CellProps>;
|
||||
em?: Component<ChildrenProps>;
|
||||
strong?: Component<ChildrenProps>;
|
||||
a?: Component<LinkProps>;
|
||||
img?: Component<ImageProps>;
|
||||
code?: Component<ChildrenProps>;
|
||||
del?: Component<ChildrenProps>;
|
||||
math?: Component<ChildrenProps>;
|
||||
u?: Component<ChildrenProps>;
|
||||
br?: Component<{}>;
|
||||
}
|
||||
|
||||
/**
|
||||
* Callbacks for `render()`. Each callback receives the accumulated children
|
||||
* as a string and optional metadata, and returns a string.
|
||||
*
|
||||
* Return `null` or `undefined` to omit the element from the output.
|
||||
* If no callback is registered for an element, its children pass through unchanged.
|
||||
*/
|
||||
/** Meta passed to the `heading` callback. */
|
||||
interface HeadingMeta {
|
||||
/** Heading level (1–6). */
|
||||
level: number;
|
||||
/** Heading ID slug. Set when `headings: { ids: true }` is enabled. */
  id?: string;
}

/** Meta passed to the `code` callback. */
interface CodeBlockMeta {
  /** The info-string language (e.g. `"js"`). */
  language?: string;
}

/** Meta passed to the `list` callback. */
interface ListMeta {
  /** Whether this is an ordered list. */
  ordered: boolean;
  /** The start number for ordered lists. */
  start?: number;
}

/** Meta passed to the `listItem` callback. */
interface ListItemMeta {
  /** Task list checked state. Set for `- [x]` / `- [ ]` items. */
  checked?: boolean;
}

/** Meta passed to `th` and `td` callbacks. */
interface CellMeta {
  /** Column alignment. */
  align?: "left" | "center" | "right";
}

/** Meta passed to the `link` callback. */
interface LinkMeta {
  /** Link URL. */
  href: string;
  /** Link title attribute. */
  title?: string;
}

/** Meta passed to the `image` callback. */
interface ImageMeta {
  /** Image URL. */
  src: string;
  /** Image title attribute. */
  title?: string;
}

interface RenderCallbacks {
  /** Heading (level 1–6). `id` is set when `headings: { ids: true }` is enabled. */
  heading?: (children: string, meta: HeadingMeta) => string | null | undefined;
  /** Paragraph. */
  paragraph?: (children: string) => string | null | undefined;
  /** Blockquote. */
  blockquote?: (children: string) => string | null | undefined;
  /** Code block. `meta.language` is the info-string (e.g. `"js"`). Only passed for fenced code blocks with a language. */
  code?: (children: string, meta?: CodeBlockMeta) => string | null | undefined;
  /** Ordered or unordered list. `start` is the first item number for ordered lists. */
  list?: (children: string, meta: ListMeta) => string | null | undefined;
  /** List item. `meta.checked` is set for task list items (`- [x]` / `- [ ]`). Only passed for task list items. */
  listItem?: (children: string, meta?: ListItemMeta) => string | null | undefined;
  /** Horizontal rule. */
  hr?: (children: string) => string | null | undefined;
  /** Table. */
  table?: (children: string) => string | null | undefined;
  /** Table head. */
  thead?: (children: string) => string | null | undefined;
  /** Table body. */
  tbody?: (children: string) => string | null | undefined;
  /** Table row. */
  tr?: (children: string) => string | null | undefined;
  /** Table header cell. `meta.align` is set when column alignment is specified. */
  th?: (children: string, meta?: CellMeta) => string | null | undefined;
  /** Table data cell. `meta.align` is set when column alignment is specified. */
  td?: (children: string, meta?: CellMeta) => string | null | undefined;
  /** Raw HTML content. */
  html?: (children: string) => string | null | undefined;
  /** Strong emphasis (`**text**`). */
  strong?: (children: string) => string | null | undefined;
  /** Emphasis (`*text*`). */
  emphasis?: (children: string) => string | null | undefined;
  /** Link. `href` is the URL, `title` is the optional title attribute. */
  link?: (children: string, meta: LinkMeta) => string | null | undefined;
  /** Image. `src` is the URL, `title` is the optional title attribute. */
  image?: (children: string, meta: ImageMeta) => string | null | undefined;
  /** Inline code (`` `code` ``). */
  codespan?: (children: string) => string | null | undefined;
  /** Strikethrough (`~~text~~`). */
  strikethrough?: (children: string) => string | null | undefined;
  /** Plain text content. */
  text?: (text: string) => string | null | undefined;
}

/** Options for `react()` — parser options and element symbol configuration. */
interface ReactOptions extends Options {
  /**
   * Which `$$typeof` symbol to use on the generated elements.
   * - `19` (default): `Symbol.for('react.transitional.element')`
   * - `18`: `Symbol.for('react.element')` — use this for React 18 and older
   */
  reactVersion?: 18 | 19;
}

/**
 * Render markdown to an HTML string.
 *
 * @param input The markdown string or buffer to render
 * @param options Parser options
 * @returns An HTML string
 *
 * @example
 * ```ts
 * const html = Bun.markdown.html("# Hello **world**");
 * // "<h1>Hello <strong>world</strong></h1>\n"
 *
 * // With options
 * const html = Bun.markdown.html("## Hello", { headings: { ids: true } });
 * // '<h2 id="hello">Hello</h2>\n'
 * ```
 */
export function html(
  input: string | NodeJS.TypedArray | DataView<ArrayBuffer> | ArrayBufferLike,
  options?: Options,
): string;

/**
 * Render markdown with custom JavaScript callbacks for each element.
 *
 * Each callback receives the accumulated children as a string and optional
 * metadata, and returns a string. Return `null` or `undefined` to omit
 * an element. If no callback is registered, children pass through unchanged.
 *
 * Parser options are passed as a separate third argument.
 *
 * @param input The markdown string or buffer to render
 * @param callbacks Callbacks for each element type
 * @param options Parser options
 * @returns The accumulated string output
 *
 * @example
 * ```ts
 * // Custom HTML with classes
 * const html = Bun.markdown.render("# Title\n\nHello **world**", {
 *   heading: (children, { level }) => `<h${level} class="title">${children}</h${level}>`,
 *   paragraph: (children) => `<p>${children}</p>`,
 *   strong: (children) => `<b>${children}</b>`,
 * });
 *
 * // ANSI terminal output
 * const ansi = Bun.markdown.render("# Hello\n\n**bold**", {
 *   heading: (children) => `\x1b[1;4m${children}\x1b[0m\n`,
 *   paragraph: (children) => children + "\n",
 *   strong: (children) => `\x1b[1m${children}\x1b[22m`,
 * });
 *
 * // With parser options as third argument
 * const text = Bun.markdown.render("Visit www.example.com", {
 *   link: (children, { href }) => `[${children}](${href})`,
 *   paragraph: (children) => children,
 * }, { autolinks: true });
 * ```
 */
export function render(
  input: string | NodeJS.TypedArray | DataView<ArrayBuffer> | ArrayBufferLike,
  callbacks?: RenderCallbacks,
  options?: Options,
): string;
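
// Note, as an illustrative sketch rather than part of the declarations: the docs
// above say a callback may return `null`/`undefined` to omit an element. With a
// made-up input:
//
//   const noImages = Bun.markdown.render("Logo: ![logo](logo.png)", {
//     image: () => null,                 // omit every image
//     paragraph: (children) => children, // pass text through unchanged
//   });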

/**
 * Render markdown to React JSX elements.
 *
 * Returns a React Fragment containing the parsed markdown as children.
 * Can be returned directly from a component or passed to `renderToString()`.
 *
 * Override any HTML element with a custom component by passing it in the
 * second argument, keyed by tag name. Custom components receive the same props
 * the default elements would (e.g. `href` for links, `language` for code blocks).
 *
 * Parser options (including `reactVersion`) are passed as a separate third argument.
 * Uses `Symbol.for('react.transitional.element')` by default (React 19).
 * Pass `reactVersion: 18` for React 18 and older.
 *
 * @param input The markdown string or buffer to parse
 * @param components Component overrides keyed by HTML tag name
 * @param options Parser options and element symbol configuration
 * @returns A React Fragment element containing the parsed markdown
 *
 * @example
 * ```tsx
 * // Use directly as a component return value
 * function Markdown({ text }: { text: string }) {
 *   return Bun.markdown.react(text);
 * }
 *
 * // Server-side rendering
 * import { renderToString } from "react-dom/server";
 * const html = renderToString(Bun.markdown.react("# Hello **world**"));
 *
 * // Custom components receive element props
 * function Code({ language, children }: { language?: string; children: React.ReactNode }) {
 *   return <pre data-language={language}><code>{children}</code></pre>;
 * }
 * function Link({ href, children }: { href: string; children: React.ReactNode }) {
 *   return <a href={href} target="_blank">{children}</a>;
 * }
 * const el = Bun.markdown.react(text, { pre: Code, a: Link });
 *
 * // For React 18 and older
 * const el18 = Bun.markdown.react(text, undefined, { reactVersion: 18 });
 * ```
 */
export function react(
  input: string | NodeJS.TypedArray | DataView<ArrayBuffer> | ArrayBufferLike,
  components?: ComponentOverrides,
  options?: ReactOptions,
): import("./jsx.d.ts").JSX.Element;
}

/**
 * JSON5 related APIs
 */
namespace JSON5 {
  /**
   * Parse a JSON5 string into a JavaScript value.
   *
   * JSON5 is a superset of JSON based on ECMAScript 5.1 that supports
   * comments, trailing commas, unquoted keys, single-quoted strings,
   * hex numbers, Infinity, NaN, and more.
   *
   * @category Utilities
   *
   * @param input The JSON5 string to parse
   * @returns A JavaScript value
   *
   * @example
   * ```ts
   * import { JSON5 } from "bun";
   *
   * const result = JSON5.parse(`{
   *   // This is a comment
   *   name: 'my-app',
   *   version: '1.0.0', // trailing comma is allowed
   *   hex: 0xDEADbeef,
   *   half: .5,
   *   infinity: Infinity,
   * }`);
   * ```
   */
  export function parse(input: string): unknown;

  /**
   * Convert a JavaScript value into a JSON5 string. Object keys that are
   * valid identifiers are unquoted, strings use double quotes, `Infinity`
   * and `NaN` are represented as literals, and indented output includes
   * trailing commas.
   *
   * @category Utilities
   *
   * @param input The JavaScript value to stringify.
   * @param replacer Currently not supported.
   * @param space A number for how many spaces each level of indentation gets, or a string used as indentation.
   * The number is clamped between 0 and 10, and the first 10 characters of the string are used.
   * @returns A JSON5 string, or `undefined` if the input is `undefined`, a function, or a symbol.
   *
   * @example
   * ```ts
   * import { JSON5 } from "bun";
   *
   * console.log(JSON5.stringify({ a: 1, b: "two" }));
   * // {a:1,b:"two"}
   *
   * console.log(JSON5.stringify({ a: 1, b: 2 }, null, 2));
   * // {
   * //   a: 1,
   * //   b: 2,
   * // }
   * ```
   */
  export function stringify(input: unknown, replacer?: undefined | null, space?: string | number): string | undefined;
}
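
// Usage sketch (not part of the declarations): `parse` and `stringify` compose
// into a round trip. Comments in the input are not part of the value, so they
// are dropped; per the docs above, indented output uses double quotes, unquoted
// identifier keys, and trailing commas.
//
//   import { JSON5 } from "bun";
//   const value = JSON5.parse("{ name: 'my-app', retries: 3, }");
//   JSON5.stringify(value, null, 2);
//   // {
//   //   name: "my-app",
//   //   retries: 3,
//   // }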

/**
 * Synchronously resolve a `moduleId` as though it were imported from `parent`
 *

@@ -1864,12 +2433,13 @@ declare module "bun" {
type SIMD = "baseline" | "modern";
type CompileTarget =
  | `bun-darwin-${Architecture}`
  | `bun-darwin-x64-${SIMD}`
  | `bun-darwin-${Architecture}-${SIMD}`
  | `bun-linux-${Architecture}`
  | `bun-linux-${Architecture}-${Libc}`
  | `bun-linux-${Architecture}-${SIMD}`
  | `bun-linux-${Architecture}-${SIMD}-${Libc}`
  | "bun-windows-x64"
  | `bun-windows-x64-${SIMD}`
  | `bun-linux-x64-${SIMD}-${Libc}`;
  | `bun-windows-x64-${SIMD}`;
}
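
A few strings that satisfy the CompileTarget union above, as a sketch (this assumes `Architecture` and `Libc` are the string unions used elsewhere in these types, and that the type is reachable via `import("bun")`):

  type T = import("bun").CompileTarget;
  const a: T = "bun-darwin-aarch64";
  const b: T = "bun-linux-aarch64-musl";      // `bun-linux-${Architecture}-${Libc}`
  const c: T = "bun-linux-x64-baseline-musl"; // `bun-linux-${Architecture}-${SIMD}-${Libc}`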

/**

@@ -2024,7 +2594,10 @@ declare module "bun" {
 * start times, but will make the final output larger and slightly increase
 * memory usage.
 *
 * Bytecode is currently only supported for CommonJS (`format: "cjs"`).
 * - CommonJS: works with or without `compile: true`
 * - ESM: requires `compile: true`
 *
 * Without an explicit `format`, defaults to CommonJS.
 *
 * Must be `target: "bun"`
 * @default false
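
A sketch of how these options combine, following the notes above (the entry point name is made up, and the exact `Bun.build` option surface is assumed here, not verified):

  // CommonJS bytecode: no compile step required.
  await Bun.build({
    entrypoints: ["./index.ts"],
    outdir: "./dist",
    target: "bun",   // bytecode requires target: "bun"
    format: "cjs",
    bytecode: true,
  });

  // ESM bytecode is only valid together with compile: true.
  await Bun.build({
    entrypoints: ["./index.ts"],
    target: "bun",
    format: "esm",
    bytecode: true,
    compile: true,
  });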

@@ -5109,7 +5682,7 @@ declare module "bun" {
 *
 * This will apply to all sockets from the same {@link Listener}. It is per-socket only for {@link Bun.connect}.
 */
reload(handler: SocketHandler): void;
reload(options: Pick<SocketOptions<Data>, "socket">): void;

/**
 * Get the server that created this socket

@@ -5452,7 +6025,7 @@ declare module "bun" {
stop(closeActiveConnections?: boolean): void;
ref(): void;
unref(): void;
reload(options: Pick<Partial<SocketOptions>, "socket">): void;
reload(options: Pick<SocketOptions<Data>, "socket">): void;
data: Data;
}
interface TCPSocketListener<Data = unknown> extends SocketListener<Data> {

packages/bun-types/extensions.d.ts

@@ -23,6 +23,11 @@ declare module "*.jsonc" {
export = contents;
}

declare module "*.json5" {
  var contents: any;
  export = contents;
}
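
With the declaration above, `.json5` files can be imported directly and are typed as `any`; a minimal sketch (the file name is hypothetical):

  import config from "./config.json5";
  console.log(config.name); // no checking beyond `any`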

declare module "*/bun.lock" {
  var contents: import("bun").BunLockFile;
  export = contents;

packages/bun-types/jsx.d.ts

@@ -0,0 +1,11 @@
export {};

type ReactElement = typeof globalThis extends { React: infer React }
  ? React extends { createElement(...args: any): infer R }
    ? R
    : never
  : unknown;

export namespace JSX {
  export type Element = ReactElement;
}
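
The conditional type keys `JSX.Element` off whatever `React` the consumer has on `globalThis`, so this file carries no hard dependency on React's own types. A sketch of how it resolves (the global declaration is illustrative):

  // If some setup declares a global React with createElement...
  declare global {
    var React: { createElement(tag: string): { kind: "element" } };
  }
  // ...ReactElement infers the return type, so JSX.Element = { kind: "element" }.
  // With no global React, the type falls back to `unknown`.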

@@ -204,26 +204,38 @@ namespace uWS {
}

// do we have data to emit all?
if (data.length() >= chunkSize(state)) {
unsigned int remaining = chunkSize(state);
if (data.length() >= remaining) {
    // emit all but 2 bytes then reset state to 0 and goto beginning
    // not fin
    std::string_view emitSoon;
    bool shouldEmit = false;
    if (chunkSize(state) > 2) {
        emitSoon = std::string_view(data.data(), chunkSize(state) - 2);
        shouldEmit = true;
    // Validate the chunk terminator (\r\n) accounting for partial reads
    switch (remaining) {
    default:
        // remaining > 2: emit data and validate full terminator
        emitSoon = std::string_view(data.data(), remaining - 2);
        shouldEmit = true;
        [[fallthrough]];
    case 2:
        // remaining >= 2: validate both \r and \n
        if (data[remaining - 2] != '\r' || data[remaining - 1] != '\n') {
            state = STATE_IS_ERROR;
            return std::nullopt;
        }
        break;
    case 1:
        // remaining == 1: only \n left to validate
        if (data[0] != '\n') {
            state = STATE_IS_ERROR;
            return std::nullopt;
        }
        break;
    case 0:
        // remaining == 0: terminator already consumed
        break;
    }
    // Validate that the chunk terminator is \r\n to prevent request smuggling
    // The last 2 bytes of the chunk must be exactly \r\n
    // Note: chunkSize always includes +2 for the terminator (added in consumeHexNumber),
    // and chunks with size 0 (chunkSize == 2) are handled earlier at line 190.
    // Therefore chunkSize >= 3 here, so no underflow is possible.
    size_t terminatorOffset = chunkSize(state) - 2;
    if (data[terminatorOffset] != '\r' || data[terminatorOffset + 1] != '\n') {
        state = STATE_IS_ERROR;
        return std::nullopt;
    }
    data.remove_prefix(chunkSize(state));
    data.remove_prefix(remaining);
    state = STATE_IS_CHUNKED;
    if (shouldEmit) {
        return emitSoon;

@@ -232,19 +244,45 @@ namespace uWS {
} else {
    /* We will consume all our input data */
    std::string_view emitSoon;
    if (chunkSize(state) > 2) {
        uint64_t maximalAppEmit = chunkSize(state) - 2;
        if (data.length() > maximalAppEmit) {
    unsigned int size = chunkSize(state);
    size_t len = data.length();
    if (size > 2) {
        uint64_t maximalAppEmit = size - 2;
        if (len > maximalAppEmit) {
            emitSoon = data.substr(0, maximalAppEmit);
            // Validate terminator bytes being consumed
            size_t terminatorBytesConsumed = len - maximalAppEmit;
            if (terminatorBytesConsumed >= 1 && data[maximalAppEmit] != '\r') {
                state = STATE_IS_ERROR;
                return std::nullopt;
            }
            if (terminatorBytesConsumed >= 2 && data[maximalAppEmit + 1] != '\n') {
                state = STATE_IS_ERROR;
                return std::nullopt;
            }
        } else {
            //cb(data);
            emitSoon = data;
        }
    } else if (size == 2) {
        // Only terminator bytes remain, validate what we have
        if (len >= 1 && data[0] != '\r') {
            state = STATE_IS_ERROR;
            return std::nullopt;
        }
        if (len >= 2 && data[1] != '\n') {
            state = STATE_IS_ERROR;
            return std::nullopt;
        }
    } else if (size == 1) {
        // Only \n remains
        if (data[0] != '\n') {
            state = STATE_IS_ERROR;
            return std::nullopt;
        }
    }
    decChunkSize(state, (unsigned int) data.length());
    decChunkSize(state, (unsigned int) len);
    state |= STATE_IS_CHUNKED;
    // new: decrease data by its size (bug)
    data.remove_prefix(data.length()); // new bug fix for getNextChunk
    data.remove_prefix(len);
    if (emitSoon.length()) {
        return emitSoon;
    } else {
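
Both hunks enforce the same invariant from different entry points: every chunk body must be terminated by exactly \r\n, and the check has to hold even when the terminator arrives split across reads. A compact TypeScript sketch of that invariant (illustrative only, not the uWS implementation):

  // `remaining` counts outstanding body bytes plus the 2 terminator bytes,
  // mirroring how chunkSize() includes +2 for \r\n.
  function terminatorOk(data: Uint8Array, remaining: number): boolean {
    if (remaining >= 2 && data.length >= remaining) {
      // full terminator visible: bytes must be exactly \r (13) then \n (10)
      return data[remaining - 2] === 13 && data[remaining - 1] === 10;
    }
    if (remaining === 1 && data.length >= 1) {
      return data[0] === 10; // only the trailing \n was left from a prior read
    }
    return true; // terminator not yet buffered; re-check on the next read
  }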
@@ -1,5 +1,5 @@
#!/bin/sh
# Version: 26
# Version: 27

# A script that installs the dependencies needed to build and test Bun.
# This should work on macOS and Linux with a POSIX shell.

@@ -1061,6 +1061,11 @@ install_build_essentials() {
      go \
      xz
    install_packages apache2-utils
    # QEMU user-mode for baseline CPU verification in CI
    case "$arch" in
      x64) install_packages qemu-x86_64 ;;
      aarch64) install_packages qemu-aarch64 ;;
    esac
    ;;
esac
@@ -129,6 +129,7 @@ const getCommonFlags = (config: BuildConfig) => {
      "-DBUN_FAST_TLS=ON",
      "-DPTHREAD_JIT_PERMISSIONS_API=1",
      "-DUSE_PTHREAD_JIT_PERMISSIONS_API=ON",
      "-DENABLE_REMOTE_INSPECTOR=ON",
    );
  } else if (IS_LINUX) {
    flags.push(

@@ -172,7 +173,6 @@ const getBuildFlags = (config: BuildConfig) => {
      "-DCMAKE_BUILD_TYPE=Debug",
      "-DENABLE_BUN_SKIP_FAILING_ASSERTIONS=ON",
      "-DCMAKE_EXPORT_COMPILE_COMMANDS=ON",
      "-DENABLE_REMOTE_INSPECTOR=ON",
      "-DUSE_VISIBILITY_ATTRIBUTE=1",
    );
scripts/update-uucode.sh

@@ -0,0 +1,82 @@
#!/bin/bash
# Updates the vendored uucode library and regenerates grapheme tables.
#
# Usage:
#   ./scripts/update-uucode.sh                    # update from the zig global cache (default)
#   ./scripts/update-uucode.sh /path/to/uucode    # update from local directory
#   ./scripts/update-uucode.sh https://url.tar.gz # update from URL
#
# After running, verify with:
#   bun bd test test/js/bun/util/stringWidth.test.ts

set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
BUN_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
UUCODE_DIR="$BUN_ROOT/src/deps/uucode"
ZIG="$BUN_ROOT/vendor/zig/zig"

if [ ! -x "$ZIG" ]; then
  echo "error: zig not found at $ZIG"
  echo "       run scripts/bootstrap.sh first"
  exit 1
fi

update_from_dir() {
  local src="$1"
  echo "Updating uucode from: $src"
  rm -rf "$UUCODE_DIR"
  mkdir -p "$UUCODE_DIR"
  cp -r "$src"/* "$UUCODE_DIR/"
}

update_from_url() {
  local url="$1"
  local tmp
  tmp=$(mktemp -d)
  trap "rm -rf $tmp" EXIT

  echo "Downloading uucode from: $url"
  curl -fsSL "$url" | tar -xz -C "$tmp" --strip-components=1

  update_from_dir "$tmp"
}

# Handle source argument
if [ $# -ge 1 ]; then
  SOURCE="$1"
  if [ -d "$SOURCE" ]; then
    update_from_dir "$SOURCE"
  elif [[ "$SOURCE" == http* ]]; then
    update_from_url "$SOURCE"
  else
    echo "error: argument must be a directory or URL"
    exit 1
  fi
else
  # Default: use the zig global cache if available
  CACHED=$(find "$HOME/.cache/zig/p" -maxdepth 1 -name "uucode-*" -type d 2>/dev/null | sort -V | tail -1)
  if [ -n "$CACHED" ]; then
    update_from_dir "$CACHED"
  else
    echo "error: no uucode source specified and none found in zig cache"
    echo ""
    echo "usage: $0 <path-to-uucode-dir-or-url>"
    exit 1
  fi
fi

echo ""
echo "Regenerating grapheme tables..."
cd "$BUN_ROOT"
"$ZIG" build generate-grapheme-tables

echo ""
echo "Done. Updated files:"
echo "  src/deps/uucode/ (vendored library)"
echo "  src/string/immutable/grapheme_tables.zig (regenerated)"
echo ""
echo "Next steps:"
echo "  1. bun bd test test/js/bun/util/stringWidth.test.ts"
echo "  2. git add src/deps/uucode src/string/immutable/grapheme_tables.zig"
echo "  3. git commit -m 'Update uucode to <version>'"
scripts/verify-baseline-cpu.sh

@@ -0,0 +1,100 @@
#!/usr/bin/env bash
set -euo pipefail

# Verify that a Bun binary doesn't use CPU instructions beyond its baseline target.
# Uses QEMU user-mode emulation with restricted CPU features.
# Any illegal instruction (SIGILL) causes exit code 132 and fails the build.
#
# QEMU must be pre-installed in the CI image (see .buildkite/Dockerfile and
# scripts/bootstrap.sh).

ARCH=""
BINARY=""

while [[ $# -gt 0 ]]; do
  case $1 in
    --arch) ARCH="$2"; shift 2 ;;
    --binary) BINARY="$2"; shift 2 ;;
    *) echo "Unknown arg: $1"; exit 1 ;;
  esac
done

if [ -z "$ARCH" ] || [ -z "$BINARY" ]; then
  echo "Usage: $0 --arch <x64|aarch64> --binary <path>"
  exit 1
fi

if [ ! -f "$BINARY" ]; then
  echo "ERROR: Binary not found: $BINARY"
  exit 1
fi

# Select QEMU binary and CPU model
HOST_ARCH=$(uname -m)
if [ "$ARCH" = "x64" ]; then
  QEMU_BIN="qemu-x86_64"
  if [ -f "/usr/bin/qemu-x86_64-static" ]; then
    QEMU_BIN="qemu-x86_64-static"
  fi
  QEMU_CPU="Nehalem"
  CPU_DESC="Nehalem (SSE4.2, no AVX/AVX2/AVX512)"
elif [ "$ARCH" = "aarch64" ]; then
  QEMU_BIN="qemu-aarch64"
  if [ -f "/usr/bin/qemu-aarch64-static" ]; then
    QEMU_BIN="qemu-aarch64-static"
  fi
  # cortex-a53 is ARMv8.0-A (no LSE atomics, no SVE). It's the most widely
  # supported ARMv8.0 model across QEMU versions.
  QEMU_CPU="cortex-a53"
  CPU_DESC="Cortex-A53 (ARMv8.0-A+CRC, no LSE/SVE)"
else
  echo "ERROR: Unknown arch: $ARCH"
  exit 1
fi

if ! command -v "$QEMU_BIN" &>/dev/null; then
  echo "ERROR: $QEMU_BIN not found. It must be pre-installed in the CI image."
  exit 1
fi

BINARY_NAME=$(basename "$BINARY")

echo "--- Verifying $BINARY_NAME on $CPU_DESC"
echo " Binary: $BINARY"
echo " QEMU: $QEMU_BIN -cpu $QEMU_CPU"
echo " Host: $HOST_ARCH"
echo ""

run_test() {
  local label="$1"
  shift
  echo "+++ $BINARY_NAME: $label"
  if "$QEMU_BIN" -cpu "$QEMU_CPU" "$@"; then
    echo " PASS"
    return 0
  else
    local exit_code=$?
    echo ""
    if [ $exit_code -eq 132 ]; then
      echo " FAIL: Illegal instruction (SIGILL)"
      echo ""
      echo " The $BINARY_NAME binary uses CPU instructions not available on $QEMU_CPU."
      if [ "$ARCH" = "x64" ]; then
        echo " The baseline x64 build targets Nehalem (SSE4.2)."
        echo " AVX, AVX2, and AVX512 instructions are not allowed."
      else
        echo " The aarch64 build targets Cortex-A53 (ARMv8.0-A+CRC)."
        echo " LSE atomics, SVE, and dotprod instructions are not allowed."
      fi
    else
      echo " FAIL: exit code $exit_code"
    fi
    exit $exit_code
  fi
}

run_test "bun --version" "$BINARY" --version
run_test "bun -e eval" "$BINARY" -e "console.log(JSON.stringify({ok:1+1}))"

echo ""
echo " All checks passed for $BINARY_NAME on $QEMU_CPU."
scripts/verify-jit-stress-qemu.sh
@@ -0,0 +1,148 @@
#!/usr/bin/env bash
set -euo pipefail

# Run JSC JIT stress tests under QEMU to verify that JIT-compiled code
# doesn't use CPU instructions beyond the baseline target.
#
# This script exercises all JIT tiers (DFG, FTL, Wasm BBQ/OMG) and catches
# cases where JIT-generated code emits AVX instructions on x64 or LSE
# atomics on aarch64.
#
# See: test/js/bun/jsc-stress/ for the test fixtures.

ARCH=""
BINARY=""

while [[ $# -gt 0 ]]; do
  case $1 in
    --arch) ARCH="$2"; shift 2 ;;
    --binary) BINARY="$2"; shift 2 ;;
    *) echo "Unknown arg: $1"; exit 1 ;;
  esac
done

if [ -z "$ARCH" ] || [ -z "$BINARY" ]; then
  echo "Usage: $0 --arch <x64|aarch64> --binary <path>"
  exit 1
fi

if [ ! -f "$BINARY" ]; then
  echo "ERROR: Binary not found: $BINARY"
  exit 1
fi

# Convert to absolute path for use after pushd
BINARY="$(cd "$(dirname "$BINARY")" && pwd)/$(basename "$BINARY")"

# Select QEMU binary and CPU model
if [ "$ARCH" = "x64" ]; then
  QEMU_BIN="qemu-x86_64"
  if [ -f "/usr/bin/qemu-x86_64-static" ]; then
    QEMU_BIN="qemu-x86_64-static"
  fi
  QEMU_CPU="Nehalem"
  CPU_DESC="Nehalem (SSE4.2, no AVX/AVX2/AVX512)"
elif [ "$ARCH" = "aarch64" ]; then
  QEMU_BIN="qemu-aarch64"
  if [ -f "/usr/bin/qemu-aarch64-static" ]; then
    QEMU_BIN="qemu-aarch64-static"
  fi
  QEMU_CPU="cortex-a53"
  CPU_DESC="Cortex-A53 (ARMv8.0-A+CRC, no LSE/SVE)"
else
  echo "ERROR: Unknown arch: $ARCH"
  exit 1
fi

if ! command -v "$QEMU_BIN" &>/dev/null; then
  echo "ERROR: $QEMU_BIN not found. It must be pre-installed in the CI image."
  exit 1
fi

BINARY_NAME=$(basename "$BINARY")
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
FIXTURES_DIR="$REPO_ROOT/test/js/bun/jsc-stress/fixtures"
WASM_FIXTURES_DIR="$FIXTURES_DIR/wasm"
PRELOAD_PATH="$REPO_ROOT/test/js/bun/jsc-stress/preload.js"

echo "--- Running JSC JIT stress tests on $CPU_DESC"
echo " Binary: $BINARY"
echo " QEMU: $QEMU_BIN -cpu $QEMU_CPU"
echo ""

SIGILL_FAILURES=0
OTHER_FAILURES=0
PASSED=0

run_fixture() {
  local fixture="$1"
  local fixture_name
  fixture_name=$(basename "$fixture")

  echo "+++ $fixture_name"
  if "$QEMU_BIN" -cpu "$QEMU_CPU" "$BINARY" --preload "$PRELOAD_PATH" "$fixture" 2>&1; then
    echo " PASS"
    ((PASSED++))
    return 0
  else
    local exit_code=$?
    if [ $exit_code -eq 132 ]; then
      echo " FAIL: Illegal instruction (SIGILL)"
      echo ""
      echo " JIT-compiled code in $fixture_name uses CPU instructions not available on $QEMU_CPU."
      if [ "$ARCH" = "x64" ]; then
        echo " The baseline x64 build targets Nehalem (SSE4.2)."
        echo " JIT must not emit AVX, AVX2, or AVX512 instructions."
      else
        echo " The aarch64 build targets Cortex-A53 (ARMv8.0-A+CRC)."
        echo " JIT must not emit LSE atomics, SVE, or dotprod instructions."
      fi
      ((SIGILL_FAILURES++))
    else
      # Non-SIGILL failures are warnings (test issues, not CPU instruction issues)
      echo " WARN: exit code $exit_code (not a CPU instruction issue)"
      ((OTHER_FAILURES++))
    fi
    return $exit_code
  fi
}

# Run JS fixtures (DFG/FTL)
echo "--- JS fixtures (DFG/FTL)"
for fixture in "$FIXTURES_DIR"/*.js; do
  if [ -f "$fixture" ]; then
    run_fixture "$fixture" || true
  fi
done

# Run Wasm fixtures (BBQ/OMG)
echo "--- Wasm fixtures (BBQ/OMG)"
for fixture in "$WASM_FIXTURES_DIR"/*.js; do
  if [ -f "$fixture" ]; then
    # Wasm tests need to run from the wasm fixtures directory
    # because they reference .wasm files relative to the script
    pushd "$WASM_FIXTURES_DIR" > /dev/null
    run_fixture "$fixture" || true
    popd > /dev/null
  fi
done

echo ""
echo "--- Summary"
echo " Passed: $PASSED"
echo " SIGILL failures: $SIGILL_FAILURES"
echo " Other failures: $OTHER_FAILURES (warnings, not CPU instruction issues)"
echo ""

if [ $SIGILL_FAILURES -gt 0 ]; then
  echo " FAILED: JIT-generated code uses unsupported CPU instructions."
  exit 1
fi

if [ $OTHER_FAILURES -gt 0 ]; then
  echo " Some tests failed for reasons unrelated to CPU instructions."
  echo " These are warnings and do not indicate JIT instruction issues."
fi

echo " All JIT stress tests passed on $QEMU_CPU (no SIGILL)."
@@ -15,6 +15,7 @@ hash: u64 = 0,
is_executable: bool = false,
source_map_index: u32 = std.math.maxInt(u32),
bytecode_index: u32 = std.math.maxInt(u32),
module_info_index: u32 = std.math.maxInt(u32),
output_kind: jsc.API.BuildArtifact.OutputKind,
/// Relative
dest_path: []const u8 = "",

@@ -210,6 +211,7 @@ pub const Options = struct {
hash: ?u64 = null,
source_map_index: ?u32 = null,
bytecode_index: ?u32 = null,
module_info_index: ?u32 = null,
output_path: string,
source_index: Index.Optional = .none,
size: ?usize = null,

@@ -251,6 +253,7 @@ pub fn init(options: Options) OutputFile {
.hash = options.hash orelse 0,
.output_kind = options.output_kind,
.bytecode_index = options.bytecode_index orelse std.math.maxInt(u32),
.module_info_index = options.module_info_index orelse std.math.maxInt(u32),
.source_map_index = options.source_map_index orelse std.math.maxInt(u32),
.is_executable = options.is_executable,
.value = switch (options.data) {

@@ -92,6 +92,10 @@ pub const StandaloneModuleGraph = struct {
contents: Schema.StringPointer = .{},
sourcemap: Schema.StringPointer = .{},
bytecode: Schema.StringPointer = .{},
module_info: Schema.StringPointer = .{},
/// The file path used when generating bytecode (e.g., "B:/~BUN/root/app.js").
/// Must match exactly at runtime for bytecode cache hits.
bytecode_origin_path: Schema.StringPointer = .{},
encoding: Encoding = .latin1,
loader: bun.options.Loader = .file,
module_format: ModuleFormat = .none,

@@ -159,6 +163,10 @@ pub const StandaloneModuleGraph = struct {
encoding: Encoding = .binary,
wtf_string: bun.String = bun.String.empty,
bytecode: []u8 = "",
module_info: []u8 = "",
/// The file path used when generating bytecode (e.g., "B:/~BUN/root/app.js").
/// Must match exactly at runtime for bytecode cache hits.
bytecode_origin_path: []const u8 = "",
module_format: ModuleFormat = .none,
side: FileSide = .server,

@@ -333,6 +341,8 @@ pub const StandaloneModuleGraph = struct {
else
    .none,
.bytecode = if (module.bytecode.length > 0) @constCast(sliceTo(raw_bytes, module.bytecode)) else &.{},
.module_info = if (module.module_info.length > 0) @constCast(sliceTo(raw_bytes, module.module_info)) else &.{},
.bytecode_origin_path = if (module.bytecode_origin_path.length > 0) sliceToZ(raw_bytes, module.bytecode_origin_path) else "",
.module_format = module.module_format,
.side = module.side,
},

@@ -382,6 +392,8 @@ pub const StandaloneModuleGraph = struct {
} else if (output_file.output_kind == .bytecode) {
    // Allocate up to 256 byte alignment for bytecode
    string_builder.cap += (output_file.value.buffer.bytes.len + 255) / 256 * 256 + 256;
} else if (output_file.output_kind == .module_info) {
    string_builder.cap += output_file.value.buffer.bytes.len;
} else {
    if (entry_point_id == null) {
        if (output_file.side == null or output_file.side.? == .server) {

@@ -477,6 +489,19 @@ pub const StandaloneModuleGraph = struct {
    }
};

// Embed module_info for ESM bytecode
const module_info: StringPointer = brk: {
    if (output_file.module_info_index != std.math.maxInt(u32)) {
        const mi_bytes = output_files[output_file.module_info_index].value.buffer.bytes;
        const offset = string_builder.len;
        const writable = string_builder.writable();
        @memcpy(writable[0..mi_bytes.len], mi_bytes[0..mi_bytes.len]);
        string_builder.len += mi_bytes.len;
        break :brk StringPointer{ .offset = @truncate(offset), .length = @truncate(mi_bytes.len) };
    }
    break :brk .{};
};

if (comptime bun.Environment.is_canary or bun.Environment.isDebug) {
    if (bun.env_var.BUN_FEATURE_FLAG_DUMP_CODE.get()) |dump_code_dir| {
        const buf = bun.path_buffer_pool.get();

@@ -498,6 +523,13 @@ pub const StandaloneModuleGraph = struct {
    }
}

// When there's bytecode, store the bytecode output file's path as bytecode_origin_path.
// This path was used to generate the bytecode cache and must match at runtime.
const bytecode_origin_path: StringPointer = if (output_file.bytecode_index != std.math.maxInt(u32))
    string_builder.appendCountZ(output_files[output_file.bytecode_index].dest_path)
else
    .{};

var module = CompiledModuleGraphFile{
    .name = string_builder.fmtAppendCountZ("{s}{s}", .{
        prefix,

@@ -515,6 +547,8 @@ pub const StandaloneModuleGraph = struct {
    else => .none,
} else .none,
.bytecode = bytecode,
.module_info = module_info,
.bytecode_origin_path = bytecode_origin_path,
.side = switch (output_file.side orelse .server) {
    .server => .server,
    .client => .client,
@@ -2,7 +2,10 @@

const Self = @This();

#heap: if (safety_checks) Owned(*DebugHeap) else *mimalloc.Heap,
const safety_checks = bun.Environment.isDebug or bun.Environment.enable_asan;

#heap: *mimalloc.Heap,
thread_id: if (safety_checks) std.Thread.Id else void,

/// Uses the default thread-local heap. This type is zero-sized.
///

@@ -20,18 +23,18 @@ pub const Default = struct {
///
/// This type is a `GenericAllocator`; see `src/allocators.zig`.
pub const Borrowed = struct {
    #heap: BorrowedHeap,
    #heap: *mimalloc.Heap,

    pub fn allocator(self: Borrowed) std.mem.Allocator {
        return .{ .ptr = self.#heap, .vtable = &c_allocator_vtable };
        return .{ .ptr = self.#heap, .vtable = c_allocator_vtable };
    }

    pub fn getDefault() Borrowed {
        return .{ .#heap = getThreadHeap() };
        return .{ .#heap = mimalloc.mi_heap_main() };
    }

    pub fn gc(self: Borrowed) void {
        mimalloc.mi_heap_collect(self.getMimallocHeap(), false);
        mimalloc.mi_heap_collect(self.#heap, false);
    }

    pub fn helpCatchMemoryIssues(self: Borrowed) void {

@@ -41,30 +44,17 @@ pub const Borrowed = struct {
        }
    }

    pub fn ownsPtr(self: Borrowed, ptr: *const anyopaque) bool {
        return mimalloc.mi_heap_check_owned(self.getMimallocHeap(), ptr);
    }

    fn fromOpaque(ptr: *anyopaque) Borrowed {
        return .{ .#heap = @ptrCast(@alignCast(ptr)) };
    }

    fn getMimallocHeap(self: Borrowed) *mimalloc.Heap {
        return if (comptime safety_checks) self.#heap.inner else self.#heap;
    }

    fn assertThreadLock(self: Borrowed) void {
        if (comptime safety_checks) self.#heap.thread_lock.assertLocked();
    }

    fn alignedAlloc(self: Borrowed, len: usize, alignment: Alignment) ?[*]u8 {
        log("Malloc: {d}\n", .{len});

        const heap = self.getMimallocHeap();
        const ptr: ?*anyopaque = if (mimalloc.mustUseAlignedAlloc(alignment))
            mimalloc.mi_heap_malloc_aligned(heap, len, alignment.toByteUnits())
            mimalloc.mi_heap_malloc_aligned(self.#heap, len, alignment.toByteUnits())
        else
            mimalloc.mi_heap_malloc(heap, len);
            mimalloc.mi_heap_malloc(self.#heap, len);

        if (comptime bun.Environment.isDebug) {
            const usable = mimalloc.mi_malloc_usable_size(ptr);

@@ -89,42 +79,17 @@ pub const Borrowed = struct {
    }
};

const BorrowedHeap = if (safety_checks) *DebugHeap else *mimalloc.Heap;

const DebugHeap = struct {
    inner: *mimalloc.Heap,
    thread_lock: bun.safety.ThreadLock,

    pub const deinit = void;
};

threadlocal var thread_heap: if (safety_checks) ?DebugHeap else void = if (safety_checks) null;

fn getThreadHeap() BorrowedHeap {
    if (comptime !safety_checks) return mimalloc.mi_heap_get_default();
    if (thread_heap == null) {
        thread_heap = .{
            .inner = mimalloc.mi_heap_get_default(),
            .thread_lock = .initLocked(),
        };
    }
    return &thread_heap.?;
}

const log = bun.Output.scoped(.mimalloc, .hidden);

pub fn allocator(self: Self) std.mem.Allocator {
    self.assertThreadOwnership();
    return self.borrow().allocator();
}

pub fn borrow(self: Self) Borrowed {
    return .{ .#heap = if (comptime safety_checks) self.#heap.get() else self.#heap };
    return .{ .#heap = self.#heap };
}

/// Internally, mimalloc calls mi_heap_get_default()
/// to get the default heap.
/// It uses pthread_getspecific to do that.
/// We can save those extra calls if we just do it once in here
pub fn getThreadLocalDefault() std.mem.Allocator {
    if (bun.Environment.enable_asan) return bun.default_allocator;
    return Borrowed.getDefault().allocator();

@@ -157,22 +122,15 @@ pub fn dumpStats(_: Self) void {
}

pub fn deinit(self: *Self) void {
    const mimalloc_heap = self.borrow().getMimallocHeap();
    if (comptime safety_checks) {
        self.#heap.deinit();
    }
    mimalloc.mi_heap_destroy(mimalloc_heap);
    mimalloc.mi_heap_destroy(self.#heap);
    self.* = undefined;
}

pub fn init() Self {
    const mimalloc_heap = mimalloc.mi_heap_new() orelse bun.outOfMemory();
    if (comptime !safety_checks) return .{ .#heap = mimalloc_heap };
    const heap: Owned(*DebugHeap) = .new(.{
        .inner = mimalloc_heap,
        .thread_lock = .initLocked(),
    });
    return .{ .#heap = heap };
    return .{
        .#heap = mimalloc.mi_heap_new() orelse bun.outOfMemory(),
        .thread_id = if (safety_checks) std.Thread.getCurrentId() else {},
    };
}

pub fn gc(self: Self) void {

@@ -183,8 +141,16 @@ pub fn helpCatchMemoryIssues(self: Self) void {
    self.borrow().helpCatchMemoryIssues();
}

pub fn ownsPtr(self: Self, ptr: *const anyopaque) bool {
    return self.borrow().ownsPtr(ptr);
fn assertThreadOwnership(self: Self) void {
    if (comptime safety_checks) {
        const current_thread = std.Thread.getCurrentId();
        if (current_thread != self.thread_id) {
            std.debug.panic(
                "MimallocArena used from wrong thread: arena belongs to thread {d}, but current thread is {d}",
                .{ self.thread_id, current_thread },
            );
        }
    }
}

fn alignedAllocSize(ptr: [*]u8) usize {

@@ -193,13 +159,10 @@ fn alignedAllocSize(ptr: [*]u8) usize {

fn vtable_alloc(ptr: *anyopaque, len: usize, alignment: Alignment, _: usize) ?[*]u8 {
    const self: Borrowed = .fromOpaque(ptr);
    self.assertThreadLock();
    return self.alignedAlloc(len, alignment);
}

fn vtable_resize(ptr: *anyopaque, buf: []u8, _: Alignment, new_len: usize, _: usize) bool {
    const self: Borrowed = .fromOpaque(ptr);
    self.assertThreadLock();
fn vtable_resize(_: *anyopaque, buf: []u8, _: Alignment, new_len: usize, _: usize) bool {
    return mimalloc.mi_expand(buf.ptr, new_len) != null;
}

@@ -223,39 +186,17 @@ fn vtable_free(
    }
}

/// Attempt to expand or shrink memory, allowing relocation.
///
/// `memory.len` must equal the length requested from the most recent
/// successful call to `alloc`, `resize`, or `remap`. `alignment` must
/// equal the same value that was passed as the `alignment` parameter to
/// the original `alloc` call.
///
/// A non-`null` return value indicates the resize was successful. The
/// allocation may have same address, or may have been relocated. In either
/// case, the allocation now has size of `new_len`. A `null` return value
/// indicates that the resize would be equivalent to allocating new memory,
/// copying the bytes from the old memory, and then freeing the old memory.
/// In such case, it is more efficient for the caller to perform the copy.
///
/// `new_len` must be greater than zero.
///
/// `ret_addr` is optionally provided as the first return address of the
/// allocation call stack. If the value is `0` it means no return address
/// has been provided.
fn vtable_remap(ptr: *anyopaque, buf: []u8, alignment: Alignment, new_len: usize, _: usize) ?[*]u8 {
    const self: Borrowed = .fromOpaque(ptr);
    self.assertThreadLock();
    const heap = self.getMimallocHeap();
    const aligned_size = alignment.toByteUnits();
    const value = mimalloc.mi_heap_realloc_aligned(heap, buf.ptr, new_len, aligned_size);
    const value = mimalloc.mi_heap_realloc_aligned(self.#heap, buf.ptr, new_len, alignment.toByteUnits());
    return @ptrCast(value);
}

pub fn isInstance(alloc: std.mem.Allocator) bool {
    return alloc.vtable == &c_allocator_vtable;
    return alloc.vtable == c_allocator_vtable;
}

const c_allocator_vtable = std.mem.Allocator.VTable{
const c_allocator_vtable = &std.mem.Allocator.VTable{
    .alloc = vtable_alloc,
    .resize = vtable_resize,
    .remap = vtable_remap,

@@ -268,5 +209,3 @@ const Alignment = std.mem.Alignment;
const bun = @import("bun");
const assert = bun.assert;
const mimalloc = bun.mimalloc;
const Owned = bun.ptr.Owned;
const safety_checks = bun.Environment.ci_assert;
@@ -60,17 +60,29 @@ pub const Heap = opaque {
        return mi_heap_realloc(self, p, newsize);
    }

    pub fn isOwned(self: *Heap, p: ?*anyopaque) bool {
        return mi_heap_check_owned(self, p);
    pub fn isOwned(self: *Heap, p: ?*const anyopaque) bool {
        return mi_heap_contains(self, p);
    }
};
pub extern fn mi_heap_new() ?*Heap;
pub extern fn mi_heap_delete(heap: *Heap) void;
pub extern fn mi_heap_destroy(heap: *Heap) void;
pub extern fn mi_heap_set_default(heap: *Heap) *Heap;
pub extern fn mi_heap_get_default() *Heap;
pub extern fn mi_heap_get_backing() *Heap;
pub extern fn mi_heap_collect(heap: *Heap, force: bool) void;
pub extern fn mi_heap_main() *Heap;

// Thread-local heap (theap) API - new in mimalloc v3
pub const THeap = opaque {};
pub extern fn mi_theap_get_default() *THeap;
pub extern fn mi_theap_set_default(theap: *THeap) *THeap;
pub extern fn mi_theap_collect(theap: *THeap, force: bool) void;
pub extern fn mi_theap_malloc(theap: *THeap, size: usize) ?*anyopaque;
pub extern fn mi_theap_zalloc(theap: *THeap, size: usize) ?*anyopaque;
pub extern fn mi_theap_calloc(theap: *THeap, count: usize, size: usize) ?*anyopaque;
pub extern fn mi_theap_malloc_small(theap: *THeap, size: usize) ?*anyopaque;
pub extern fn mi_theap_malloc_aligned(theap: *THeap, size: usize, alignment: usize) ?*anyopaque;
pub extern fn mi_theap_realloc(theap: *THeap, p: ?*anyopaque, newsize: usize) ?*anyopaque;
pub extern fn mi_theap_destroy(theap: *THeap) void;
pub extern fn mi_heap_theap(heap: *Heap) *THeap;
pub extern fn mi_heap_malloc(heap: *Heap, size: usize) ?*anyopaque;
pub extern fn mi_heap_zalloc(heap: *Heap, size: usize) ?*anyopaque;
pub extern fn mi_heap_calloc(heap: *Heap, count: usize, size: usize) ?*anyopaque;

@@ -102,8 +114,7 @@ pub extern fn mi_heap_rezalloc_aligned(heap: *Heap, p: ?*anyopaque, newsize: usi
pub extern fn mi_heap_rezalloc_aligned_at(heap: *Heap, p: ?*anyopaque, newsize: usize, alignment: usize, offset: usize) ?*anyopaque;
pub extern fn mi_heap_recalloc_aligned(heap: *Heap, p: ?*anyopaque, newcount: usize, size: usize, alignment: usize) ?*anyopaque;
pub extern fn mi_heap_recalloc_aligned_at(heap: *Heap, p: ?*anyopaque, newcount: usize, size: usize, alignment: usize, offset: usize) ?*anyopaque;
pub extern fn mi_heap_contains_block(heap: *Heap, p: *const anyopaque) bool;
pub extern fn mi_heap_check_owned(heap: *Heap, p: *const anyopaque) bool;
pub extern fn mi_heap_contains(heap: *const Heap, p: ?*const anyopaque) bool;
pub extern fn mi_check_owned(p: ?*const anyopaque) bool;
pub const struct_mi_heap_area_s = extern struct {
    blocks: ?*anyopaque,

src/analyze_transpiled_module.zig
@@ -0,0 +1,513 @@
pub const RecordKind = enum(u8) {
    /// var_name
    declared_variable,
    /// let_name
    lexical_variable,
    /// module_name, import_name, local_name
    import_info_single,
    /// module_name, import_name, local_name
    import_info_single_type_script,
    /// module_name, import_name = '*', local_name
    import_info_namespace,
    /// export_name, import_name, module_name
    export_info_indirect,
    /// export_name, local_name, padding (for local => indirect conversion)
    export_info_local,
    /// export_name, module_name
    export_info_namespace,
    /// module_name
    export_info_star,
    _,

    pub fn len(record: RecordKind) !usize {
        return switch (record) {
            .declared_variable, .lexical_variable => 1,
            .import_info_single => 3,
            .import_info_single_type_script => 3,
            .import_info_namespace => 3,
            .export_info_indirect => 3,
            .export_info_local => 3,
            .export_info_namespace => 2,
            .export_info_star => 1,
            else => return error.InvalidRecordKind,
        };
    }
};

pub const Flags = packed struct(u8) {
    contains_import_meta: bool = false,
    is_typescript: bool = false,
    _padding: u6 = 0,
};

pub const ModuleInfoDeserialized = struct {
    strings_buf: []const u8,
    strings_lens: []align(1) const u32,
    requested_modules_keys: []align(1) const StringID,
    requested_modules_values: []align(1) const ModuleInfo.FetchParameters,
    buffer: []align(1) const StringID,
    record_kinds: []align(1) const RecordKind,
    flags: Flags,
    owner: union(enum) {
        module_info,
        allocated_slice: struct {
            slice: []const u8,
            allocator: std.mem.Allocator,
        },
    },
    pub fn deinit(self: *ModuleInfoDeserialized) void {
        switch (self.owner) {
            .module_info => {
                const mi: *ModuleInfo = @fieldParentPtr("_deserialized", self);
                mi.destroy();
            },
            .allocated_slice => |as| {
                as.allocator.free(as.slice);
                as.allocator.destroy(self);
            },
        }
    }

    inline fn eat(rem: *[]const u8, len: usize) ![]const u8 {
        if (rem.*.len < len) return error.BadModuleInfo;
        const res = rem.*[0..len];
        rem.* = rem.*[len..];
        return res;
    }
    inline fn eatC(rem: *[]const u8, comptime len: usize) !*const [len]u8 {
        if (rem.*.len < len) return error.BadModuleInfo;
        const res = rem.*[0..len];
        rem.* = rem.*[len..];
        return res;
    }
    pub fn create(source: []const u8, gpa: std.mem.Allocator) !*ModuleInfoDeserialized {
        const duped = try gpa.dupe(u8, source);
        errdefer gpa.free(duped);
        var rem: []const u8 = duped;
        const res = try gpa.create(ModuleInfoDeserialized);
        errdefer gpa.destroy(res);

        const record_kinds_len = std.mem.readInt(u32, try eatC(&rem, 4), .little);
        const record_kinds = std.mem.bytesAsSlice(RecordKind, try eat(&rem, record_kinds_len * @sizeOf(RecordKind)));
        _ = try eat(&rem, (4 - (record_kinds_len % 4)) % 4); // alignment padding

        const buffer_len = std.mem.readInt(u32, try eatC(&rem, 4), .little);
        const buffer = std.mem.bytesAsSlice(StringID, try eat(&rem, buffer_len * @sizeOf(StringID)));

        const requested_modules_len = std.mem.readInt(u32, try eatC(&rem, 4), .little);
        const requested_modules_keys = std.mem.bytesAsSlice(StringID, try eat(&rem, requested_modules_len * @sizeOf(StringID)));
        const requested_modules_values = std.mem.bytesAsSlice(ModuleInfo.FetchParameters, try eat(&rem, requested_modules_len * @sizeOf(ModuleInfo.FetchParameters)));

        const flags: Flags = @bitCast((try eatC(&rem, 1))[0]);
        _ = try eat(&rem, 3); // alignment padding

        const strings_len = std.mem.readInt(u32, try eatC(&rem, 4), .little);
        const strings_lens = std.mem.bytesAsSlice(u32, try eat(&rem, strings_len * @sizeOf(u32)));
        const strings_buf = rem;

        res.* = .{
            .strings_buf = strings_buf,
            .strings_lens = strings_lens,
            .requested_modules_keys = requested_modules_keys,
            .requested_modules_values = requested_modules_values,
            .buffer = buffer,
            .record_kinds = record_kinds,
            .flags = flags,
            .owner = .{ .allocated_slice = .{
                .slice = duped,
                .allocator = gpa,
            } },
        };
        return res;
    }

    /// Wrapper around `create` for use when loading from a cache (transpiler cache or standalone module graph).
    /// Returns `null` instead of panicking on corrupt/truncated data.
    pub fn createFromCachedRecord(source: []const u8, gpa: std.mem.Allocator) ?*ModuleInfoDeserialized {
        return create(source, gpa) catch |e| switch (e) {
            error.OutOfMemory => bun.outOfMemory(),
            error.BadModuleInfo => null,
        };
    }

    pub fn serialize(self: *const ModuleInfoDeserialized, writer: anytype) !void {
        try writer.writeInt(u32, @truncate(self.record_kinds.len), .little);
        try writer.writeAll(std.mem.sliceAsBytes(self.record_kinds));
        try writer.writeByteNTimes(0, (4 - (self.record_kinds.len % 4)) % 4); // alignment padding

        try writer.writeInt(u32, @truncate(self.buffer.len), .little);
        try writer.writeAll(std.mem.sliceAsBytes(self.buffer));

        try writer.writeInt(u32, @truncate(self.requested_modules_keys.len), .little);
        try writer.writeAll(std.mem.sliceAsBytes(self.requested_modules_keys));
        try writer.writeAll(std.mem.sliceAsBytes(self.requested_modules_values));

        try writer.writeByte(@bitCast(self.flags));
        try writer.writeByteNTimes(0, 3); // alignment padding

        try writer.writeInt(u32, @truncate(self.strings_lens.len), .little);
        try writer.writeAll(std.mem.sliceAsBytes(self.strings_lens));
        try writer.writeAll(self.strings_buf);
    }
};
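
`serialize` and `create` agree on a simple little-endian layout: a u32 count, the record kinds (one byte each) padded to 4 bytes, the StringID buffer, the requested-modules keys and values, a flags byte padded to 4, then the string lengths and the raw string bytes. A TypeScript sketch of walking that header (illustrative, not part of the Zig source):

  function readModuleInfoHeader(buf: Uint8Array) {
    const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength);
    let off = 0;
    const recordKindsLen = view.getUint32(off, true); off += 4;
    off += recordKindsLen;                 // one byte per RecordKind
    off += (4 - (recordKindsLen % 4)) % 4; // alignment padding
    const bufferLen = view.getUint32(off, true); off += 4 + bufferLen * 4;      // StringID entries (u32)
    const requestedLen = view.getUint32(off, true); off += 4 + requestedLen * 8; // keys + fetch params
    const flags = buf[off]; off += 1 + 3;  // flags byte + padding
    const stringsLen = view.getUint32(off, true); off += 4;
    return { recordKindsLen, bufferLen, requestedLen, flags, stringsLen, stringsLensAt: off };
  }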
|
||||
|
||||
const StringMapKey = enum(u32) {
|
||||
_,
|
||||
};
|
||||
pub const StringContext = struct {
|
||||
strings_buf: []const u8,
|
||||
strings_lens: []const u32,
|
||||
|
||||
pub fn hash(_: @This(), s: []const u8) u32 {
|
||||
return @as(u32, @truncate(std.hash.Wyhash.hash(0, s)));
|
||||
}
|
||||
pub fn eql(self: @This(), fetch_key: []const u8, item_key: StringMapKey, item_i: usize) bool {
|
||||
return bun.strings.eqlLong(fetch_key, self.strings_buf[@intFromEnum(item_key)..][0..self.strings_lens[item_i]], true);
|
||||
}
|
||||
};
|
||||
|
||||
pub const ModuleInfo = struct {
|
||||
/// all strings in wtf-8. index in hashmap = StringID
|
||||
gpa: std.mem.Allocator,
|
||||
strings_map: std.ArrayHashMapUnmanaged(StringMapKey, void, void, true),
|
||||
strings_buf: std.ArrayListUnmanaged(u8),
|
||||
strings_lens: std.ArrayListUnmanaged(u32),
|
||||
requested_modules: std.AutoArrayHashMap(StringID, FetchParameters),
|
||||
buffer: std.ArrayListUnmanaged(StringID),
|
||||
record_kinds: std.ArrayListUnmanaged(RecordKind),
|
||||
flags: Flags,
|
||||
exported_names: std.AutoArrayHashMapUnmanaged(StringID, void),
|
||||
finalized: bool = false,
|
||||
|
||||
/// only initialized after .finalize() is called
|
||||
_deserialized: ModuleInfoDeserialized,
|
||||
|
||||
pub fn asDeserialized(self: *ModuleInfo) *ModuleInfoDeserialized {
|
||||
bun.assert(self.finalized);
|
||||
return &self._deserialized;
|
||||
}
|
||||
|
||||
pub const FetchParameters = enum(u32) {
|
||||
none = std.math.maxInt(u32),
|
||||
javascript = std.math.maxInt(u32) - 1,
|
||||
webassembly = std.math.maxInt(u32) - 2,
|
||||
json = std.math.maxInt(u32) - 3,
|
||||
_, // host_defined: cast to StringID
|
||||
pub fn hostDefined(value: StringID) FetchParameters {
|
||||
return @enumFromInt(@intFromEnum(value));
|
||||
}
|
||||
};
|
||||
|
||||
pub const VarKind = enum { declared, lexical };
|
||||
pub fn addVar(self: *ModuleInfo, name: StringID, kind: VarKind) !void {
|
||||
switch (kind) {
|
||||
.declared => try self.addDeclaredVariable(name),
|
||||
.lexical => try self.addLexicalVariable(name),
|
||||
}
|
||||
}
|
||||
|
||||
fn _addRecord(self: *ModuleInfo, kind: RecordKind, data: []const StringID) !void {
|
||||
bun.assert(!self.finalized);
|
||||
bun.assert(data.len == kind.len() catch unreachable);
|
||||
try self.record_kinds.append(self.gpa, kind);
|
||||
try self.buffer.appendSlice(self.gpa, data);
|
||||
}
|
||||
pub fn addDeclaredVariable(self: *ModuleInfo, id: StringID) !void {
|
||||
try self._addRecord(.declared_variable, &.{id});
|
||||
}
|
||||
pub fn addLexicalVariable(self: *ModuleInfo, id: StringID) !void {
|
||||
try self._addRecord(.lexical_variable, &.{id});
|
||||
}
|
||||
pub fn addImportInfoSingle(self: *ModuleInfo, module_name: StringID, import_name: StringID, local_name: StringID, only_used_as_type: bool) !void {
|
||||
try self._addRecord(if (only_used_as_type) .import_info_single_type_script else .import_info_single, &.{ module_name, import_name, local_name });
|
||||
}
|
||||
pub fn addImportInfoNamespace(self: *ModuleInfo, module_name: StringID, local_name: StringID) !void {
|
||||
try self._addRecord(.import_info_namespace, &.{ module_name, try self.str("*"), local_name });
|
||||
}
|
||||
pub fn addExportInfoIndirect(self: *ModuleInfo, export_name: StringID, import_name: StringID, module_name: StringID) !void {
|
||||
if (try self._hasOrAddExportedName(export_name)) return; // a syntax error will be emitted later in this case
|
||||
try self._addRecord(.export_info_indirect, &.{ export_name, import_name, module_name });
|
||||
}
|
||||
pub fn addExportInfoLocal(self: *ModuleInfo, export_name: StringID, local_name: StringID) !void {
|
||||
if (try self._hasOrAddExportedName(export_name)) return; // a syntax error will be emitted later in this case
|
||||
try self._addRecord(.export_info_local, &.{ export_name, local_name, @enumFromInt(std.math.maxInt(u32)) });
|
||||
}
|
||||
pub fn addExportInfoNamespace(self: *ModuleInfo, export_name: StringID, module_name: StringID) !void {
|
||||
if (try self._hasOrAddExportedName(export_name)) return; // a syntax error will be emitted later in this case
|
||||
try self._addRecord(.export_info_namespace, &.{ export_name, module_name });
|
||||
}
|
||||
pub fn addExportInfoStar(self: *ModuleInfo, module_name: StringID) !void {
|
||||
try self._addRecord(.export_info_star, &.{module_name});
|
||||
}
|
||||
|
||||
pub fn _hasOrAddExportedName(self: *ModuleInfo, name: StringID) !bool {
|
||||
if (try self.exported_names.fetchPut(self.gpa, name, {}) != null) return true;
|
||||
return false;
|
||||
}
|
||||
|
||||
pub fn create(gpa: std.mem.Allocator, is_typescript: bool) !*ModuleInfo {
|
||||
const res = try gpa.create(ModuleInfo);
|
||||
res.* = ModuleInfo.init(gpa, is_typescript);
|
||||
return res;
|
||||
}
|
||||
fn init(allocator: std.mem.Allocator, is_typescript: bool) ModuleInfo {
|
||||
return .{
|
||||
.gpa = allocator,
|
||||
.strings_map = .{},
|
||||
.strings_buf = .{},
|
||||
.strings_lens = .{},
|
||||
.exported_names = .{},
|
||||
.requested_modules = std.AutoArrayHashMap(StringID, FetchParameters).init(allocator),
|
||||
.buffer = .empty,
|
||||
.record_kinds = .empty,
|
||||
.flags = .{ .contains_import_meta = false, .is_typescript = is_typescript },
|
||||
._deserialized = undefined,
|
||||
};
|
||||
}
|
||||
fn deinit(self: *ModuleInfo) void {
|
||||
self.strings_map.deinit(self.gpa);
|
||||
self.strings_buf.deinit(self.gpa);
|
||||
self.strings_lens.deinit(self.gpa);
|
||||
self.exported_names.deinit(self.gpa);
|
||||
self.requested_modules.deinit();
|
||||
self.buffer.deinit(self.gpa);
|
||||
self.record_kinds.deinit(self.gpa);
|
||||
}
|
||||
pub fn destroy(self: *ModuleInfo) void {
|
||||
const alloc = self.gpa;
|
||||
self.deinit();
|
||||
alloc.destroy(self);
|
||||
}
|
||||
pub fn str(self: *ModuleInfo, value: []const u8) !StringID {
|
||||
try self.strings_buf.ensureUnusedCapacity(self.gpa, value.len);
|
||||
try self.strings_lens.ensureUnusedCapacity(self.gpa, 1);
|
||||
const gpres = try self.strings_map.getOrPutAdapted(self.gpa, value, StringContext{
|
||||
.strings_buf = self.strings_buf.items,
|
||||
.strings_lens = self.strings_lens.items,
|
||||
});
|
||||
if (gpres.found_existing) return @enumFromInt(@as(u32, @intCast(gpres.index)));
|
||||
|
||||
gpres.key_ptr.* = @enumFromInt(@as(u32, @truncate(self.strings_buf.items.len)));
|
||||
gpres.value_ptr.* = {};
|
||||
self.strings_buf.appendSliceAssumeCapacity(value);
|
||||
self.strings_lens.appendAssumeCapacity(@as(u32, @truncate(value.len)));
|
||||
return @enumFromInt(@as(u32, @intCast(gpres.index)));
|
||||
}
|
    pub fn requestModule(self: *ModuleInfo, import_record_path: StringID, fetch_parameters: FetchParameters) !void {
        // JSC only records the attributes of the first import with a given
        // import_record_path, so only put if the entry does not already exist.
        const gpres = try self.requested_modules.getOrPut(import_record_path);
        if (!gpres.found_existing) gpres.value_ptr.* = fetch_parameters;
    }
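    // Illustrative (not part of the original source): given
    //   import a from "./x" with { type: "json" };
    //   import b from "./x";
    // only the first import's fetch parameters (.json) are recorded for "./x".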

    /// Replace all occurrences of old_id with new_id in records and requested_modules.
    /// Used to fix up cross-chunk import specifiers after final paths are computed.
    pub fn replaceStringID(self: *ModuleInfo, old_id: StringID, new_id: StringID) void {
        bun.assert(!self.finalized);
        // Replace in record buffer
        for (self.buffer.items) |*item| {
            if (item.* == old_id) item.* = new_id;
        }
        // Replace in requested_modules keys (preserving insertion order)
        if (self.requested_modules.getIndex(old_id)) |idx| {
            self.requested_modules.keys()[idx] = new_id;
            self.requested_modules.reIndex() catch {};
        }
    }
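    // Illustrative (hypothetical specifiers, not part of the original source):
    // if a cross-chunk import was interned before its final hashed path was
    // known, replaceStringID(old_id, str("./chunk-1a2b.js")) rewrites every
    // record and requested-module key that still referenced the old ID.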

    /// Find any exports marked as 'local' that are actually 'indirect' and fix them.
    pub fn finalize(self: *ModuleInfo) !void {
        bun.assert(!self.finalized);
        var local_name_to_module_name = std.AutoArrayHashMap(StringID, struct { module_name: StringID, import_name: StringID, record_kinds_idx: usize }).init(bun.default_allocator);
        defer local_name_to_module_name.deinit();
        {
            var i: usize = 0;
            for (self.record_kinds.items, 0..) |k, idx| {
                if (k == .import_info_single or k == .import_info_single_type_script) {
                    try local_name_to_module_name.put(self.buffer.items[i + 2], .{ .module_name = self.buffer.items[i], .import_name = self.buffer.items[i + 1], .record_kinds_idx = idx });
                }
                i += k.len() catch unreachable;
            }
        }

        {
            var i: usize = 0;
            for (self.record_kinds.items) |*k| {
                if (k.* == .export_info_local) {
                    if (local_name_to_module_name.get(self.buffer.items[i + 1])) |ip| {
                        k.* = .export_info_indirect;
                        self.buffer.items[i + 1] = ip.import_name;
                        self.buffer.items[i + 2] = ip.module_name;
                        // In TypeScript, the re-exported import may target a type-only
                        // export that was elided. Convert the import to SingleTypeScript
                        // so JSC tolerates it being NotFound during linking.
                        if (self.flags.is_typescript) {
                            self.record_kinds.items[ip.record_kinds_idx] = .import_info_single_type_script;
                        }
                    }
                }
                i += k.len() catch unreachable;
            }
        }

        self._deserialized = .{
            .strings_buf = self.strings_buf.items,
            .strings_lens = self.strings_lens.items,
            .requested_modules_keys = self.requested_modules.keys(),
            .requested_modules_values = self.requested_modules.values(),
            .buffer = self.buffer.items,
            .record_kinds = self.record_kinds.items,
            .flags = self.flags,
            .owner = .module_info,
        };

        self.finalized = true;
    }
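    // Illustrative example (not part of the original source) of what finalize()
    // fixes up:
    //
    //   import { a } from "./m";
    //   export { a };
    //
    // `export { a }` is recorded as export_info_local while parsing; finalize()
    // rewrites it to export_info_indirect with import_name "a" and module_name
    // "./m", because the exported local name is itself an import.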
};
pub const StringID = enum(u32) {
    star_default = std.math.maxInt(u32),
    star_namespace = std.math.maxInt(u32) - 1,
    _,
};

export fn zig__renderDiff(expected_ptr: [*:0]const u8, expected_len: usize, received_ptr: [*:0]const u8, received_len: usize, globalThis: *bun.jsc.JSGlobalObject) void {
    const formatter = DiffFormatter{
        .received_string = received_ptr[0..received_len],
        .expected_string = expected_ptr[0..expected_len],
        .globalThis = globalThis,
    };
    bun.Output.errorWriter().print("DIFF:\n{any}\n", .{formatter}) catch {};
}

export fn zig__ModuleInfoDeserialized__toJSModuleRecord(
    globalObject: *bun.jsc.JSGlobalObject,
    vm: *bun.jsc.VM,
    module_key: *const IdentifierArray,
    source_code: *const SourceCode,
    declared_variables: *VariableEnvironment,
    lexical_variables: *VariableEnvironment,
    res: *ModuleInfoDeserialized,
) ?*JSModuleRecord {
    defer res.deinit();

    var identifiers = IdentifierArray.create(res.strings_lens.len);
    defer identifiers.destroy();
    var offset: usize = 0;
    for (0.., res.strings_lens) |index, len| {
        if (res.strings_buf.len < offset + len) return null; // error!
        const sub = res.strings_buf[offset..][0..len];
        identifiers.setFromUtf8(index, vm, sub);
        offset += len;
    }

    {
        var i: usize = 0;
        for (res.record_kinds) |k| {
            if (i + (k.len() catch 0) > res.buffer.len) return null;
            switch (k) {
                .declared_variable => declared_variables.add(vm, identifiers, res.buffer[i]),
                .lexical_variable => lexical_variables.add(vm, identifiers, res.buffer[i]),
                .import_info_single, .import_info_single_type_script, .import_info_namespace, .export_info_indirect, .export_info_local, .export_info_namespace, .export_info_star => {},
                else => return null,
            }
            i += k.len() catch unreachable; // handled above
        }
    }

    const module_record = JSModuleRecord.create(globalObject, vm, module_key, source_code, declared_variables, lexical_variables, res.flags.contains_import_meta, res.flags.is_typescript);

    for (res.requested_modules_keys, res.requested_modules_values) |reqk, reqv| {
        switch (reqv) {
            .none => module_record.addRequestedModuleNullAttributesPtr(identifiers, reqk),
            .javascript => module_record.addRequestedModuleJavaScript(identifiers, reqk),
            .webassembly => module_record.addRequestedModuleWebAssembly(identifiers, reqk),
            .json => module_record.addRequestedModuleJSON(identifiers, reqk),
            else => |uv| module_record.addRequestedModuleHostDefined(identifiers, reqk, @enumFromInt(@intFromEnum(uv))),
        }
    }

    {
        var i: usize = 0;
        for (res.record_kinds) |k| {
            if (i + (k.len() catch unreachable) > res.buffer.len) unreachable; // handled above
            switch (k) {
                .declared_variable, .lexical_variable => {},
                .import_info_single => module_record.addImportEntrySingle(identifiers, res.buffer[i + 1], res.buffer[i + 2], res.buffer[i]),
                .import_info_single_type_script => module_record.addImportEntrySingleTypeScript(identifiers, res.buffer[i + 1], res.buffer[i + 2], res.buffer[i]),
                .import_info_namespace => module_record.addImportEntryNamespace(identifiers, res.buffer[i + 1], res.buffer[i + 2], res.buffer[i]),
                .export_info_indirect => module_record.addIndirectExport(identifiers, res.buffer[i + 0], res.buffer[i + 1], res.buffer[i + 2]),
                .export_info_local => module_record.addLocalExport(identifiers, res.buffer[i], res.buffer[i + 1]),
                .export_info_namespace => module_record.addNamespaceExport(identifiers, res.buffer[i], res.buffer[i + 1]),
                .export_info_star => module_record.addStarExport(identifiers, res.buffer[i]),
                else => unreachable, // handled above
            }
            i += k.len() catch unreachable; // handled above
        }
    }

    return module_record;
}
export fn zig__ModuleInfo__destroy(info: *ModuleInfo) void {
    info.destroy();
}

const VariableEnvironment = opaque {
    extern fn JSC__VariableEnvironment__add(environment: *VariableEnvironment, vm: *bun.jsc.VM, identifier_array: *IdentifierArray, identifier_index: StringID) void;
    pub const add = JSC__VariableEnvironment__add;
};
const IdentifierArray = opaque {
    extern fn JSC__IdentifierArray__create(len: usize) *IdentifierArray;
    pub const create = JSC__IdentifierArray__create;

    extern fn JSC__IdentifierArray__destroy(identifier_array: *IdentifierArray) void;
    pub const destroy = JSC__IdentifierArray__destroy;

    extern fn JSC__IdentifierArray__setFromUtf8(identifier_array: *IdentifierArray, n: usize, vm: *bun.jsc.VM, str: [*]const u8, len: usize) void;
    pub fn setFromUtf8(self: *IdentifierArray, n: usize, vm: *bun.jsc.VM, str: []const u8) void {
        JSC__IdentifierArray__setFromUtf8(self, n, vm, str.ptr, str.len);
    }
};
const SourceCode = opaque {};
const JSModuleRecord = opaque {
    extern fn JSC_JSModuleRecord__create(global_object: *bun.jsc.JSGlobalObject, vm: *bun.jsc.VM, module_key: *const IdentifierArray, source_code: *const SourceCode, declared_variables: *VariableEnvironment, lexical_variables: *VariableEnvironment, has_import_meta: bool, is_typescript: bool) *JSModuleRecord;
    pub const create = JSC_JSModuleRecord__create;

    extern fn JSC_JSModuleRecord__declaredVariables(module_record: *JSModuleRecord) *VariableEnvironment;
    pub const declaredVariables = JSC_JSModuleRecord__declaredVariables;
    extern fn JSC_JSModuleRecord__lexicalVariables(module_record: *JSModuleRecord) *VariableEnvironment;
    pub const lexicalVariables = JSC_JSModuleRecord__lexicalVariables;

    extern fn JSC_JSModuleRecord__addIndirectExport(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, export_name: StringID, import_name: StringID, module_name: StringID) void;
    pub const addIndirectExport = JSC_JSModuleRecord__addIndirectExport;
    extern fn JSC_JSModuleRecord__addLocalExport(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, export_name: StringID, local_name: StringID) void;
    pub const addLocalExport = JSC_JSModuleRecord__addLocalExport;
    extern fn JSC_JSModuleRecord__addNamespaceExport(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, export_name: StringID, module_name: StringID) void;
    pub const addNamespaceExport = JSC_JSModuleRecord__addNamespaceExport;
    extern fn JSC_JSModuleRecord__addStarExport(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, module_name: StringID) void;
    pub const addStarExport = JSC_JSModuleRecord__addStarExport;

    extern fn JSC_JSModuleRecord__addRequestedModuleNullAttributesPtr(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, module_name: StringID) void;
    pub const addRequestedModuleNullAttributesPtr = JSC_JSModuleRecord__addRequestedModuleNullAttributesPtr;
    extern fn JSC_JSModuleRecord__addRequestedModuleJavaScript(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, module_name: StringID) void;
    pub const addRequestedModuleJavaScript = JSC_JSModuleRecord__addRequestedModuleJavaScript;
    extern fn JSC_JSModuleRecord__addRequestedModuleWebAssembly(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, module_name: StringID) void;
    pub const addRequestedModuleWebAssembly = JSC_JSModuleRecord__addRequestedModuleWebAssembly;
    extern fn JSC_JSModuleRecord__addRequestedModuleJSON(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, module_name: StringID) void;
    pub const addRequestedModuleJSON = JSC_JSModuleRecord__addRequestedModuleJSON;
    extern fn JSC_JSModuleRecord__addRequestedModuleHostDefined(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, module_name: StringID, host_defined_import_type: StringID) void;
    pub const addRequestedModuleHostDefined = JSC_JSModuleRecord__addRequestedModuleHostDefined;

    extern fn JSC_JSModuleRecord__addImportEntrySingle(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, import_name: StringID, local_name: StringID, module_name: StringID) void;
    pub const addImportEntrySingle = JSC_JSModuleRecord__addImportEntrySingle;
    extern fn JSC_JSModuleRecord__addImportEntrySingleTypeScript(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, import_name: StringID, local_name: StringID, module_name: StringID) void;
    pub const addImportEntrySingleTypeScript = JSC_JSModuleRecord__addImportEntrySingleTypeScript;
    extern fn JSC_JSModuleRecord__addImportEntryNamespace(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, import_name: StringID, local_name: StringID, module_name: StringID) void;
    pub const addImportEntryNamespace = JSC_JSModuleRecord__addImportEntryNamespace;
};

export fn zig_log(msg: [*:0]const u8) void {
    bun.Output.errorWriter().print("{s}\n", .{std.mem.span(msg)}) catch {};
}

const bun = @import("bun");
const std = @import("std");
const DiffFormatter = @import("./bun.js/test/diff_format.zig").DiffFormatter;

@@ -343,6 +343,8 @@ pub const api = struct {
     sqlite_embedded = 17,
     html = 18,
     yaml = 19,
+    json5 = 20,
+    md = 21,
     _,

     pub fn jsonStringify(self: @This(), writer: anytype) !void {

@@ -68,6 +68,7 @@ ts_enums: TsEnumsMap = .{},
 /// This is a list of named exports that may exist in a CommonJS module
 /// We use this with `commonjs_at_runtime` to re-export CommonJS
 has_commonjs_export_names: bool = false,
+has_import_meta: bool = false,
 import_meta_ref: Ref = Ref.None,

 pub const CommonJSNamedExport = struct {

@@ -52,7 +52,7 @@ ts_enums: Ast.TsEnumsMap = .{},

 flags: BundledAst.Flags = .{},

-pub const Flags = packed struct(u8) {
+pub const Flags = packed struct(u16) {
     // This is a list of CommonJS features. When a file uses CommonJS features,
     // it's not a candidate for "flat bundling" and must be wrapped in its own
     // closure.

@@ -65,6 +65,8 @@ pub const Flags = packed struct(u8) {
     has_lazy_export: bool = false,
     commonjs_module_exports_assigned_deoptimized: bool = false,
     has_explicit_use_strict_directive: bool = false,
+    has_import_meta: bool = false,
+    _padding: u7 = 0,
 };

 pub const empty = BundledAst.init(Ast.empty);

@@ -116,6 +118,7 @@ pub fn toAST(this: *const BundledAst) Ast {
     .has_lazy_export = this.flags.has_lazy_export,
     .commonjs_module_exports_assigned_deoptimized = this.flags.commonjs_module_exports_assigned_deoptimized,
     .directive = if (this.flags.has_explicit_use_strict_directive) "use strict" else null,
+    .has_import_meta = this.flags.has_import_meta,
     };
 }

@@ -168,6 +171,7 @@ pub fn init(ast: Ast) BundledAst {
     .has_lazy_export = ast.has_lazy_export,
     .commonjs_module_exports_assigned_deoptimized = ast.commonjs_module_exports_assigned_deoptimized,
     .has_explicit_use_strict_directive = strings.eqlComptime(ast.directive orelse "", "use strict"),
+    .has_import_meta = ast.has_import_meta,
     },
     };
 }

@@ -320,9 +320,8 @@ pub const Runner = struct {
     .Null => return Expr.init(E.Null, E.Null{}, this.caller.loc),
     .Private => {
         this.is_top_level = false;
-        const _entry = this.visited.getOrPut(this.allocator, value) catch unreachable;
-        if (_entry.found_existing) {
-            return _entry.value_ptr.*;
+        if (this.visited.get(value)) |cached| {
+            return cached;
         }

         var blob_: ?*const jsc.WebCore.Blob = null;

@@ -470,9 +469,8 @@ pub const Runner = struct {
         return Expr.init(E.String, E.String.init(out_slice), this.caller.loc);
     },
     .Promise => {
-        const _entry = this.visited.getOrPut(this.allocator, value) catch unreachable;
-        if (_entry.found_existing) {
-            return _entry.value_ptr.*;
+        if (this.visited.get(value)) |cached| {
+            return cached;
         }

         const promise = value.asAnyPromise() orelse @panic("Unexpected promise type");

@@ -494,7 +492,7 @@ pub const Runner = struct {
         this.is_top_level = false;
         const result = try this.run(promise_result);

-        _entry.value_ptr.* = result;
+        this.visited.put(this.allocator, value, result) catch unreachable;
         return result;
     },
     else => {},

@@ -6591,6 +6591,7 @@ pub fn NewParser_(
     .top_level_await_keyword = p.top_level_await_keyword,
     .commonjs_named_exports = p.commonjs_named_exports,
     .has_commonjs_export_names = p.has_commonjs_export_names,
+    .has_import_meta = p.has_import_meta,

     .hashbang = hashbang,
     // TODO: cross-module constant inlining

@@ -48,6 +48,7 @@ pub fn trackResolutionFailure(store: *DirectoryWatchStore, import_source: []cons
     .jsonc,
     .toml,
     .yaml,
+    .json5,
     .wasm,
     .napi,
     .base64,
@@ -56,6 +57,7 @@ pub fn trackResolutionFailure(store: *DirectoryWatchStore, import_source: []cons
     .bunsh,
     .sqlite,
     .sqlite_embedded,
+    .md,
     => bun.debugAssert(false),
 }

@@ -433,6 +433,8 @@ pub fn buildWithVm(ctx: bun.cli.Command.Context, cwd: []const u8, vm: *VirtualMa
     .asset => {},
     .bytecode => {},
     .sourcemap => {},
+    .module_info => {},
+    .@"metafile-json", .@"metafile-markdown" => {},
     }
 },
 }

@@ -277,19 +277,33 @@ pub const Run = struct {
 vm.onUnhandledRejection = &onUnhandledRejectionBeforeClose;

 // Start CPU profiler if enabled
-if (this.ctx.runtime_options.cpu_prof.enabled or bun.env_var.BUN_CPU_PROFILE.get()) {
+if (this.ctx.runtime_options.cpu_prof.enabled) {
     const cpu_prof_opts = this.ctx.runtime_options.cpu_prof;

     vm.cpu_profiler_config = CPUProfiler.CPUProfilerConfig{
-        .name = cpu_prof_opts.name orelse bun.env_var.BUN_CPU_PROFILE_NAME.get() orelse "",
-        .dir = cpu_prof_opts.dir orelse bun.env_var.BUN_CPU_PROFILE_DIR.get() orelse "",
+        .name = cpu_prof_opts.name,
+        .dir = cpu_prof_opts.dir,
+        .md_format = cpu_prof_opts.md_format,
+        .json_format = cpu_prof_opts.json_format,
         .interval = cpu_prof_opts.interval,
     };
     CPUProfiler.setSamplingInterval(cpu_prof_opts.interval);
     CPUProfiler.startCPUProfiler(vm.jsc_vm);
     bun.analytics.Features.cpu_profile += 1;
 }

+// Set up heap profiler config if enabled (actual profiling happens on exit)
+if (this.ctx.runtime_options.heap_prof.enabled) {
+    const heap_prof_opts = this.ctx.runtime_options.heap_prof;
+
+    vm.heap_profiler_config = HeapProfiler.HeapProfilerConfig{
+        .name = heap_prof_opts.name,
+        .dir = heap_prof_opts.dir,
+        .text_format = heap_prof_opts.text_format,
+    };
+    bun.analytics.Features.heap_snapshot += 1;
+}
+
 this.addConditionalGlobals();
 do_redis_preconnect: {
     // This must happen within the API lock, which is why it's not in the "doPreconnect" function

@@ -551,6 +565,7 @@ const VirtualMachine = jsc.VirtualMachine;
 const string = []const u8;

 const CPUProfiler = @import("./bun.js/bindings/BunCPUProfiler.zig");
+const HeapProfiler = @import("./bun.js/bindings/BunHeapProfiler.zig");
 const options = @import("./options.zig");
 const std = @import("std");
 const Command = @import("./cli.zig").Command;

@@ -417,7 +417,7 @@ pub const AsyncModule = struct {
 jsc.markBinding(@src());
 var specifier = specifier_;
 var referrer = referrer_;
-var scope: jsc.CatchScope = undefined;
+var scope: jsc.TopExceptionScope = undefined;
 scope.init(globalThis, @src());
 defer {
     specifier.deref();

@@ -694,6 +694,7 @@ pub const AsyncModule = struct {
     &printer,
     .esm_ascii,
     mapper.get(),
+    null,
 );
 }

@@ -100,7 +100,7 @@ pub fn transpileSourceCode(
 const disable_transpilying = comptime flags.disableTranspiling();

 if (comptime disable_transpilying) {
-    if (!(loader.isJavaScriptLike() or loader == .toml or loader == .yaml or loader == .text or loader == .json or loader == .jsonc)) {
+    if (!(loader.isJavaScriptLike() or loader == .toml or loader == .yaml or loader == .json5 or loader == .text or loader == .json or loader == .jsonc)) {
         // Don't print "export default <file path>"
         return ResolvedSource{
             .allocator = null,

@@ -112,7 +112,7 @@ pub fn transpileSourceCode(
 }

 switch (loader) {
-    .js, .jsx, .ts, .tsx, .json, .jsonc, .toml, .yaml, .text => {
+    .js, .jsx, .ts, .tsx, .json, .jsonc, .toml, .yaml, .json5, .text, .md => {
         // Ensure that if there was an ASTMemoryAllocator in use, it's not used anymore.
         var ast_scope = js_ast.ASTMemoryAllocator.Scope{};
         ast_scope.enter();

@@ -178,6 +178,7 @@ pub fn transpileSourceCode(
 var cache = jsc.RuntimeTranspilerCache{
     .output_code_allocator = allocator,
     .sourcemap_allocator = bun.default_allocator,
+    .esm_record_allocator = bun.default_allocator,
 };

 const old = jsc_vm.transpiler.log;

@@ -361,7 +362,7 @@ pub fn transpileSourceCode(
 };
 }

-if (loader == .json or loader == .jsonc or loader == .toml or loader == .yaml) {
+if (loader == .json or loader == .jsonc or loader == .toml or loader == .yaml or loader == .json5) {
     if (parse_result.empty) {
         return ResolvedSource{
             .allocator = null,

@@ -422,6 +423,10 @@ pub fn transpileSourceCode(
     dumpSourceString(jsc_vm, specifier, entry.output_code.byteSlice());
 }

+// TODO: module_info is only needed for standalone ESM bytecode.
+// For now, skip it entirely in the runtime transpiler.
+const module_info: ?*analyze_transpiled_module.ModuleInfoDeserialized = null;
+
 return ResolvedSource{
     .allocator = null,
     .source_code = switch (entry.output_code) {

@@ -436,6 +441,7 @@ pub fn transpileSourceCode(
     .specifier = input_specifier,
     .source_url = input_specifier.createIfDifferent(path.text),
     .is_commonjs_module = entry.metadata.module_type == .cjs,
+    .module_info = module_info,
     .tag = brk: {
         if (entry.metadata.module_type == .cjs and source.path.isFile()) {
             const actual_package_json: *PackageJSON = package_json orelse brk2: {

@@ -504,6 +510,11 @@ pub fn transpileSourceCode(
 jsc_vm.resolved_count += jsc_vm.transpiler.linker.import_counter - start_count;
 jsc_vm.transpiler.linker.import_counter = 0;

+const is_commonjs_module = parse_result.ast.has_commonjs_export_names or parse_result.ast.exports_kind == .cjs;
+// TODO: module_info is only needed for standalone ESM bytecode.
+// For now, skip it entirely in the runtime transpiler.
+const module_info: ?*analyze_transpiled_module.ModuleInfo = null;
+
 var printer = source_code_printer.*;
 printer.ctx.reset();
 defer source_code_printer.* = printer;

@@ -516,6 +527,7 @@ pub fn transpileSourceCode(
     &printer,
     .esm_ascii,
     mapper.get(),
+    module_info,
 );
 };

@@ -529,9 +541,12 @@ pub fn transpileSourceCode(
 }
 }

+const module_info_deserialized: ?*anyopaque = if (module_info) |mi| @ptrCast(mi.asDeserialized()) else null;
+
 if (jsc_vm.isWatcherEnabled()) {
     var resolved_source = jsc_vm.refCountedResolvedSource(printer.ctx.written, input_specifier, path.text, null, false);
-    resolved_source.is_commonjs_module = parse_result.ast.has_commonjs_export_names or parse_result.ast.exports_kind == .cjs;
+    resolved_source.is_commonjs_module = is_commonjs_module;
+    resolved_source.module_info = module_info_deserialized;
     return resolved_source;
 }

@@ -564,7 +579,8 @@ pub fn transpileSourceCode(
     },
     .specifier = input_specifier,
     .source_url = input_specifier.createIfDifferent(path.text),
-    .is_commonjs_module = parse_result.ast.has_commonjs_export_names or parse_result.ast.exports_kind == .cjs,
+    .is_commonjs_module = is_commonjs_module,
+    .module_info = module_info_deserialized,
     .tag = tag,
 };
 },

@@ -1192,9 +1208,15 @@ pub fn fetchBuiltinModule(jsc_vm: *VirtualMachine, specifier: bun.String) !?Reso
     .source_code = file.toWTFString(),
     .specifier = specifier,
     .source_url = specifier.dupeRef(),
+    // bytecode_origin_path is the path used when generating bytecode; must match for cache hits
+    .bytecode_origin_path = if (file.bytecode_origin_path.len > 0) bun.String.fromBytes(file.bytecode_origin_path) else bun.String.empty,
     .source_code_needs_deref = false,
     .bytecode_cache = if (file.bytecode.len > 0) file.bytecode.ptr else null,
     .bytecode_cache_size = file.bytecode.len,
+    .module_info = if (file.module_info.len > 0)
+        analyze_transpiled_module.ModuleInfoDeserialized.createFromCachedRecord(file.module_info, bun.default_allocator)
+    else
+        null,
+    .is_commonjs_module = file.module_format == .cjs,
     };
 }

@@ -1324,6 +1346,7 @@ const string = []const u8;

 const Fs = @import("../fs.zig");
 const Runtime = @import("../runtime.zig");
+const analyze_transpiled_module = @import("../analyze_transpiled_module.zig");
 const ast = @import("../import_record.zig");
 const node_module_module = @import("./bindings/NodeModuleModule.zig");
 const std = @import("std");

@@ -14,7 +14,8 @@
 /// Version 15: Updated global defines table list.
 /// Version 16: Added typeof undefined minification optimization.
 /// Version 17: Removed transpiler import rewrite for bun:test. Not bumping it causes test/js/bun/http/req-url-leak.test.ts to fail with SyntaxError: Export named 'expect' not found in module 'bun:test'.
-const expected_version = 17;
+/// Version 18: Include ESM record (module info) with an ES Module, see #15758
+const expected_version = 18;

 const debug = Output.scoped(.cache, .visible);
 const MINIMUM_CACHE_SIZE = 50 * 1024;

@@ -32,6 +33,7 @@ pub const RuntimeTranspilerCache = struct {

 sourcemap_allocator: std.mem.Allocator,
 output_code_allocator: std.mem.Allocator,
+esm_record_allocator: std.mem.Allocator,

 const seed = 42;
 pub const Metadata = struct {

@@ -52,6 +54,10 @@ pub const RuntimeTranspilerCache = struct {
     sourcemap_byte_length: u64 = 0,
     sourcemap_hash: u64 = 0,

+    esm_record_byte_offset: u64 = 0,
+    esm_record_byte_length: u64 = 0,
+    esm_record_hash: u64 = 0,
+
     pub const size = brk: {
         var count: usize = 0;
         const meta: Metadata = .{};

@@ -78,6 +84,10 @@ pub const RuntimeTranspilerCache = struct {
     try writer.writeInt(u64, this.sourcemap_byte_offset, .little);
     try writer.writeInt(u64, this.sourcemap_byte_length, .little);
     try writer.writeInt(u64, this.sourcemap_hash, .little);
+
+    try writer.writeInt(u64, this.esm_record_byte_offset, .little);
+    try writer.writeInt(u64, this.esm_record_byte_length, .little);
+    try writer.writeInt(u64, this.esm_record_hash, .little);
 }

 pub fn decode(this: *Metadata, reader: anytype) !void {
@@ -102,6 +112,10 @@ pub const RuntimeTranspilerCache = struct {
     this.sourcemap_byte_length = try reader.readInt(u64, .little);
     this.sourcemap_hash = try reader.readInt(u64, .little);
+
+    this.esm_record_byte_offset = try reader.readInt(u64, .little);
+    this.esm_record_byte_length = try reader.readInt(u64, .little);
+    this.esm_record_hash = try reader.readInt(u64, .little);

     switch (this.module_type) {
         .esm, .cjs => {},
         // Invalid module type

@@ -120,6 +134,7 @@ pub const RuntimeTranspilerCache = struct {
 metadata: Metadata,
 output_code: OutputCode = .{ .utf8 = "" },
 sourcemap: []const u8 = "",
+esm_record: []const u8 = "",

 pub const OutputCode = union(enum) {
     utf8: []const u8,

@@ -142,11 +157,14 @@ pub const RuntimeTranspilerCache = struct {
 }
 };

-pub fn deinit(this: *Entry, sourcemap_allocator: std.mem.Allocator, output_code_allocator: std.mem.Allocator) void {
+pub fn deinit(this: *Entry, sourcemap_allocator: std.mem.Allocator, output_code_allocator: std.mem.Allocator, esm_record_allocator: std.mem.Allocator) void {
     this.output_code.deinit(output_code_allocator);
     if (this.sourcemap.len > 0) {
         sourcemap_allocator.free(this.sourcemap);
     }
+    if (this.esm_record.len > 0) {
+        esm_record_allocator.free(this.esm_record);
+    }
 }

 pub fn save(
@@ -156,6 +174,7 @@ pub const RuntimeTranspilerCache = struct {
     input_hash: u64,
     features_hash: u64,
     sourcemap: []const u8,
+    esm_record: []const u8,
     output_code: OutputCode,
     exports_kind: bun.ast.ExportsKind,
 ) !void {

@@ -201,10 +220,16 @@ pub const RuntimeTranspilerCache = struct {
     .output_byte_offset = Metadata.size,
     .output_byte_length = output_bytes.len,
     .sourcemap_byte_offset = Metadata.size + output_bytes.len,
+    .esm_record_byte_offset = Metadata.size + output_bytes.len + sourcemap.len,
+    .esm_record_byte_length = esm_record.len,
 };

 metadata.output_hash = hash(output_bytes);
 metadata.sourcemap_hash = hash(sourcemap);
+if (esm_record.len > 0) {
+    metadata.esm_record_hash = hash(esm_record);
+}

 var metadata_stream = std.io.fixedBufferStream(&metadata_buf);

 try metadata.encode(metadata_stream.writer());
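// Sketch of the resulting cache file layout (inferred from the offsets above,
// not part of the original diff):
//   [ Metadata | output_code | sourcemap | esm_record ]
// Each section's offset, length, and hash live in Metadata, so a missing
// esm_record simply has length 0.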

@@ -219,20 +244,26 @@ pub const RuntimeTranspilerCache = struct {
     break :brk metadata_buf[0..metadata_stream.pos];
 };

-const vecs: []const bun.PlatformIOVecConst = if (output_bytes.len > 0)
-    &.{
-        bun.platformIOVecConstCreate(metadata_bytes),
-        bun.platformIOVecConstCreate(output_bytes),
-        bun.platformIOVecConstCreate(sourcemap),
-    }
-else
-    &.{
-        bun.platformIOVecConstCreate(metadata_bytes),
-        bun.platformIOVecConstCreate(sourcemap),
-    };
+var vecs_buf: [4]bun.PlatformIOVecConst = undefined;
+var vecs_i: usize = 0;
+vecs_buf[vecs_i] = bun.platformIOVecConstCreate(metadata_bytes);
+vecs_i += 1;
+if (output_bytes.len > 0) {
+    vecs_buf[vecs_i] = bun.platformIOVecConstCreate(output_bytes);
+    vecs_i += 1;
+}
+if (sourcemap.len > 0) {
+    vecs_buf[vecs_i] = bun.platformIOVecConstCreate(sourcemap);
+    vecs_i += 1;
+}
+if (esm_record.len > 0) {
+    vecs_buf[vecs_i] = bun.platformIOVecConstCreate(esm_record);
+    vecs_i += 1;
+}
+const vecs: []const bun.PlatformIOVecConst = vecs_buf[0..vecs_i];

 var position: isize = 0;
-const end_position = Metadata.size + output_bytes.len + sourcemap.len;
+const end_position = Metadata.size + output_bytes.len + sourcemap.len + esm_record.len;

 if (bun.Environment.allow_assert) {
     var total: usize = 0;
@@ -242,7 +273,7 @@ pub const RuntimeTranspilerCache = struct {
     }
     bun.assert(end_position == total);
 }
-bun.assert(end_position == @as(i64, @intCast(sourcemap.len + output_bytes.len + Metadata.size)));
+bun.assert(end_position == @as(i64, @intCast(sourcemap.len + output_bytes.len + Metadata.size + esm_record.len)));

 bun.sys.preallocate_file(tmpfile.fd.cast(), 0, @intCast(end_position)) catch {};
 while (position < end_position) {

@@ -263,6 +294,7 @@ pub const RuntimeTranspilerCache = struct {
     file: std.fs.File,
     sourcemap_allocator: std.mem.Allocator,
     output_code_allocator: std.mem.Allocator,
+    esm_record_allocator: std.mem.Allocator,
 ) !void {
     const stat_size = try file.getEndPos();
     if (stat_size < Metadata.size + this.metadata.output_byte_length + this.metadata.sourcemap_byte_length) {

@@ -338,6 +370,23 @@ pub const RuntimeTranspilerCache = struct {

     this.sourcemap = sourcemap;
 }
+
+if (this.metadata.esm_record_byte_length > 0) {
+    const esm_record = try esm_record_allocator.alloc(u8, this.metadata.esm_record_byte_length);
+    errdefer esm_record_allocator.free(esm_record);
+    const read_bytes = try file.preadAll(esm_record, this.metadata.esm_record_byte_offset);
+    if (read_bytes != this.metadata.esm_record_byte_length) {
+        return error.MissingData;
+    }
+
+    if (this.metadata.esm_record_hash != 0) {
+        if (hash(esm_record) != this.metadata.esm_record_hash) {
+            return error.InvalidHash;
+        }
+    }
+
+    this.esm_record = esm_record;
+}
 }
 };

@@ -455,6 +504,7 @@ pub const RuntimeTranspilerCache = struct {
     input_stat_size: u64,
     sourcemap_allocator: std.mem.Allocator,
     output_code_allocator: std.mem.Allocator,
+    esm_record_allocator: std.mem.Allocator,
 ) !Entry {
     var tracer = bun.perf.trace("RuntimeTranspilerCache.fromFile");
     defer tracer.end();
@@ -469,6 +519,7 @@ pub const RuntimeTranspilerCache = struct {
     input_stat_size,
     sourcemap_allocator,
     output_code_allocator,
+    esm_record_allocator,
 );
 }

@@ -479,6 +530,7 @@ pub const RuntimeTranspilerCache = struct {
     input_stat_size: u64,
     sourcemap_allocator: std.mem.Allocator,
     output_code_allocator: std.mem.Allocator,
+    esm_record_allocator: std.mem.Allocator,
 ) !Entry {
     var metadata_bytes_buf: [Metadata.size * 2]u8 = undefined;
     const cache_fd = try bun.sys.open(cache_file_path.sliceAssumeZ(), bun.O.RDONLY, 0).unwrap();

@@ -510,7 +562,7 @@ pub const RuntimeTranspilerCache = struct {
     return error.MismatchedFeatureHash;
 }

-try entry.load(file, sourcemap_allocator, output_code_allocator);
+try entry.load(file, sourcemap_allocator, output_code_allocator, esm_record_allocator);

 return entry;
 }
@@ -527,6 +579,7 @@ pub const RuntimeTranspilerCache = struct {
     input_hash: u64,
     features_hash: u64,
     sourcemap: []const u8,
+    esm_record: []const u8,
     source_code: bun.String,
     exports_kind: bun.ast.ExportsKind,
 ) !void {

@@ -566,6 +619,7 @@ pub const RuntimeTranspilerCache = struct {
     input_hash,
     features_hash,
     sourcemap,
+    esm_record,
     output_code,
     exports_kind,
 );
@@ -599,7 +653,7 @@ pub const RuntimeTranspilerCache = struct {
 parser_options.hashForRuntimeTranspiler(&features_hasher, used_jsx);
 this.features_hash = features_hasher.final();

-this.entry = fromFile(input_hash, this.features_hash.?, source.contents.len, this.sourcemap_allocator, this.output_code_allocator) catch |err| {
+this.entry = fromFile(input_hash, this.features_hash.?, source.contents.len, this.sourcemap_allocator, this.output_code_allocator, this.esm_record_allocator) catch |err| {
     debug("get(\"{s}\") = {s}", .{ source.path.text, @errorName(err) });
     return false;
 };
@@ -615,7 +669,7 @@ pub const RuntimeTranspilerCache = struct {
 if (comptime bun.Environment.isDebug) {
     if (!bun_debug_restore_from_cache) {
         if (this.entry) |*entry| {
-            entry.deinit(this.sourcemap_allocator, this.output_code_allocator);
+            entry.deinit(this.sourcemap_allocator, this.output_code_allocator, this.esm_record_allocator);
             this.entry = null;
         }
     }
@@ -624,7 +678,7 @@ pub const RuntimeTranspilerCache = struct {
 return this.entry != null;
 }

-pub fn put(this: *RuntimeTranspilerCache, output_code_bytes: []const u8, sourcemap: []const u8) void {
+pub fn put(this: *RuntimeTranspilerCache, output_code_bytes: []const u8, sourcemap: []const u8, esm_record: []const u8) void {
     if (comptime !bun.FeatureFlags.runtime_transpiler_cache)
         @compileError("RuntimeTranspilerCache is disabled");

@@ -635,7 +689,7 @@ pub const RuntimeTranspilerCache = struct {
 const output_code = bun.String.cloneLatin1(output_code_bytes);
 this.output_code = output_code;

-toFile(this.input_byte_length.?, this.input_hash.?, this.features_hash.?, sourcemap, output_code, this.exports_kind) catch |err| {
+toFile(this.input_byte_length.?, this.input_hash.?, this.features_hash.?, sourcemap, esm_record, output_code, this.exports_kind) catch |err| {
     debug("put() = {s}", .{@errorName(err)});
     return;
 };

@@ -315,6 +315,7 @@ pub const RuntimeTranspilerStore = struct {
 var cache = jsc.RuntimeTranspilerCache{
     .output_code_allocator = allocator,
     .sourcemap_allocator = bun.default_allocator,
+    .esm_record_allocator = bun.default_allocator,
 };
 var log = logger.Log.init(allocator);
 defer {

@@ -471,6 +472,10 @@ pub const RuntimeTranspilerStore = struct {
     dumpSourceString(vm, specifier, entry.output_code.byteSlice());
 }

+// TODO: module_info is only needed for standalone ESM bytecode.
+// For now, skip it entirely in the runtime transpiler.
+const module_info: ?*analyze_transpiled_module.ModuleInfoDeserialized = null;
+
 this.resolved_source = ResolvedSource{
     .allocator = null,
     .source_code = switch (entry.output_code) {
@@ -483,6 +488,7 @@ pub const RuntimeTranspilerStore = struct {
     },
 },
 .is_commonjs_module = entry.metadata.module_type == .cjs,
+.module_info = module_info,
 .tag = this.resolved_source.tag,
 };

@@ -541,6 +547,11 @@ pub const RuntimeTranspilerStore = struct {
     printer = source_code_printer.?.*;
 }

+const is_commonjs_module = parse_result.ast.has_commonjs_export_names or parse_result.ast.exports_kind == .cjs;
+// TODO: module_info is only needed for standalone ESM bytecode.
+// For now, skip it entirely in the runtime transpiler.
+const module_info: ?*analyze_transpiled_module.ModuleInfo = null;
+
 {
     var mapper = vm.sourceMapHandler(&printer);
     defer source_code_printer.?.* = printer;
@@ -550,7 +561,9 @@ pub const RuntimeTranspilerStore = struct {
     &printer,
     .esm_ascii,
     mapper.get(),
+    module_info,
 ) catch |err| {
+    if (module_info) |mi| mi.destroy();
     this.parse_error = err;
     return;
 };
@@ -589,7 +602,8 @@ pub const RuntimeTranspilerStore = struct {
 this.resolved_source = ResolvedSource{
     .allocator = null,
     .source_code = source_code,
-    .is_commonjs_module = parse_result.ast.has_commonjs_export_names or parse_result.ast.exports_kind == .cjs,
+    .is_commonjs_module = is_commonjs_module,
+    .module_info = if (module_info) |mi| @ptrCast(mi.asDeserialized()) else null,
     .tag = this.resolved_source.tag,
 };
 }
@@ -597,6 +611,7 @@ pub const RuntimeTranspilerStore = struct {
 };

 const Fs = @import("../fs.zig");
+const analyze_transpiled_module = @import("../analyze_transpiled_module.zig");
 const node_fallbacks = @import("../node_fallbacks.zig");
 const std = @import("std");
 const AsyncModule = @import("./AsyncModule.zig").AsyncModule;

@@ -49,6 +49,7 @@ standalone_module_graph: ?*bun.StandaloneModuleGraph = null,
 smol: bool = false,
 dns_result_order: DNSResolver.Order = .verbatim,
 cpu_profiler_config: ?CPUProfilerConfig = null,
+heap_profiler_config: ?HeapProfilerConfig = null,
 counters: Counters = .{},

 hot_reload: bun.cli.Command.HotReload = .none,

@@ -542,7 +543,7 @@ fn wrapUnhandledRejectionErrorForUncaughtException(globalObject: *JSGlobalObject
     break :blk false;
 }) return reason;
 const reasonStr = blk: {
-    var scope: jsc.CatchScope = undefined;
+    var scope: jsc.TopExceptionScope = undefined;
     scope.init(globalObject, @src());
     defer scope.deinit();
     defer if (scope.exception()) |_| scope.clearException();

@@ -843,6 +844,15 @@ pub fn onExit(this: *VirtualMachine) void {
     };
 }

+// Write heap profile if profiling was enabled - do this after CPU profile but before shutdown
+// Grab the config and null it out to make this idempotent
+if (this.heap_profiler_config) |config| {
+    this.heap_profiler_config = null;
+    HeapProfiler.generateAndWriteProfile(this.jsc_vm, config) catch |err| {
+        Output.err(err, "Failed to write heap profile", .{});
+    };
+}
+
 this.exit_handler.dispatchOnExit();
 this.is_shutting_down = true;

@@ -3715,6 +3725,9 @@ const Allocator = std.mem.Allocator;
 const CPUProfiler = @import("./bindings/BunCPUProfiler.zig");
 const CPUProfilerConfig = CPUProfiler.CPUProfilerConfig;

+const HeapProfiler = @import("./bindings/BunHeapProfiler.zig");
+const HeapProfilerConfig = HeapProfiler.HeapProfilerConfig;
+
 const bun = @import("bun");
 const Async = bun.Async;
 const DotEnv = bun.DotEnv;

@@ -27,8 +27,10 @@ pub const Subprocess = @import("./api/bun/subprocess.zig");
 pub const Terminal = @import("./api/bun/Terminal.zig");
 pub const HashObject = @import("./api/HashObject.zig");
 pub const JSONCObject = @import("./api/JSONCObject.zig");
+pub const MarkdownObject = @import("./api/MarkdownObject.zig");
 pub const TOMLObject = @import("./api/TOMLObject.zig");
 pub const UnsafeObject = @import("./api/UnsafeObject.zig");
+pub const JSON5Object = @import("./api/JSON5Object.zig");
 pub const YAMLObject = @import("./api/YAMLObject.zig");
 pub const Timer = @import("./api/Timer.zig");
 pub const FFIObject = @import("./api/FFIObject.zig");

@@ -63,7 +63,9 @@ pub const BunObject = struct {
 pub const SHA512 = toJSLazyPropertyCallback(Crypto.SHA512.getter);
 pub const SHA512_256 = toJSLazyPropertyCallback(Crypto.SHA512_256.getter);
 pub const JSONC = toJSLazyPropertyCallback(Bun.getJSONCObject);
+pub const markdown = toJSLazyPropertyCallback(Bun.getMarkdownObject);
 pub const TOML = toJSLazyPropertyCallback(Bun.getTOMLObject);
+pub const JSON5 = toJSLazyPropertyCallback(Bun.getJSON5Object);
 pub const YAML = toJSLazyPropertyCallback(Bun.getYAMLObject);
 pub const Transpiler = toJSLazyPropertyCallback(Bun.getTranspilerConstructor);
 pub const argv = toJSLazyPropertyCallback(Bun.getArgv);

@@ -130,7 +132,9 @@ pub const BunObject = struct {
 @export(&BunObject.SHA512, .{ .name = lazyPropertyCallbackName("SHA512") });
 @export(&BunObject.SHA512_256, .{ .name = lazyPropertyCallbackName("SHA512_256") });
 @export(&BunObject.JSONC, .{ .name = lazyPropertyCallbackName("JSONC") });
+@export(&BunObject.markdown, .{ .name = lazyPropertyCallbackName("markdown") });
 @export(&BunObject.TOML, .{ .name = lazyPropertyCallbackName("TOML") });
+@export(&BunObject.JSON5, .{ .name = lazyPropertyCallbackName("JSON5") });
 @export(&BunObject.YAML, .{ .name = lazyPropertyCallbackName("YAML") });
 @export(&BunObject.Glob, .{ .name = lazyPropertyCallbackName("Glob") });
 @export(&BunObject.Transpiler, .{ .name = lazyPropertyCallbackName("Transpiler") });

@@ -1265,10 +1269,17 @@ pub fn getHashObject(globalThis: *jsc.JSGlobalObject, _: *jsc.JSObject) jsc.JSVa
 pub fn getJSONCObject(globalThis: *jsc.JSGlobalObject, _: *jsc.JSObject) jsc.JSValue {
     return JSONCObject.create(globalThis);
 }
+pub fn getMarkdownObject(globalThis: *jsc.JSGlobalObject, _: *jsc.JSObject) jsc.JSValue {
+    return MarkdownObject.create(globalThis);
+}
 pub fn getTOMLObject(globalThis: *jsc.JSGlobalObject, _: *jsc.JSObject) jsc.JSValue {
     return TOMLObject.create(globalThis);
 }

+pub fn getJSON5Object(globalThis: *jsc.JSGlobalObject, _: *jsc.JSObject) jsc.JSValue {
+    return JSON5Object.create(globalThis);
+}
+
 pub fn getYAMLObject(globalThis: *jsc.JSGlobalObject, _: *jsc.JSObject) jsc.JSValue {
     return YAMLObject.create(globalThis);
 }

@@ -2066,7 +2077,9 @@ const gen = bun.gen.BunObject;
 const api = bun.api;
 const FFIObject = bun.api.FFIObject;
 const HashObject = bun.api.HashObject;
+const JSON5Object = bun.api.JSON5Object;
 const JSONCObject = bun.api.JSONCObject;
+const MarkdownObject = bun.api.MarkdownObject;
 const TOMLObject = bun.api.TOMLObject;
 const UnsafeObject = bun.api.UnsafeObject;
 const YAMLObject = bun.api.YAMLObject;

@@ -242,6 +242,10 @@ pub const JSBundler = struct {
 bytecode: bool = false,
 banner: OwnedString = OwnedString.initEmpty(bun.default_allocator),
 footer: OwnedString = OwnedString.initEmpty(bun.default_allocator),
+/// Path to write JSON metafile (if specified via metafile object) - TEST: moved here
+metafile_json_path: OwnedString = OwnedString.initEmpty(bun.default_allocator),
+/// Path to write markdown metafile (if specified via metafile object) - TEST: moved here
+metafile_markdown_path: OwnedString = OwnedString.initEmpty(bun.default_allocator),
 css_chunking: bool = false,
 drop: bun.StringSet = bun.StringSet.init(bun.default_allocator),
 features: bun.StringSet = bun.StringSet.init(bun.default_allocator),
@@ -254,6 +258,7 @@ pub const JSBundler = struct {
 /// In-memory files that can be used as entrypoints or imported.
 /// These files do not need to exist on disk.
 files: FileMap = .{},
+/// Generate metafile (JSON module graph)
 metafile: bool = false,

 pub const CompileOptions = struct {

@@ -670,8 +675,8 @@ pub const JSBundler = struct {
 if (try config.getOptionalEnum(globalThis, "format", options.Format)) |format| {
     this.format = format;

-    if (this.bytecode and format != .cjs) {
-        return globalThis.throwInvalidArguments("format must be 'cjs' when bytecode is true. Eventually we'll add esm support as well.", .{});
+    if (this.bytecode and format != .cjs and format != .esm) {
+        return globalThis.throwInvalidArguments("format must be 'cjs' or 'esm' when bytecode is true.", .{});
     }
 }

@@ -936,8 +941,30 @@ pub const JSBundler = struct {
 this.throw_on_error = flag;
 }

-if (try config.getBooleanLoose(globalThis, "metafile")) |flag| {
-    this.metafile = flag;
+// Parse metafile option: boolean | string | { json?: string, markdown?: string }
+if (try config.getOwn(globalThis, "metafile")) |metafile_value| {
+    if (metafile_value.isBoolean()) {
+        this.metafile = metafile_value == .true;
+    } else if (metafile_value.isString()) {
+        // metafile: "path/to/meta.json" - shorthand for { json: "..." }
+        this.metafile = true;
+        const slice = try metafile_value.toSlice(globalThis, bun.default_allocator);
+        defer slice.deinit();
+        try this.metafile_json_path.appendSliceExact(slice.slice());
+    } else if (metafile_value.isObject()) {
+        // metafile: { json?: string, markdown?: string }
+        this.metafile = true;
+        if (try metafile_value.getOptional(globalThis, "json", ZigString.Slice)) |slice| {
+            defer slice.deinit();
+            try this.metafile_json_path.appendSliceExact(slice.slice());
+        }
+        if (try metafile_value.getOptional(globalThis, "markdown", ZigString.Slice)) |slice| {
+            defer slice.deinit();
+            try this.metafile_markdown_path.appendSliceExact(slice.slice());
+        }
+    } else if (!metafile_value.isUndefinedOrNull()) {
+        return globalThis.throwInvalidArguments("Expected metafile to be a boolean, string, or object with json/markdown paths", .{});
+    }
 }

 if (try CompileOptions.fromJS(
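// Illustrative usage of the shapes accepted above (paths are hypothetical):
//   await Bun.build({ entrypoints: ["./index.ts"], metafile: true });
//   await Bun.build({ entrypoints: ["./index.ts"], metafile: "meta.json" });
//   await Bun.build({ entrypoints: ["./index.ts"], metafile: { json: "meta.json", markdown: "meta.md" } });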

@@ -992,6 +1019,13 @@
     }
 }

+// ESM bytecode requires compile because module_info (import/export metadata)
+// is only available in compiled binaries. Without it, JSC must parse the file
+// twice (once for module analysis, once for bytecode), which is a deopt.
+if (this.bytecode and this.format == .esm and this.compile == null) {
+    return globalThis.throwInvalidArguments("ESM bytecode requires compile: true. Use format: 'cjs' for bytecode without compile.", .{});
+}
+
 return this;
 }
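// Illustrative (hypothetical entrypoint) of the combinations enforced above:
//   await Bun.build({ entrypoints: ["./cli.ts"], bytecode: true, format: "cjs" });                 // ok
//   await Bun.build({ entrypoints: ["./cli.ts"], bytecode: true, format: "esm" });                 // throws: needs compile
//   await Bun.build({ entrypoints: ["./cli.ts"], bytecode: true, format: "esm", compile: true });  // ok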

@@ -1070,6 +1104,8 @@ pub const JSBundler = struct {
 self.footer.deinit();
 self.tsconfig_override.deinit();
 self.files.deinitAndUnprotect();
+self.metafile_json_path.deinit();
+self.metafile_markdown_path.deinit();
 }
 };

@@ -1081,6 +1117,28 @@ pub const JSBundler = struct {
 return globalThis.throwInvalidArguments("Expected a config object to be passed to Bun.build", .{});
 }

+const vm = globalThis.bunVM();
+
+// Detect and prevent calling Bun.build from within a macro during bundling.
+// This would cause a deadlock because:
+// 1. The bundler thread (singleton) is processing the outer Bun.build
+// 2. During parsing, it encounters a macro and evaluates it
+// 3. The macro calls Bun.build, which tries to enqueue to the same singleton thread
+// 4. The singleton thread is blocked waiting for the macro to complete -> deadlock
+if (vm.macro_mode) {
+    return globalThis.throw(
+        \\Bun.build cannot be called from within a macro during bundling.
+        \\
+        \\This would cause a deadlock because the bundler is waiting for the macro to complete,
+        \\but the macro's Bun.build call is waiting for the bundler.
+        \\
+        \\To bundle code at compile time in a macro, use Bun.spawnSync to invoke the CLI:
+        \\ const result = Bun.spawnSync(["bun", "build", entrypoint, "--format=esm"]);
+    ,
+        .{},
+    );
+}
+
 var plugins: ?*Plugin = null;
 const config = try Config.fromJS(globalThis, arguments[0], &plugins, bun.default_allocator);

@@ -1088,7 +1146,7 @@ pub const JSBundler = struct {
     config,
     plugins,
     globalThis,
-    globalThis.bunVM().eventLoop(),
+    vm.eventLoop(),
     bun.default_allocator,
 );
 }
@@ -1439,7 +1497,7 @@ pub const JSBundler = struct {
     error.JSTerminated => return error.JSTerminated,
 };

-var scope: jsc.CatchScope = undefined;
+var scope: jsc.TopExceptionScope = undefined;
 scope.init(globalThis, @src());
 defer scope.deinit();

@@ -1666,9 +1724,12 @@ pub const BuildArtifact = struct {
 @"entry-point",
 sourcemap,
 bytecode,
+module_info,
+@"metafile-json",
+@"metafile-markdown",

 pub fn isFileInStandaloneMode(this: OutputKind) bool {
-    return this != .sourcemap and this != .bytecode;
+    return this != .sourcemap and this != .bytecode and this != .module_info and this != .@"metafile-json" and this != .@"metafile-markdown";
 }
 };

src/bun.js/api/JSON5Object.zig (new file, 433 lines)
@@ -0,0 +1,433 @@
pub fn create(globalThis: *jsc.JSGlobalObject) jsc.JSValue {
    const object = JSValue.createEmptyObject(globalThis, 2);
    object.put(
        globalThis,
        ZigString.static("parse"),
        jsc.JSFunction.create(globalThis, "parse", parse, 1, .{}),
    );
    object.put(
        globalThis,
        ZigString.static("stringify"),
        jsc.JSFunction.create(globalThis, "stringify", stringify, 3, .{}),
    );
    return object;
}

pub fn stringify(
    global: *jsc.JSGlobalObject,
    callFrame: *jsc.CallFrame,
) bun.JSError!jsc.JSValue {
    const value, const replacer, const space_value = callFrame.argumentsAsArray(3);

    value.ensureStillAlive();

    if (value.isUndefined() or value.isSymbol() or value.isFunction()) {
        return .js_undefined;
    }

    if (!replacer.isUndefinedOrNull()) {
        return global.throw("JSON5.stringify does not support the replacer argument", .{});
    }

    var stringifier: Stringifier = try .init(global, space_value);
    defer stringifier.deinit();

    stringifier.stringifyValue(global, value) catch |err| return switch (err) {
        error.OutOfMemory, error.JSError, error.JSTerminated => |js_err| js_err,
        error.StackOverflow => global.throwStackOverflow(),
    };

    return stringifier.builder.toString(global);
}

pub fn parse(
    global: *jsc.JSGlobalObject,
    callFrame: *jsc.CallFrame,
) bun.JSError!jsc.JSValue {
    var arena: bun.ArenaAllocator = .init(bun.default_allocator);
    defer arena.deinit();
    const allocator = arena.allocator();

    var ast_memory_allocator = bun.handleOom(allocator.create(ast.ASTMemoryAllocator));
    var ast_scope = ast_memory_allocator.enter(allocator);
    defer ast_scope.exit();

    const input_value = callFrame.argument(0);

    if (input_value.isEmptyOrUndefinedOrNull()) {
        return global.throwInvalidArguments("Expected a string to parse", .{});
    }

    const input: jsc.Node.BlobOrStringOrBuffer =
        try jsc.Node.BlobOrStringOrBuffer.fromJS(global, allocator, input_value) orelse input: {
            var str = try input_value.toBunString(global);
            defer str.deref();
            break :input .{ .string_or_buffer = .{ .string = str.toSlice(allocator) } };
        };
    defer input.deinit();

    var log = logger.Log.init(bun.default_allocator);
    defer log.deinit();

    const source = &logger.Source.initPathString("input.json5", input.slice());

    const root = json5.JSON5Parser.parse(source, &log, allocator) catch |err| return switch (err) {
        error.OutOfMemory => |oom| oom,
        error.StackOverflow => global.throwStackOverflow(),
        else => {
            if (log.msgs.items.len > 0) {
                const first_msg = log.msgs.items[0];
                return global.throwValue(global.createSyntaxErrorInstance(
                    "JSON5 Parse error: {s}",
                    .{first_msg.data.text},
                ));
            }
            return global.throwValue(global.createSyntaxErrorInstance(
                "JSON5 Parse error: Unable to parse JSON5 string",
                .{},
            ));
        },
    };

    return exprToJS(root, global);
}
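// Illustrative JavaScript usage (values are hypothetical):
//   Bun.JSON5.parse("{ a: 1, /* comment */ b: 'two', }")  // => { a: 1, b: "two" }
//   Bun.JSON5.stringify({ n: Infinity }, null, 2)         // Infinity and NaN are emitted as-is
//   Bun.JSON5.stringify(value, replacerFn)                // throws: replacer is not supported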

const Stringifier = struct {
    stack_check: bun.StackCheck,
    builder: wtf.StringBuilder,
    indent: usize,
    space: Space,
    visiting: std.AutoHashMapUnmanaged(JSValue, void),
    allocator: std.mem.Allocator,

    const StringifyError = bun.JSError || bun.StackOverflow;

    const Space = union(enum) {
        minified,
        number: u32,
        str: bun.String,

        pub fn init(global: *jsc.JSGlobalObject, space_value: JSValue) bun.JSError!Space {
            const space = try space_value.unwrapBoxedPrimitive(global);
            if (space.isNumber()) {
                // Clamp on the float to match the spec's min(10, ToIntegerOrInfinity(space)).
                // toInt32() wraps large values and Infinity to 0, which is wrong.
                const num_f = space.asNumber();
                if (!(num_f >= 1)) return .minified; // handles NaN, -Infinity, 0, negatives
                return .{ .number = if (num_f > 10) 10 else @intFromFloat(num_f) };
            }
            if (space.isString()) {
                const str = try space.toBunString(global);
                if (str.length() == 0) {
                    str.deref();
                    return .minified;
                }
                return .{ .str = str };
            }
            return .minified;
        }
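        // Illustrative (not part of the original source): a numeric space of 99
        // clamps to 10, while "" / 0 / -1 / NaN all fall back to minified output.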
||||
|
||||
pub fn deinit(this: *const Space) void {
|
||||
switch (this.*) {
|
||||
.str => |str| str.deref(),
|
||||
.minified, .number => {},
|
||||
}
|
||||
}
|
||||
};
|
||||

    pub fn init(global: *jsc.JSGlobalObject, space_value: JSValue) bun.JSError!Stringifier {
        return .{
            .stack_check = .init(),
            .builder = .init(),
            .indent = 0,
            .space = try Space.init(global, space_value),
            .visiting = .empty,
            .allocator = bun.default_allocator,
        };
    }

    pub fn deinit(this: *Stringifier) void {
        this.builder.deinit();
        this.space.deinit();
        this.visiting.deinit(this.allocator);
    }

    pub fn stringifyValue(this: *Stringifier, global: *jsc.JSGlobalObject, value: JSValue) StringifyError!void {
        if (!this.stack_check.isSafeToRecurse()) {
            return error.StackOverflow;
        }

        const unwrapped = try value.unwrapBoxedPrimitive(global);

        if (unwrapped.isNull()) {
            this.builder.append(.latin1, "null");
            return;
        }

        if (unwrapped.isNumber()) {
            if (unwrapped.isInt32()) {
                this.builder.append(.int, unwrapped.asInt32());
                return;
            }
            const num = unwrapped.asNumber();
            if (std.math.isNegativeInf(num)) {
                this.builder.append(.latin1, "-Infinity");
            } else if (std.math.isInf(num)) {
                this.builder.append(.latin1, "Infinity");
            } else if (std.math.isNan(num)) {
                this.builder.append(.latin1, "NaN");
            } else {
                this.builder.append(.double, num);
            }
            return;
        }

        if (unwrapped.isBigInt()) {
            return global.throw("JSON5.stringify cannot serialize BigInt", .{});
        }

        if (unwrapped.isBoolean()) {
            this.builder.append(.latin1, if (unwrapped.asBoolean()) "true" else "false");
            return;
        }

        if (unwrapped.isString()) {
            const str = try unwrapped.toBunString(global);
            defer str.deref();
            this.appendQuotedString(str);
            return;
        }

        // Object or array — check for circular references
        const gop = try this.visiting.getOrPut(this.allocator, unwrapped);
        if (gop.found_existing) {
            return global.throw("Converting circular structure to JSON5", .{});
        }
        defer _ = this.visiting.remove(unwrapped);

        if (unwrapped.isArray()) {
            try this.stringifyArray(global, unwrapped);
        } else {
            try this.stringifyObject(global, unwrapped);
        }
    }
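stringifyValue gives JSON5 behaviors JSON.stringify lacks: non-finite numbers serialize as literals rather than null, while BigInt and circular structures throw. A usage sketch, again against a hypothetical `JSON5.stringify` entry point (the error messages below are the ones thrown by the code above):

```ts
declare const JSON5: { stringify(value: unknown, space?: number | string): string };

// Unlike JSON.stringify, which emits "null" for non-finite numbers,
// JSON5 represents them as literals:
JSON5.stringify([NaN, Infinity, -Infinity]); // "[NaN,Infinity,-Infinity]"

// BigInt is rejected rather than silently dropped:
JSON5.stringify(10n); // throws: "JSON5.stringify cannot serialize BigInt"

// Cycles are caught via the `visiting` set:
const a: Record<string, unknown> = {};
a.self = a;
JSON5.stringify(a); // throws: "Converting circular structure to JSON5"
```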

    fn stringifyArray(this: *Stringifier, global: *jsc.JSGlobalObject, value: JSValue) StringifyError!void {
        var iter = try value.arrayIterator(global);

        if (iter.len == 0) {
            this.builder.append(.latin1, "[]");
            return;
        }

        this.builder.append(.lchar, '[');

        switch (this.space) {
            .minified => {
                var first = true;
                while (try iter.next()) |item| {
                    if (!first) this.builder.append(.lchar, ',');
                    first = false;
                    if (item.isUndefined() or item.isSymbol() or item.isFunction()) {
                        this.builder.append(.latin1, "null");
                    } else {
                        try this.stringifyValue(global, item);
                    }
                }
            },
            .number, .str => {
                this.indent += 1;
                var first = true;
                while (try iter.next()) |item| {
                    if (!first) this.builder.append(.lchar, ',');
                    first = false;
                    this.newline();
                    if (item.isUndefined() or item.isSymbol() or item.isFunction()) {
                        this.builder.append(.latin1, "null");
                    } else {
                        try this.stringifyValue(global, item);
                    }
                }
                // Trailing comma
                this.builder.append(.lchar, ',');
                this.indent -= 1;
                this.newline();
            },
        }

        this.builder.append(.lchar, ']');
    }

    fn stringifyObject(this: *Stringifier, global: *jsc.JSGlobalObject, value: JSValue) StringifyError!void {
        var iter: jsc.JSPropertyIterator(.{ .skip_empty_name = false, .include_value = true }) = try .init(
            global,
            try value.toObject(global),
        );
        defer iter.deinit();

        if (iter.len == 0) {
            this.builder.append(.latin1, "{}");
            return;
        }

        this.builder.append(.lchar, '{');

        switch (this.space) {
            .minified => {
                var first = true;
                while (try iter.next()) |prop_name| {
                    if (iter.value.isUndefined() or iter.value.isSymbol() or iter.value.isFunction()) {
                        continue;
                    }
                    if (!first) this.builder.append(.lchar, ',');
                    first = false;
                    this.appendKey(prop_name);
                    this.builder.append(.lchar, ':');
                    try this.stringifyValue(global, iter.value);
                }
            },
            .number, .str => {
                this.indent += 1;
                var first = true;
                while (try iter.next()) |prop_name| {
                    if (iter.value.isUndefined() or iter.value.isSymbol() or iter.value.isFunction()) {
                        continue;
                    }
                    if (!first) this.builder.append(.lchar, ',');
                    first = false;
                    this.newline();
                    this.appendKey(prop_name);
                    this.builder.append(.latin1, ": ");
                    try this.stringifyValue(global, iter.value);
                }
                this.indent -= 1;
                if (!first) {
                    // Trailing comma
                    this.builder.append(.lchar, ',');
                    this.newline();
                }
            },
        }

        this.builder.append(.lchar, '}');
    }
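Two details from the container paths are worth noting: in spaced (pretty) mode the stringifier deliberately emits trailing commas, which JSON5 permits, and undefined/symbol/function values become null in arrays but are skipped entirely in objects, mirroring JSON.stringify. A sketch of the expected shape (same hypothetical `JSON5.stringify` as above):

```ts
// Pretty mode emits JSON5-legal trailing commas; minified mode does not.
JSON5.stringify({ a: [1, 2] }, 2);
// {
//   a: [
//     1,
//     2,
//   ],
// }

// Arrays hole-fill unsupported values with null; objects drop the key.
JSON5.stringify([undefined, () => {}]); // "[null,null]"
JSON5.stringify({ f: () => {}, x: 1 }); // "{x:1}"
```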

    fn appendKey(this: *Stringifier, name: bun.String) void {
        const is_identifier = is_identifier: {
            if (name.length() == 0) break :is_identifier false;
            if (!bun.js_lexer.isIdentifierStart(@intCast(name.charAt(0)))) break :is_identifier false;
            for (1..name.length()) |i| {
                if (!bun.js_lexer.isIdentifierContinue(@intCast(name.charAt(i)))) break :is_identifier false;
            }
            break :is_identifier true;
        };

        if (is_identifier) {
            this.builder.append(.string, name);
        } else {
            this.appendQuotedString(name);
        }
    }

    fn appendQuotedString(this: *Stringifier, str: bun.String) void {
        this.builder.append(.lchar, '\'');
        for (0..str.length()) |i| {
            const c = str.charAt(i);
            switch (c) {
                0x00 => this.builder.append(.latin1, "\\0"),
                0x08 => this.builder.append(.latin1, "\\b"),
                0x09 => this.builder.append(.latin1, "\\t"),
                0x0a => this.builder.append(.latin1, "\\n"),
                0x0b => this.builder.append(.latin1, "\\v"),
                0x0c => this.builder.append(.latin1, "\\f"),
                0x0d => this.builder.append(.latin1, "\\r"),
                0x27 => this.builder.append(.latin1, "\\'"), // single quote
                0x5c => this.builder.append(.latin1, "\\\\"), // backslash
                0x2028 => this.builder.append(.latin1, "\\u2028"),
                0x2029 => this.builder.append(.latin1, "\\u2029"),
                0x01...0x07, 0x0e...0x1f, 0x7f => {
                    // Other control chars → \xHH
                    this.builder.append(.latin1, "\\x");
                    this.builder.append(.lchar, hexDigit(c >> 4));
                    this.builder.append(.lchar, hexDigit(c & 0x0f));
                },
                else => this.builder.append(.uchar, c),
            }
        }
        this.builder.append(.lchar, '\'');
    }
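appendKey only quotes keys that are not valid identifiers, and appendQuotedString prefers single quotes, escaping the usual control characters plus U+2028/U+2029. A small sketch of the visible effect (hypothetical `JSON5.stringify` again):

```ts
// Identifier keys stay bare; everything else is single-quoted.
JSON5.stringify({ plain: 1, "needs quotes": 2 });
// "{plain:1,'needs quotes':2}"

// Strings are single-quoted, with named escapes where they exist
// and \xHH for other bare control characters.
JSON5.stringify("it's\n\x01");
// "'it\\'s\\n\\x01'"
```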

    fn hexDigit(v: u16) u8 {
        const nibble: u8 = @intCast(v & 0x0f);
        return if (nibble < 10) '0' + nibble else 'a' + nibble - 10;
    }

    fn newline(this: *Stringifier) void {
        switch (this.space) {
            .minified => {},
            .number => |space_num| {
                this.builder.append(.lchar, '\n');
                for (0..this.indent * space_num) |_| {
                    this.builder.append(.lchar, ' ');
                }
            },
            .str => |space_str| {
                this.builder.append(.lchar, '\n');
                const clamped = if (space_str.length() > 10)
                    space_str.substringWithLen(0, 10)
                else
                    space_str;
                for (0..this.indent) |_| {
                    this.builder.append(.string, clamped);
                }
            },
        }
    }
};
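When the space argument is a string, newline() repeats it once per indent level after clamping it to its first 10 characters, which matches JSON.stringify's specified behavior. Sketch:

```ts
// Only the first 10 characters of a string space are used per indent level.
JSON5.stringify({ a: 1 }, "-> way too long ->");
// indent unit is "-> way too", i.e. the space string clamped to length 10
```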

fn exprToJS(expr: Expr, global: *jsc.JSGlobalObject) bun.JSError!jsc.JSValue {
    switch (expr.data) {
        .e_null => return .null,
        .e_boolean => |boolean| return .jsBoolean(boolean.value),
        .e_number => |number| return .jsNumber(number.value),
        .e_string => |str| {
            return str.toJS(bun.default_allocator, global);
        },
        .e_array => |arr| {
            var js_arr = try JSValue.createEmptyArray(global, arr.items.len);
            for (arr.slice(), 0..) |item, _i| {
                const i: u32 = @intCast(_i);
                const value = try exprToJS(item, global);
                try js_arr.putIndex(global, i, value);
            }
            return js_arr;
        },
        .e_object => |obj| {
            var js_obj = JSValue.createEmptyObject(global, obj.properties.len);
            for (obj.properties.slice()) |prop| {
                const key_expr = prop.key.?;
                const value = try exprToJS(prop.value.?, global);
                const key_js = try exprToJS(key_expr, global);
                const key_str = try key_js.toBunString(global);
                defer key_str.deref();
                try js_obj.putMayBeIndex(global, &key_str, value);
            }
            return js_obj;
        },
        else => return .js_undefined,
    }
}
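exprToJS is the bridge from the parser's AST back into JS values: null, booleans, numbers, and strings map directly, arrays and objects recurse, and anything else collapses to undefined. A round-trip sketch with the same hypothetical binding:

```ts
const v = JSON5.parse("{ list: [true, 1.5, 'x'], nested: { ok: null } }") as {
  list: [boolean, number, string];
  nested: { ok: null };
};
v.list[0];   // true   (e_boolean)
v.list[1];   // 1.5    (e_number)
v.list[2];   // "x"    (e_string)
v.nested.ok; // null   (e_null)
```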

const std = @import("std");

const bun = @import("bun");
const logger = bun.logger;
const json5 = bun.interchange.json5;

const ast = bun.ast;
const Expr = ast.Expr;

const jsc = bun.jsc;
const JSValue = jsc.JSValue;
const ZigString = jsc.ZigString;
const wtf = jsc.wtf;
1135	src/bun.js/api/MarkdownObject.zig (new file, diff suppressed because it is too large)
@@ -75,17 +75,17 @@ const Stringifier = struct {
         str: String,
 
         pub fn init(global: *JSGlobalObject, space_value: JSValue) JSError!Space {
-            if (space_value.isNumber()) {
-                var num = space_value.toInt32();
-                num = @max(0, @min(num, 10));
-                if (num == 0) {
-                    return .minified;
-                }
-                return .{ .number = @intCast(num) };
+            const space = try space_value.unwrapBoxedPrimitive(global);
+            if (space.isNumber()) {
+                // Clamp on the float to match the spec's min(10, ToIntegerOrInfinity(space)).
+                // toInt32() wraps large values and Infinity to 0, which is wrong.
+                const num_f = space.asNumber();
+                if (!(num_f >= 1)) return .minified; // handles NaN, -Infinity, 0, negatives
+                return .{ .number = if (num_f > 10) 10 else @intFromFloat(num_f) };
             }
 
-            if (space_value.isString()) {
-                const str = try space_value.toBunString(global);
+            if (space.isString()) {
+                const str = try space.toBunString(global);
                 if (str.length() == 0) {
                     str.deref();
                     return .minified;
@@ -490,6 +490,9 @@ const Stringifier = struct {
                     try this.stringify(global, iter.value);
                     this.indent -= 1;
                 }
+                if (first) {
+                    this.builder.append(.latin1, "{}");
+                }
             },
         }
     }
@@ -1129,10 +1129,16 @@ pub const H2FrameParser = struct {
         return stream;
     }
 
+    /// Returns true if the stream can still receive data from the remote peer.
+    /// Per RFC 7540 Section 5.1:
+    /// - OPEN: both endpoints can send and receive
+    /// - HALF_CLOSED_LOCAL: local sent END_STREAM, but can still receive from remote
+    /// - HALF_CLOSED_REMOTE: remote sent END_STREAM, no more data to receive
+    /// - CLOSED: stream is finished
     pub fn canReceiveData(this: *Stream) bool {
         return switch (this.state) {
-            .IDLE, .RESERVED_LOCAL, .RESERVED_REMOTE, .OPEN, .HALF_CLOSED_LOCAL => false,
-            .HALF_CLOSED_REMOTE, .CLOSED => true,
+            .IDLE, .RESERVED_LOCAL, .RESERVED_REMOTE, .OPEN, .HALF_CLOSED_LOCAL => true,
+            .HALF_CLOSED_REMOTE, .CLOSED => false,
         };
     }
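The fix inverts a boolean table that had its two halves swapped. As a cross-check, here is the same predicate written out against RFC 7540 §5.1 in TypeScript (state names mirror the Zig enum; this is an illustrative sketch, not code from the repo):

```ts
type StreamState =
  | "IDLE" | "RESERVED_LOCAL" | "RESERVED_REMOTE"
  | "OPEN" | "HALF_CLOSED_LOCAL" | "HALF_CLOSED_REMOTE" | "CLOSED";

// A stream can receive DATA until the *remote* side has sent END_STREAM.
function canReceiveData(state: StreamState): boolean {
  switch (state) {
    case "IDLE":
    case "RESERVED_LOCAL":
    case "RESERVED_REMOTE":
    case "OPEN":
    case "HALF_CLOSED_LOCAL": // we closed our side; remote can still send
      return true;
    case "HALF_CLOSED_REMOTE": // remote already sent END_STREAM
    case "CLOSED":
      return false;
  }
}
```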
@@ -84,6 +84,7 @@ pub const ProcessExitHandler = struct {
         LifecycleScriptSubprocess,
         ShellSubprocess,
         ProcessHandle,
+        MultiRunProcessHandle,
         SecurityScanSubprocess,
         SyncProcess,
     },
@@ -111,6 +112,10 @@ pub const ProcessExitHandler = struct {
             const subprocess = this.ptr.as(ProcessHandle);
             subprocess.onProcessExit(process, status, rusage);
         },
+        @field(TaggedPointer.Tag, @typeName(MultiRunProcessHandle)) => {
+            const subprocess = this.ptr.as(MultiRunProcessHandle);
+            subprocess.onProcessExit(process, status, rusage);
+        },
         @field(TaggedPointer.Tag, @typeName(ShellSubprocess)) => {
             const subprocess = this.ptr.as(ShellSubprocess);
             subprocess.onProcessExit(process, status, rusage);
@@ -2251,6 +2256,7 @@ pub const sync = struct {
 };
 
 const std = @import("std");
+const MultiRunProcessHandle = @import("../../../cli/multi_run.zig").ProcessHandle;
 const ProcessHandle = @import("../../../cli/filter_run.zig").ProcessHandle;
 
 const bun = @import("bun");
@@ -910,8 +910,8 @@ fn HandlerCallback(
             wrapper.deref();
         }
 
-        // Use a CatchScope to properly handle exceptions from the JavaScript callback
-        var scope: bun.jsc.CatchScope = undefined;
+        // Use a TopExceptionScope to properly handle exceptions from the JavaScript callback
+        var scope: bun.jsc.TopExceptionScope = undefined;
         scope.init(this.global, @src());
         defer scope.deinit();
@@ -1489,7 +1489,7 @@ pub fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool,
                 const path = blob.store.?.data.s3.path();
                 const env = globalThis.bunVM().transpiler.env;
 
-                S3.stat(credentials, path, @ptrCast(&onS3SizeResolved), this, if (env.getHttpProxy(true, null)) |proxy| proxy.href else null, blob.store.?.data.s3.request_payer) catch {}; // TODO: properly propagate exception upwards
+                S3.stat(credentials, path, @ptrCast(&onS3SizeResolved), this, if (env.getHttpProxy(true, null, null)) |proxy| proxy.href else null, blob.store.?.data.s3.request_payer) catch {}; // TODO: properly propagate exception upwards
                 return;
             }
             this.renderMetadata();
@@ -1871,6 +1871,9 @@ pub fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool,
             switch (stream.ptr) {
                 .Invalid => {
+                    this.response_body_readable_stream_ref.deinit();
+                    // Stream is invalid, render empty body
+                    this.doRenderBlob();
                     return;
                 },
                 // toBlobIfPossible will typically convert .Blob streams, or .File streams into a Blob object, but cannot always.
                 .Blob,
@@ -73,7 +73,7 @@ pub const AnyPromise = union(enum) {
                 }
             };
 
-            var scope: jsc.CatchScope = undefined;
+            var scope: jsc.TopExceptionScope = undefined;
             scope.init(globalObject, @src());
             defer scope.deinit();
             var ctx = Wrapper{ .args = args };
@@ -43,6 +43,11 @@ JSValue AsyncContextFrame::withAsyncContextIfNeeded(JSGlobalObject* globalObject
         return callback;
     }
 
+    // If already wrapped in an AsyncContextFrame, return as-is to avoid double-wrapping.
+    if (jsDynamicCast<AsyncContextFrame*>(callback)) {
+        return callback;
+    }
+
     // Construct a low-overhead wrapper
     auto& vm = JSC::getVM(globalObject);
     return AsyncContextFrame::create(
337	src/bun.js/bindings/BunAnalyzeTranspiledModule.cpp (new file)
@@ -0,0 +1,337 @@
#include "root.h"

#include "JavaScriptCore/JSInternalPromise.h"
#include "JavaScriptCore/JSModuleRecord.h"
#include "JavaScriptCore/GlobalObjectMethodTable.h"
#include "JavaScriptCore/Nodes.h"
#include "JavaScriptCore/Parser.h"
#include "JavaScriptCore/ParserError.h"
#include "JavaScriptCore/SyntheticModuleRecord.h"
#include <wtf/text/MakeString.h>
#include "JavaScriptCore/JSGlobalObject.h"
#include "JavaScriptCore/ExceptionScope.h"
#include "ZigSourceProvider.h"
#include "BunAnalyzeTranspiledModule.h"

// ref: JSModuleLoader.cpp
// ref: ModuleAnalyzer.cpp
// ref: JSModuleRecord.cpp
// ref: NodesAnalyzeModule.cpp, search ::analyzeModule

#include "JavaScriptCore/ModuleAnalyzer.h"
#include "JavaScriptCore/ErrorType.h"

namespace JSC {

String dumpRecordInfo(JSModuleRecord* moduleRecord);

Identifier getFromIdentifierArray(VM& vm, Identifier* identifierArray, uint32_t n)
{
    if (n == std::numeric_limits<uint32_t>::max()) {
        return vm.propertyNames->starDefaultPrivateName;
    }
    return identifierArray[n];
}

extern "C" JSModuleRecord* zig__ModuleInfoDeserialized__toJSModuleRecord(JSGlobalObject* globalObject, VM& vm, const Identifier& module_key, const SourceCode& source_code, VariableEnvironment& declared_variables, VariableEnvironment& lexical_variables, bun_ModuleInfoDeserialized* module_info);
extern "C" void zig__renderDiff(const char* expected_ptr, size_t expected_len, const char* received_ptr, size_t received_len, JSGlobalObject* globalObject);

extern "C" Identifier* JSC__IdentifierArray__create(size_t len)
{
    return new Identifier[len];
}
extern "C" void JSC__IdentifierArray__destroy(Identifier* identifier)
{
    delete[] identifier;
}
extern "C" void JSC__IdentifierArray__setFromUtf8(Identifier* identifierArray, size_t n, VM& vm, char* str, size_t len)
{
    identifierArray[n] = Identifier::fromString(vm, AtomString::fromUTF8(std::span<const char>(str, len)));
}

extern "C" void JSC__VariableEnvironment__add(VariableEnvironment& environment, VM& vm, Identifier* identifierArray, uint32_t index)
{
    environment.add(getFromIdentifierArray(vm, identifierArray, index));
}

extern "C" VariableEnvironment* JSC_JSModuleRecord__declaredVariables(JSModuleRecord* moduleRecord)
{
    return const_cast<VariableEnvironment*>(&moduleRecord->declaredVariables());
}
extern "C" VariableEnvironment* JSC_JSModuleRecord__lexicalVariables(JSModuleRecord* moduleRecord)
{
    return const_cast<VariableEnvironment*>(&moduleRecord->lexicalVariables());
}

extern "C" JSModuleRecord* JSC_JSModuleRecord__create(JSGlobalObject* globalObject, VM& vm, const Identifier* moduleKey, const SourceCode& sourceCode, const VariableEnvironment& declaredVariables, const VariableEnvironment& lexicalVariables, bool hasImportMeta, bool isTypescript)
{
    JSModuleRecord* result = JSModuleRecord::create(globalObject, vm, globalObject->moduleRecordStructure(), *moduleKey, sourceCode, declaredVariables, lexicalVariables, hasImportMeta ? ImportMetaFeature : 0);
    result->m_isTypeScript = isTypescript;
    return result;
}

extern "C" void JSC_JSModuleRecord__addIndirectExport(JSModuleRecord* moduleRecord, Identifier* identifierArray, uint32_t exportName, uint32_t importName, uint32_t moduleName)
{
    moduleRecord->addExportEntry(JSModuleRecord::ExportEntry::createIndirect(getFromIdentifierArray(moduleRecord->vm(), identifierArray, exportName), getFromIdentifierArray(moduleRecord->vm(), identifierArray, importName), getFromIdentifierArray(moduleRecord->vm(), identifierArray, moduleName)));
}
extern "C" void JSC_JSModuleRecord__addLocalExport(JSModuleRecord* moduleRecord, Identifier* identifierArray, uint32_t exportName, uint32_t localName)
{
    moduleRecord->addExportEntry(JSModuleRecord::ExportEntry::createLocal(getFromIdentifierArray(moduleRecord->vm(), identifierArray, exportName), getFromIdentifierArray(moduleRecord->vm(), identifierArray, localName)));
}
extern "C" void JSC_JSModuleRecord__addNamespaceExport(JSModuleRecord* moduleRecord, Identifier* identifierArray, uint32_t exportName, uint32_t moduleName)
{
    moduleRecord->addExportEntry(JSModuleRecord::ExportEntry::createNamespace(getFromIdentifierArray(moduleRecord->vm(), identifierArray, exportName), getFromIdentifierArray(moduleRecord->vm(), identifierArray, moduleName)));
}
extern "C" void JSC_JSModuleRecord__addStarExport(JSModuleRecord* moduleRecord, Identifier* identifierArray, uint32_t moduleName)
{
    moduleRecord->addStarExportEntry(getFromIdentifierArray(moduleRecord->vm(), identifierArray, moduleName));
}
extern "C" void JSC_JSModuleRecord__addRequestedModuleNullAttributesPtr(JSModuleRecord* moduleRecord, Identifier* identifierArray, uint32_t moduleName)
{
    RefPtr<ScriptFetchParameters> attributes = RefPtr<ScriptFetchParameters> {};
    moduleRecord->appendRequestedModule(getFromIdentifierArray(moduleRecord->vm(), identifierArray, moduleName), std::move(attributes));
}
extern "C" void JSC_JSModuleRecord__addRequestedModuleJavaScript(JSModuleRecord* moduleRecord, Identifier* identifierArray, uint32_t moduleName)
{
    Ref<ScriptFetchParameters> attributes = ScriptFetchParameters::create(ScriptFetchParameters::Type::JavaScript);
    moduleRecord->appendRequestedModule(getFromIdentifierArray(moduleRecord->vm(), identifierArray, moduleName), std::move(attributes));
}
extern "C" void JSC_JSModuleRecord__addRequestedModuleWebAssembly(JSModuleRecord* moduleRecord, Identifier* identifierArray, uint32_t moduleName)
{
    Ref<ScriptFetchParameters> attributes = ScriptFetchParameters::create(ScriptFetchParameters::Type::WebAssembly);
    moduleRecord->appendRequestedModule(getFromIdentifierArray(moduleRecord->vm(), identifierArray, moduleName), std::move(attributes));
}
extern "C" void JSC_JSModuleRecord__addRequestedModuleJSON(JSModuleRecord* moduleRecord, Identifier* identifierArray, uint32_t moduleName)
{
    Ref<ScriptFetchParameters> attributes = ScriptFetchParameters::create(ScriptFetchParameters::Type::JSON);
    moduleRecord->appendRequestedModule(getFromIdentifierArray(moduleRecord->vm(), identifierArray, moduleName), std::move(attributes));
}
extern "C" void JSC_JSModuleRecord__addRequestedModuleHostDefined(JSModuleRecord* moduleRecord, Identifier* identifierArray, uint32_t moduleName, uint32_t hostDefinedImportType)
{
    Ref<ScriptFetchParameters> attributes = ScriptFetchParameters::create(getFromIdentifierArray(moduleRecord->vm(), identifierArray, hostDefinedImportType).string());
    moduleRecord->appendRequestedModule(getFromIdentifierArray(moduleRecord->vm(), identifierArray, moduleName), std::move(attributes));
}

extern "C" void JSC_JSModuleRecord__addImportEntrySingle(JSModuleRecord* moduleRecord, Identifier* identifierArray, uint32_t importName, uint32_t localName, uint32_t moduleName)
{
    moduleRecord->addImportEntry(JSModuleRecord::ImportEntry {
        .type = JSModuleRecord::ImportEntryType::Single,
        .moduleRequest = getFromIdentifierArray(moduleRecord->vm(), identifierArray, moduleName),
        .importName = getFromIdentifierArray(moduleRecord->vm(), identifierArray, importName),
        .localName = getFromIdentifierArray(moduleRecord->vm(), identifierArray, localName),
    });
}
extern "C" void JSC_JSModuleRecord__addImportEntrySingleTypeScript(JSModuleRecord* moduleRecord, Identifier* identifierArray, uint32_t importName, uint32_t localName, uint32_t moduleName)
{
    moduleRecord->addImportEntry(JSModuleRecord::ImportEntry {
        .type = JSModuleRecord::ImportEntryType::SingleTypeScript,
        .moduleRequest = getFromIdentifierArray(moduleRecord->vm(), identifierArray, moduleName),
        .importName = getFromIdentifierArray(moduleRecord->vm(), identifierArray, importName),
        .localName = getFromIdentifierArray(moduleRecord->vm(), identifierArray, localName),
    });
}
extern "C" void JSC_JSModuleRecord__addImportEntryNamespace(JSModuleRecord* moduleRecord, Identifier* identifierArray, uint32_t importName, uint32_t localName, uint32_t moduleName)
{
    moduleRecord->addImportEntry(JSModuleRecord::ImportEntry {
        .type = JSModuleRecord::ImportEntryType::Namespace,
        .moduleRequest = getFromIdentifierArray(moduleRecord->vm(), identifierArray, moduleName),
        .importName = getFromIdentifierArray(moduleRecord->vm(), identifierArray, importName),
        .localName = getFromIdentifierArray(moduleRecord->vm(), identifierArray, localName),
    });
}

static EncodedJSValue fallbackParse(JSGlobalObject* globalObject, const Identifier& moduleKey, const SourceCode& sourceCode, JSInternalPromise* promise, JSModuleRecord* resultValue = nullptr);
extern "C" EncodedJSValue Bun__analyzeTranspiledModule(JSGlobalObject* globalObject, const Identifier& moduleKey, const SourceCode& sourceCode, JSInternalPromise* promise)
{
    VM& vm = globalObject->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);

    auto rejectWithError = [&](JSValue error) {
        promise->reject(vm, globalObject, error);
        return promise;
    };

    VariableEnvironment declaredVariables = VariableEnvironment();
    VariableEnvironment lexicalVariables = VariableEnvironment();

    auto provider = static_cast<Zig::SourceProvider*>(sourceCode.provider());

    if (provider->m_resolvedSource.module_info == nullptr) {
        dataLog("[note] module_info is null for module: ", moduleKey.utf8(), "\n");
        RELEASE_AND_RETURN(scope, JSValue::encode(rejectWithError(createError(globalObject, WTF::String::fromLatin1("module_info is null")))));
    }

    auto moduleRecord = zig__ModuleInfoDeserialized__toJSModuleRecord(globalObject, vm, moduleKey, sourceCode, declaredVariables, lexicalVariables, static_cast<bun_ModuleInfoDeserialized*>(provider->m_resolvedSource.module_info));
    // zig__ModuleInfoDeserialized__toJSModuleRecord consumes and frees the module_info.
    // Null it out to prevent use-after-free via the dangling pointer.
    provider->m_resolvedSource.module_info = nullptr;
    if (moduleRecord == nullptr) {
        RELEASE_AND_RETURN(scope, JSValue::encode(rejectWithError(createError(globalObject, WTF::String::fromLatin1("parseFromSourceCode failed")))));
    }

#if BUN_DEBUG
    RELEASE_AND_RETURN(scope, fallbackParse(globalObject, moduleKey, sourceCode, promise, moduleRecord));
#else
    promise->resolve(globalObject, moduleRecord);
    RELEASE_AND_RETURN(scope, JSValue::encode(promise));
#endif
}
static EncodedJSValue fallbackParse(JSGlobalObject* globalObject, const Identifier& moduleKey, const SourceCode& sourceCode, JSInternalPromise* promise, JSModuleRecord* resultValue)
{
    VM& vm = globalObject->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    auto rejectWithError = [&](JSValue error) {
        promise->reject(vm, globalObject, error);
        return promise;
    };

    ParserError error;
    std::unique_ptr<ModuleProgramNode> moduleProgramNode = parseRootNode<ModuleProgramNode>(
        vm, sourceCode, ImplementationVisibility::Public, JSParserBuiltinMode::NotBuiltin,
        StrictModeLexicallyScopedFeature, JSParserScriptMode::Module, SourceParseMode::ModuleAnalyzeMode, error);
    if (error.isValid())
        RELEASE_AND_RETURN(scope, JSValue::encode(rejectWithError(error.toErrorObject(globalObject, sourceCode))));
    ASSERT(moduleProgramNode);

    ModuleAnalyzer moduleAnalyzer(globalObject, moduleKey, sourceCode, moduleProgramNode->varDeclarations(), moduleProgramNode->lexicalVariables(), moduleProgramNode->features());
    RETURN_IF_EXCEPTION(scope, JSValue::encode(promise->rejectWithCaughtException(globalObject, scope)));

    auto result = moduleAnalyzer.analyze(*moduleProgramNode);
    if (!result) {
        auto [errorType, message] = std::move(result.error());
        RELEASE_AND_RETURN(scope, JSValue::encode(rejectWithError(createError(globalObject, errorType, message))));
    }

    JSModuleRecord* moduleRecord = result.value();

    if (resultValue != nullptr) {
        auto actual = dumpRecordInfo(resultValue);
        auto expected = dumpRecordInfo(moduleRecord);
        if (actual != expected) {
            dataLog("\n\n\n\n\n\n\x1b[95mBEGIN analyzeTranspiledModule\x1b(B\x1b[m\n --- module key ---\n", moduleKey.utf8().data(), "\n --- code ---\n\n", sourceCode.toUTF8().data(), "\n");
            dataLog(" ------", "\n");
            dataLog(" BunAnalyzeTranspiledModule:", "\n");

            zig__renderDiff(expected.utf8().data(), expected.utf8().length(), actual.utf8().data(), actual.utf8().length(), globalObject);

            RELEASE_AND_RETURN(scope, JSValue::encode(rejectWithError(createError(globalObject, WTF::String::fromLatin1("Imports different between parseFromSourceCode and fallbackParse")))));
        }
    }

    scope.release();
    promise->resolve(globalObject, resultValue == nullptr ? moduleRecord : resultValue);
    return JSValue::encode(promise);
}

String dumpRecordInfo(JSModuleRecord* moduleRecord)
{
    WTF::StringPrintStream stream;

    {
        Vector<String> sortedVars;
        for (const auto& pair : moduleRecord->declaredVariables())
            sortedVars.append(String(pair.key.get()));
        std::sort(sortedVars.begin(), sortedVars.end(), [](const String& a, const String& b) {
            return codePointCompare(a, b) < 0;
        });
        stream.print(" varDeclarations:\n");
        for (const auto& name : sortedVars)
            stream.print(" - ", name, "\n");
    }

    {
        Vector<String> sortedVars;
        for (const auto& pair : moduleRecord->lexicalVariables())
            sortedVars.append(String(pair.key.get()));
        std::sort(sortedVars.begin(), sortedVars.end(), [](const String& a, const String& b) {
            return codePointCompare(a, b) < 0;
        });
        stream.print(" lexicalVariables:\n");
        for (const auto& name : sortedVars)
            stream.print(" - ", name, "\n");
    }

    stream.print(" features: (not accessible)\n");

    stream.print("\nAnalyzing ModuleRecord key(", moduleRecord->moduleKey().impl(), ")\n");

    stream.print(" Dependencies: ", moduleRecord->requestedModules().size(), " modules\n");
    {
        Vector<String> sortedDeps;
        for (const auto& request : moduleRecord->requestedModules()) {
            WTF::StringPrintStream line;
            if (request.m_attributes == nullptr)
                line.print(" module(", request.m_specifier, ")\n");
            else
                line.print(" module(", request.m_specifier, "),attributes(", (uint8_t)request.m_attributes->type(), ", ", request.m_attributes->hostDefinedImportType(), ")\n");
            sortedDeps.append(line.toString());
        }
        std::sort(sortedDeps.begin(), sortedDeps.end(), [](const String& a, const String& b) {
            return codePointCompare(a, b) < 0;
        });
        for (const auto& dep : sortedDeps)
            stream.print(dep);
    }

    stream.print(" Import: ", moduleRecord->importEntries().size(), " entries\n");
    {
        Vector<String> sortedImports;
        for (const auto& pair : moduleRecord->importEntries()) {
            WTF::StringPrintStream line;
            auto& importEntry = pair.value;
            line.print(" import(", importEntry.importName, "), local(", importEntry.localName, "), module(", importEntry.moduleRequest, ")\n");
            sortedImports.append(line.toString());
        }
        std::sort(sortedImports.begin(), sortedImports.end(), [](const String& a, const String& b) {
            return codePointCompare(a, b) < 0;
        });
        for (const auto& imp : sortedImports)
            stream.print(imp);
    }

    stream.print(" Export: ", moduleRecord->exportEntries().size(), " entries\n");
    Vector<String> sortedEntries;
    for (const auto& pair : moduleRecord->exportEntries()) {
        WTF::StringPrintStream line;
        auto& exportEntry = pair.value;
        switch (exportEntry.type) {
        case AbstractModuleRecord::ExportEntry::Type::Local:
            line.print(" [Local] ", "export(", exportEntry.exportName, "), local(", exportEntry.localName, ")\n");
            break;

        case AbstractModuleRecord::ExportEntry::Type::Indirect:
            line.print(" [Indirect] ", "export(", exportEntry.exportName, "), import(", exportEntry.importName, "), module(", exportEntry.moduleName, ")\n");
            break;

        case AbstractModuleRecord::ExportEntry::Type::Namespace:
            line.print(" [Namespace] ", "export(", exportEntry.exportName, "), module(", exportEntry.moduleName, ")\n");
            break;
        }
        sortedEntries.append(line.toString());
    }
    std::sort(sortedEntries.begin(), sortedEntries.end(), [](const String& a, const String& b) {
        return codePointCompare(a, b) < 0;
    });
    for (const auto& entry : sortedEntries)
        stream.print(entry);

    {
        Vector<String> sortedStarExports;
        for (const auto& moduleName : moduleRecord->starExportEntries()) {
            WTF::StringPrintStream line;
            line.print(" [Star] module(", moduleName.get(), ")\n");
            sortedStarExports.append(line.toString());
        }
        std::sort(sortedStarExports.begin(), sortedStarExports.end(), [](const String& a, const String& b) {
            return codePointCompare(a, b) < 0;
        });
        for (const auto& entry : sortedStarExports)
            stream.print(entry);
    }

    stream.print(" -> done\n");

    return stream.toString();
}

}
1	src/bun.js/bindings/BunAnalyzeTranspiledModule.h (new file)
@@ -0,0 +1 @@
struct bun_ModuleInfoDeserialized;
@@ -19,6 +19,12 @@
 
 extern "C" void Bun__startCPUProfiler(JSC::VM* vm);
 extern "C" void Bun__stopCPUProfiler(JSC::VM* vm, BunString* outJSON, BunString* outText);
+extern "C" void Bun__setSamplingInterval(int intervalMicroseconds);
+
+void Bun__setSamplingInterval(int intervalMicroseconds)
+{
+    Bun::setSamplingInterval(intervalMicroseconds);
+}
 
 namespace Bun {
@@ -3,11 +3,17 @@ pub const CPUProfilerConfig = struct {
     dir: []const u8,
     md_format: bool = false,
     json_format: bool = false,
+    interval: u32 = 1000,
 };
 
 // C++ function declarations
 extern fn Bun__startCPUProfiler(vm: *jsc.VM) void;
 extern fn Bun__stopCPUProfiler(vm: *jsc.VM, outJSON: ?*bun.String, outText: ?*bun.String) void;
+extern fn Bun__setSamplingInterval(intervalMicroseconds: c_int) void;
 
+pub fn setSamplingInterval(interval: u32) void {
+    Bun__setSamplingInterval(@intCast(interval));
+}
+
 pub fn startCPUProfiler(vm: *jsc.VM) void {
     Bun__startCPUProfiler(vm);
@@ -61,7 +67,7 @@ fn writeProfileToFile(profile_string: bun.String, config: CPUProfilerConfig, is_
         const errno = err.getErrno();
         if (errno == .NOENT or errno == .PERM or errno == .ACCES) {
             if (config.dir.len > 0) {
-                bun.makePath(bun.FD.cwd().stdDir(), config.dir) catch {};
+                bun.FD.cwd().makePath(u8, config.dir) catch {};
                 // Retry write
                 const retry_result = bun.sys.File.writeFile(bun.FD.cwd(), output_path_os, profile_slice.slice());
                 if (retry_result.asErr()) |_| {
@@ -95,7 +101,7 @@ fn buildOutputPath(path: *bun.AutoAbsPath, config: CPUProfilerConfig, is_md_form
 
     // Append directory if specified
     if (config.dir.len > 0) {
-        path.append(config.dir);
+        path.join(&.{config.dir});
     }
 
     // Append filename
@@ -578,7 +578,7 @@ extern "C" void Bun__startJSDebuggerThread(Zig::GlobalObject* debuggerGlobalObje
     debuggerScriptExecutionContext = debuggerGlobalObject->scriptExecutionContext();
 
     JSC::VM& vm = debuggerGlobalObject->vm();
-    auto scope = DECLARE_CATCH_SCOPE(vm);
+    auto scope = DECLARE_TOP_EXCEPTION_SCOPE(vm);
     JSValue defaultValue = debuggerGlobalObject->internalModuleRegistry()->requireId(debuggerGlobalObject, vm, InternalModuleRegistry::Field::InternalDebugger);
     scope.assertNoException();
     JSFunction* debuggerDefaultFn = jsCast<JSFunction*>(defaultValue.asCell());
961	src/bun.js/bindings/BunHeapProfiler.cpp (new file)
@@ -0,0 +1,961 @@
#include "root.h"
#include "BunHeapProfiler.h"
#include "headers-handwritten.h"
#include <JavaScriptCore/HeapProfiler.h>
#include <JavaScriptCore/HeapSnapshotBuilder.h>
#include <JavaScriptCore/BunV8HeapSnapshotBuilder.h>
#include <JavaScriptCore/VM.h>
#include <JavaScriptCore/JSGlobalObject.h>
#include <JavaScriptCore/JSONObject.h>
#include <wtf/text/StringBuilder.h>
#include <wtf/HashMap.h>
#include <wtf/HashSet.h>
#include <wtf/Vector.h>
#include <wtf/JSONValues.h>
#include <algorithm>

namespace Bun {

// Type aliases for hash containers that allow 0 as a valid key
// (heap node IDs can be 0 for the root node)
template<typename V>
using NodeIdHashMap = WTF::HashMap<uint64_t, V, WTF::DefaultHash<uint64_t>, WTF::UnsignedWithZeroKeyHashTraits<uint64_t>>;
using NodeIdHashSet = WTF::HashSet<uint64_t, WTF::DefaultHash<uint64_t>, WTF::UnsignedWithZeroKeyHashTraits<uint64_t>>;

BunString toStringRef(const WTF::String& wtfString);

// Node data parsed from snapshot
struct NodeData {
    uint64_t id;
    size_t size;
    int classNameIndex;
    int flags;
    int labelIndex { -1 };
    size_t retainedSize { 0 };
    bool isGCRoot { false };
    bool isInternal { false };
};

// Edge data parsed from snapshot
struct EdgeData {
    uint64_t fromId;
    uint64_t toId;
    int typeIndex;
    int dataIndex;
};

// Type statistics for summary
struct TypeStats {
    WTF::String name;
    size_t totalSize { 0 };
    size_t totalRetainedSize { 0 };
    size_t count { 0 };
    size_t largestRetained { 0 };
    uint64_t largestInstanceId { 0 };
};

// Escape string for safe output (replace newlines, tabs, etc.)
static WTF::String escapeString(const WTF::String& str)
{
    if (str.isEmpty())
        return str;

    WTF::StringBuilder sb;
    for (unsigned i = 0; i < str.length(); i++) {
        UChar c = str[i];
        if (c == '\n')
            sb.append("\\n"_s);
        else if (c == '\r')
            sb.append("\\r"_s);
        else if (c == '\t')
            sb.append("\\t"_s);
        else if (c == '\\')
            sb.append("\\\\"_s);
        else if (c == '"')
            sb.append("\\\""_s);
        else if (c == '|')
            sb.append("\\|"_s);
        else if (c == '`')
            sb.append("\\`"_s); // escape backticks to avoid breaking markdown code spans
        else if (c < 32 || c == 127)
            continue; // skip control characters
        else
            sb.append(c);
    }
    return sb.toString();
}

// Format bytes nicely for human-readable sections
static WTF::String formatBytes(size_t bytes)
{
    WTF::StringBuilder sb;
    if (bytes < 1024) {
        sb.append(bytes);
        sb.append(" B"_s);
    } else if (bytes < 1024 * 1024) {
        sb.append(bytes / 1024);
        sb.append("."_s);
        sb.append((bytes % 1024) * 10 / 1024);
        sb.append(" KB"_s);
    } else if (bytes < 1024ULL * 1024 * 1024) {
        sb.append(bytes / (1024 * 1024));
        sb.append("."_s);
        sb.append((bytes % (1024 * 1024)) * 10 / (1024 * 1024));
        sb.append(" MB"_s);
    } else {
        sb.append(bytes / (1024ULL * 1024 * 1024));
        sb.append("."_s);
        sb.append((bytes % (1024ULL * 1024 * 1024)) * 10 / (1024ULL * 1024 * 1024));
        sb.append(" GB"_s);
    }
    return sb.toString();
}
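formatBytes avoids floating point entirely: the single decimal digit comes from `(remainder * 10) / unit` in integer arithmetic. The same trick in TypeScript (illustrative only):

```ts
// One decimal place without floats: 1536 B -> "1.5 KB"
function formatBytes(bytes: number): string {
  const units: Array<[string, number]> = [
    ["GB", 1024 ** 3],
    ["MB", 1024 ** 2],
    ["KB", 1024],
  ];
  for (const [name, unit] of units) {
    if (bytes >= unit) {
      const whole = Math.floor(bytes / unit);
      const tenth = Math.floor(((bytes % unit) * 10) / unit); // integer "decimal"
      return `${whole}.${tenth} ${name}`;
    }
  }
  return `${bytes} B`;
}

formatBytes(1536); // "1.5 KB"
```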

WTF::String generateHeapProfile(JSC::VM& vm)
{
    vm.ensureHeapProfiler();
    auto& heapProfiler = *vm.heapProfiler();
    heapProfiler.clearSnapshots();

    // Build the heap snapshot using JSC's GCDebugging format for more detail
    JSC::HeapSnapshotBuilder builder(heapProfiler, JSC::HeapSnapshotBuilder::SnapshotType::GCDebuggingSnapshot);
    builder.buildSnapshot();

    WTF::String jsonString = builder.json();
    if (jsonString.isEmpty())
        return "ERROR: Failed to generate heap snapshot"_s;

    auto jsonValue = JSON::Value::parseJSON(jsonString);
    if (!jsonValue)
        return "ERROR: Failed to parse heap snapshot JSON"_s;

    auto jsonObject = jsonValue->asObject();
    if (!jsonObject)
        return "ERROR: Heap snapshot JSON is not an object"_s;

    // Determine format
    WTF::String snapshotType = jsonObject->getString("type"_s);
    bool isGCDebugging = snapshotType == "GCDebugging"_s;
    int nodeStride = isGCDebugging ? 7 : 4;

    // Parse string tables
    WTF::Vector<WTF::String> classNames;
    WTF::Vector<WTF::String> edgeTypes;
    WTF::Vector<WTF::String> edgeNames;
    WTF::Vector<WTF::String> labels;

    auto parseStringArray = [](RefPtr<JSON::Array> arr, WTF::Vector<WTF::String>& out) {
        if (!arr)
            return;
        // Note: JSON::Array::get() returns Ref<Value> which is always valid
        for (size_t i = 0; i < arr->length(); i++) {
            out.append(arr->get(i)->asString());
        }
    };

    parseStringArray(jsonObject->getArray("nodeClassNames"_s), classNames);
    parseStringArray(jsonObject->getArray("edgeTypes"_s), edgeTypes);
    parseStringArray(jsonObject->getArray("edgeNames"_s), edgeNames);
    parseStringArray(jsonObject->getArray("labels"_s), labels);

    // Parse nodes
    WTF::Vector<NodeData> nodes;
    NodeIdHashMap<size_t> idToIndex;
    size_t totalHeapSize = 0;

    auto nodesArray = jsonObject->getArray("nodes"_s);
    if (nodesArray) {
        size_t nodeCount = nodesArray->length() / nodeStride;
        nodes.reserveCapacity(nodeCount);

        for (size_t i = 0; i < nodeCount; i++) {
            NodeData node;
            size_t offset = i * nodeStride;

            // Use asDouble() to get full integer range for id and size (which can exceed int range)
            // Note: JSON::Array::get() returns Ref<Value> which is always valid
            double dblVal = 0;
            nodesArray->get(offset + 0)->asDouble(dblVal);
            node.id = static_cast<uint64_t>(dblVal);

            dblVal = 0;
            nodesArray->get(offset + 1)->asDouble(dblVal);
            node.size = static_cast<size_t>(dblVal);

            int intVal = 0;
            nodesArray->get(offset + 2)->asInteger(intVal);
            node.classNameIndex = intVal;

            intVal = 0;
            nodesArray->get(offset + 3)->asInteger(intVal);
            node.flags = intVal;
            node.isInternal = (node.flags & 1) != 0;

            if (isGCDebugging && nodeStride >= 7) {
                intVal = 0;
                nodesArray->get(offset + 4)->asInteger(intVal);
                node.labelIndex = intVal;
            }

            totalHeapSize += node.size;
            idToIndex.set(node.id, nodes.size());
            nodes.append(node);
        }
    }

    // Parse edges
    WTF::Vector<EdgeData> edges;
    auto edgesArray = jsonObject->getArray("edges"_s);
    if (edgesArray) {
        size_t edgeCount = edgesArray->length() / 4;
        edges.reserveCapacity(edgeCount);

        for (size_t i = 0; i < edgeCount; i++) {
            EdgeData edge;
            size_t offset = i * 4;

            // Use asDouble() to get full integer range for IDs
            // Note: JSON::Array::get() returns Ref<Value> which is always valid
            double dblVal = 0;
            edgesArray->get(offset + 0)->asDouble(dblVal);
            edge.fromId = static_cast<uint64_t>(dblVal);

            dblVal = 0;
            edgesArray->get(offset + 1)->asDouble(dblVal);
            edge.toId = static_cast<uint64_t>(dblVal);

            int intVal = 0;
            edgesArray->get(offset + 2)->asInteger(intVal);
            edge.typeIndex = intVal;

            intVal = 0;
            edgesArray->get(offset + 3)->asInteger(intVal);
            edge.dataIndex = intVal;

            edges.append(edge);
        }
    }

    // Parse roots
    // Note: JSON::Array::get() returns Ref<Value> which is always valid
    NodeIdHashSet gcRootIds;
    auto rootsArray = jsonObject->getArray("roots"_s);
    if (rootsArray) {
        for (size_t i = 0; i < rootsArray->length(); i += 3) {
            double dblVal = 0;
            rootsArray->get(i)->asDouble(dblVal);
            uint64_t nodeId = static_cast<uint64_t>(dblVal);
            gcRootIds.add(nodeId);
            auto it = idToIndex.find(nodeId);
            if (it != idToIndex.end()) {
                nodes[it->value].isGCRoot = true;
            }
        }
    }

    // Build edge maps for efficient traversal
    NodeIdHashMap<WTF::Vector<size_t>> outgoingEdges;
    NodeIdHashMap<WTF::Vector<size_t>> incomingEdges;
    for (size_t i = 0; i < edges.size(); i++) {
        outgoingEdges.ensure(edges[i].fromId, [] { return WTF::Vector<size_t>(); }).iterator->value.append(i);
        incomingEdges.ensure(edges[i].toId, [] { return WTF::Vector<size_t>(); }).iterator->value.append(i);
    }

    // ============================================================
    // DOMINATOR TREE CALCULATION
    // Based on: K. Cooper, T. Harvey and K. Kennedy
    // "A Simple, Fast Dominance Algorithm"
    // ============================================================

    size_t nodeCount = nodes.size();
    if (nodeCount == 0) {
        return "# Bun Heap Profile\n\nError: No heap profile nodes found. The heap snapshot may be empty or malformed.\n"_s;
    }

    // Build nodeOrdinal (index) to nodeId mapping
    WTF::Vector<uint64_t> ordinalToId(nodeCount);
    for (size_t i = 0; i < nodeCount; i++) {
        ordinalToId[i] = nodes[i].id;
    }

    // Step 1: Build post-order indexes via DFS from root (node 0)
    WTF::Vector<uint32_t> nodeOrdinalToPostOrderIndex(nodeCount);
    WTF::Vector<uint32_t> postOrderIndexToNodeOrdinal(nodeCount);

    // DFS using explicit stack
    WTF::Vector<uint32_t> stackNodes(nodeCount);
    WTF::Vector<size_t> stackEdgeIdx(nodeCount);
    WTF::Vector<uint8_t> visited(nodeCount, 0);

    uint32_t postOrderIndex = 0;
    int stackTop = 0;

    // Start from root node (ordinal 0)
    stackNodes[0] = 0;
    stackEdgeIdx[0] = 0;
    visited[0] = 1;

    while (stackTop >= 0) {
        uint32_t nodeOrdinal = stackNodes[stackTop];
        uint64_t nodeId = ordinalToId[nodeOrdinal];

        auto outIt = outgoingEdges.find(nodeId);
        size_t& edgeIdx = stackEdgeIdx[stackTop];

        bool foundChild = false;
        if (outIt != outgoingEdges.end()) {
            while (edgeIdx < outIt->value.size()) {
                size_t currentEdgeIdx = outIt->value[edgeIdx];
                edgeIdx++;

                uint64_t toId = edges[currentEdgeIdx].toId;
                auto toIt = idToIndex.find(toId);
                if (toIt == idToIndex.end())
                    continue;

                uint32_t toOrdinal = toIt->value;
                if (visited[toOrdinal])
                    continue;

                // Push child onto stack
                visited[toOrdinal] = 1;
                stackTop++;
                stackNodes[stackTop] = toOrdinal;
                stackEdgeIdx[stackTop] = 0;
                foundChild = true;
                break;
            }
        }

        if (!foundChild) {
            // No more children, assign post-order index
            nodeOrdinalToPostOrderIndex[nodeOrdinal] = postOrderIndex;
            postOrderIndexToNodeOrdinal[postOrderIndex] = nodeOrdinal;
            postOrderIndex++;
            stackTop--;
        }
    }

    // Handle unvisited nodes (can happen with unreachable nodes)
    if (postOrderIndex != nodeCount) {
        // Root was last visited, revert
        if (postOrderIndex > 0 && postOrderIndexToNodeOrdinal[postOrderIndex - 1] == 0) {
            postOrderIndex--;
        }

        // Visit unvisited nodes
        for (uint32_t nodeOrdinal = 1; nodeOrdinal < nodeCount; ++nodeOrdinal) {
            if (!visited[nodeOrdinal]) {
                nodeOrdinalToPostOrderIndex[nodeOrdinal] = postOrderIndex;
                postOrderIndexToNodeOrdinal[postOrderIndex] = nodeOrdinal;
                postOrderIndex++;
            }
        }

        // Make sure root is last
        if (!visited[0] || nodeOrdinalToPostOrderIndex[0] != nodeCount - 1) {
            nodeOrdinalToPostOrderIndex[0] = postOrderIndex;
            postOrderIndexToNodeOrdinal[postOrderIndex] = 0;
            postOrderIndex++;
        }
    }

    // Step 2: Build dominator tree using Cooper-Harvey-Kennedy algorithm
    uint32_t rootPostOrderIndex = nodeCount - 1;
    uint32_t noEntry = nodeCount;

    WTF::Vector<uint8_t> affected(nodeCount, 0);
    WTF::Vector<uint32_t> dominators(nodeCount, noEntry);
    WTF::Vector<uint32_t> nodeOrdinalToDominator(nodeCount, 0);

    // Root dominates itself
    dominators[rootPostOrderIndex] = rootPostOrderIndex;

    // Mark root's children as affected and as GC roots
    uint64_t rootId = ordinalToId[0];
    auto rootOutEdges = outgoingEdges.find(rootId);
    if (rootOutEdges != outgoingEdges.end()) {
        for (size_t edgeIdx : rootOutEdges->value) {
            uint64_t toId = edges[edgeIdx].toId;
            auto toIt = idToIndex.find(toId);
            if (toIt != idToIndex.end()) {
                uint32_t toOrdinal = toIt->value;
                uint32_t toPostOrder = nodeOrdinalToPostOrderIndex[toOrdinal];
                affected[toPostOrder] = 1;
                nodes[toOrdinal].isGCRoot = true;
                // Also add to gcRootIds to keep it in sync with isGCRoot flag
                gcRootIds.add(toId);
            }
        }
    }

    // Iteratively compute dominators
    bool changed = true;
    while (changed) {
        changed = false;

        for (int32_t postOrder = static_cast<int32_t>(rootPostOrderIndex) - 1; postOrder >= 0; --postOrder) {
            if (!affected[postOrder])
                continue;
            affected[postOrder] = 0;

            // Already dominated by root
            if (dominators[postOrder] == rootPostOrderIndex)
                continue;

            uint32_t newDominator = noEntry;
            uint32_t nodeOrdinal = postOrderIndexToNodeOrdinal[postOrder];
            uint64_t nodeId = ordinalToId[nodeOrdinal];

            // Check all incoming edges
            auto inIt = incomingEdges.find(nodeId);
            if (inIt != incomingEdges.end()) {
                for (size_t edgeIdx : inIt->value) {
                    uint64_t fromId = edges[edgeIdx].fromId;
                    auto fromIt = idToIndex.find(fromId);
                    if (fromIt == idToIndex.end())
                        continue;

                    uint32_t fromOrdinal = fromIt->value;
                    uint32_t fromPostOrder = nodeOrdinalToPostOrderIndex[fromOrdinal];

                    if (dominators[fromPostOrder] == noEntry)
                        continue;

                    if (newDominator == noEntry) {
                        newDominator = fromPostOrder;
                    } else {
                        // Find common dominator (intersect)
                        uint32_t finger1 = fromPostOrder;
                        uint32_t finger2 = newDominator;
                        // Guard against infinite loops with iteration limit
                        size_t maxIterations = nodeCount * 2;
                        size_t iterations = 0;
                        while (finger1 != finger2 && iterations < maxIterations) {
                            while (finger1 < finger2) {
                                finger1 = dominators[finger1];
                                iterations++;
                            }
                            while (finger2 < finger1) {
                                finger2 = dominators[finger2];
                                iterations++;
                            }
                        }
                        newDominator = finger1;
                    }

                    if (newDominator == rootPostOrderIndex)
                        break;
                }
            }

            // Update if changed
            if (newDominator != noEntry && dominators[postOrder] != newDominator) {
                dominators[postOrder] = newDominator;
                changed = true;

                // Mark children as affected
                auto outIt = outgoingEdges.find(nodeId);
                if (outIt != outgoingEdges.end()) {
                    for (size_t edgeIdx : outIt->value) {
                        uint64_t toId = edges[edgeIdx].toId;
                        auto toIt = idToIndex.find(toId);
                        if (toIt != idToIndex.end()) {
                            uint32_t toPostOrder = nodeOrdinalToPostOrderIndex[toIt->value];
                            affected[toPostOrder] = 1;
                        }
                    }
                }
            }
        }
    }

    // Convert post-order dominators to node ordinals
    for (uint32_t postOrder = 0; postOrder < nodeCount; ++postOrder) {
        uint32_t nodeOrdinal = postOrderIndexToNodeOrdinal[postOrder];
        uint32_t domPostOrder = dominators[postOrder];
        uint32_t domOrdinal = (domPostOrder < nodeCount) ? postOrderIndexToNodeOrdinal[domPostOrder] : 0;
        nodeOrdinalToDominator[nodeOrdinal] = domOrdinal;
    }

    // Step 3: Calculate retained sizes by attributing size up the dominator tree
    // First, set self size
    for (size_t i = 0; i < nodeCount; i++) {
        nodes[i].retainedSize = nodes[i].size;
    }

    // Walk in post-order (children before parents) and add to dominator
    for (uint32_t postOrder = 0; postOrder < nodeCount - 1; ++postOrder) {
        uint32_t nodeOrdinal = postOrderIndexToNodeOrdinal[postOrder];
        uint32_t domOrdinal = nodeOrdinalToDominator[nodeOrdinal];
        nodes[domOrdinal].retainedSize += nodes[nodeOrdinal].retainedSize;
    }
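The retained-size computation is the classic heap-snapshot recipe: build an iterative dominator tree using the Cooper-Harvey-Kennedy intersection, then fold each node's self size into its immediate dominator in post-order. A compact TypeScript sketch of the two core steps, working over post-order indices where the root has the highest index (illustrative only, not the repo's code):

```ts
// dominators[i] is the post-order index of i's immediate dominator;
// the root dominates itself and has the highest post-order index, so
// walking dominators[] always moves toward higher indices.
function intersect(dominators: number[], a: number, b: number): number {
  while (a !== b) {
    while (a < b) a = dominators[a];
    while (b < a) b = dominators[b];
  }
  return a;
}

// Ascending post-order means children are processed before their
// dominators, so each node's retained size is complete before it is
// folded upward; the root ends up holding the whole reachable heap.
function retainedSizes(selfSize: number[], idom: number[], root: number): number[] {
  const retained = selfSize.slice();
  for (let n = 0; n < retained.length; n++) {
    if (n === root) continue;
    retained[idom[n]] += retained[n];
  }
  return retained;
}
```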
// Build type statistics
|
||||
WTF::HashMap<WTF::String, TypeStats> typeStatsMap;
|
||||
for (const auto& node : nodes) {
|
||||
WTF::String className = (node.classNameIndex >= 0 && static_cast<size_t>(node.classNameIndex) < classNames.size())
|
||||
? classNames[node.classNameIndex]
|
||||
: "(unknown)"_s;
|
||||
|
||||
auto result = typeStatsMap.add(className, TypeStats());
|
||||
auto& stats = result.iterator->value;
|
||||
if (result.isNewEntry)
|
||||
stats.name = className;
|
||||
stats.totalSize += node.size;
|
||||
stats.totalRetainedSize += node.retainedSize;
|
||||
stats.count++;
|
||||
if (node.retainedSize > stats.largestRetained) {
|
||||
stats.largestRetained = node.retainedSize;
|
||||
stats.largestInstanceId = node.id;
|
||||
}
|
||||
}
|
||||
|
||||
// Sort types by retained size
|
||||
WTF::Vector<TypeStats> sortedTypes;
|
||||
for (auto& pair : typeStatsMap)
|
||||
sortedTypes.append(pair.value);
|
||||
std::sort(sortedTypes.begin(), sortedTypes.end(), [](const TypeStats& a, const TypeStats& b) {
|
||||
return a.totalRetainedSize > b.totalRetainedSize;
|
||||
});
|
||||
|
||||
// Find largest objects
|
||||
WTF::Vector<size_t> largestObjects;
|
||||
for (size_t i = 0; i < nodes.size(); i++)
|
||||
largestObjects.append(i);
|
||||
std::sort(largestObjects.begin(), largestObjects.end(), [&nodes](size_t a, size_t b) {
|
||||
return nodes[a].retainedSize > nodes[b].retainedSize;
|
||||
});
|
||||
|
||||
// Helpers
|
||||
auto getClassName = [&classNames](const NodeData& node) -> WTF::String {
|
||||
if (node.classNameIndex >= 0 && static_cast<size_t>(node.classNameIndex) < classNames.size())
|
||||
return classNames[node.classNameIndex];
|
||||
return "(unknown)"_s;
|
||||
};
|
||||
|
||||
auto getEdgeType = [&edgeTypes](const EdgeData& edge) -> WTF::String {
|
||||
if (edge.typeIndex >= 0 && static_cast<size_t>(edge.typeIndex) < edgeTypes.size())
|
||||
return edgeTypes[edge.typeIndex];
|
||||
return "?"_s;
|
||||
};
|
||||
|
||||
auto getEdgeName = [&edgeNames, &edgeTypes](const EdgeData& edge) -> WTF::String {
|
||||
WTF::String edgeType;
|
||||
if (edge.typeIndex >= 0 && static_cast<size_t>(edge.typeIndex) < edgeTypes.size())
|
||||
edgeType = edgeTypes[edge.typeIndex];
|
||||
|
||||
if (edgeType == "Property"_s || edgeType == "Variable"_s) {
|
||||
if (edge.dataIndex >= 0 && static_cast<size_t>(edge.dataIndex) < edgeNames.size())
|
||||
return edgeNames[edge.dataIndex];
|
||||
} else if (edgeType == "Index"_s) {
|
||||
return makeString("["_s, WTF::String::number(edge.dataIndex), "]"_s);
|
||||
}
|
||||
return ""_s;
|
||||
};
|
||||
|
||||
auto getNodeLabel = [&labels](const NodeData& node) -> WTF::String {
|
||||
if (node.labelIndex >= 0 && static_cast<size_t>(node.labelIndex) < labels.size())
|
||||
return labels[node.labelIndex];
|
||||
return ""_s;
|
||||
};

    // Build output
    WTF::StringBuilder output;

    // ==================== HEADER ====================
    output.append("# Bun Heap Profile\n\n"_s);
    output.append("Generated by `bun --heap-prof-md`. This profile contains complete heap data in markdown format.\n\n"_s);
    output.append("**Quick Search Commands:**\n"_s);
    output.append("```bash\n"_s);
    output.append("grep '| `Function`' file.md # Find all Function objects\n"_s);
    output.append("grep 'gcroot=1' file.md # Find all GC roots\n"_s);
    output.append("grep '| 12345 |' file.md # Find object #12345 or edges involving it\n"_s);
    output.append("```\n\n"_s);
    output.append("---\n\n"_s);

    // ==================== SUMMARY ====================
    output.append("## Summary\n\n"_s);
    output.append("| Metric | Value |\n"_s);
    output.append("|--------|------:|\n"_s);
    output.append("| Total Heap Size | "_s);
    output.append(formatBytes(totalHeapSize));
    output.append(" ("_s);
    output.append(WTF::String::number(totalHeapSize));
    output.append(" bytes) |\n"_s);
    output.append("| Total Objects | "_s);
    output.append(WTF::String::number(nodes.size()));
    output.append(" |\n"_s);
    output.append("| Total Edges | "_s);
    output.append(WTF::String::number(edges.size()));
    output.append(" |\n"_s);
    output.append("| Unique Types | "_s);
    output.append(WTF::String::number(sortedTypes.size()));
    output.append(" |\n"_s);
    output.append("| GC Roots | "_s);
    output.append(WTF::String::number(gcRootIds.size()));
    output.append(" |\n\n"_s);

    // ==================== TOP TYPES ====================
    output.append("## Top 50 Types by Retained Size\n\n"_s);
    output.append("| Rank | Type | Count | Self Size | Retained Size | Largest Instance |\n"_s);
    output.append("|-----:|------|------:|----------:|--------------:|-----------------:|\n"_s);

    size_t rank = 1;
    for (const auto& stats : sortedTypes) {
        if (rank > 50)
            break;

        output.append("| "_s);
        output.append(WTF::String::number(rank));
        output.append(" | `"_s);
        output.append(escapeString(stats.name));
        output.append("` | "_s);
        output.append(WTF::String::number(stats.count));
        output.append(" | "_s);
        output.append(formatBytes(stats.totalSize));
        output.append(" | "_s);
        output.append(formatBytes(stats.totalRetainedSize));
        output.append(" | "_s);
        output.append(formatBytes(stats.largestRetained));
        output.append(" |\n"_s);
        rank++;
    }
    output.append("\n"_s);

    // ==================== LARGEST OBJECTS ====================
    output.append("## Top 50 Largest Objects\n\n"_s);
    output.append("Objects that retain the most memory (potential memory leak sources):\n\n"_s);
    output.append("| Rank | ID | Type | Self Size | Retained Size | Out-Edges | In-Edges |\n"_s);
    output.append("|-----:|---:|------|----------:|--------------:|----------:|---------:|\n"_s);

    for (size_t i = 0; i < 50 && i < largestObjects.size(); i++) {
        const auto& node = nodes[largestObjects[i]];
        size_t outCount = 0, inCount = 0;
        auto outIt = outgoingEdges.find(node.id);
        if (outIt != outgoingEdges.end())
            outCount = outIt->value.size();
        auto inIt = incomingEdges.find(node.id);
        if (inIt != incomingEdges.end())
            inCount = inIt->value.size();

        output.append("| "_s);
        output.append(WTF::String::number(i + 1));
        output.append(" | "_s);
        output.append(WTF::String::number(node.id));
        output.append(" | `"_s);
        output.append(escapeString(getClassName(node)));
        output.append("` | "_s);
        output.append(formatBytes(node.size));
        output.append(" | "_s);
        output.append(formatBytes(node.retainedSize));
        output.append(" | "_s);
        output.append(WTF::String::number(outCount));
        output.append(" | "_s);
        output.append(WTF::String::number(inCount));
        output.append(" |\n"_s);
    }
    output.append("\n"_s);

    // ==================== RETAINER CHAINS ====================
    output.append("## Retainer Chains\n\n"_s);
    output.append("How the top 20 largest objects are kept alive (path from GC root to object):\n\n"_s);

    for (size_t i = 0; i < 20 && i < largestObjects.size(); i++) {
        const auto& node = nodes[largestObjects[i]];
        output.append("### "_s);
        output.append(WTF::String::number(i + 1));
        output.append(". Object #"_s);
        output.append(WTF::String::number(node.id));
        output.append(" - `"_s);
        output.append(escapeString(getClassName(node)));
        output.append("` ("_s);
        output.append(formatBytes(node.retainedSize));
        output.append(" retained)\n\n"_s);

        // BFS upward from node.id through retainers (incoming edges) until a
        // GC root is reached. Parents are recorded at discovery time, as in a
        // standard BFS:
        //   discoveredFrom[X] = Y means X (a retainer of Y) was first reached
        //   while expanding Y; discoveryEdge[X] is the edge FROM X TO Y.
        // Recording at discovery (rather than when a node is popped) keeps the
        // parent pointers acyclic, so path reconstruction cannot loop and
        // never dead-ends on a node whose retainers were all seen first.
        NodeIdHashMap<uint64_t> discoveredFrom;
        NodeIdHashMap<size_t> discoveryEdge;
        WTF::Vector<uint64_t> queue;
        size_t queueIdx = 0;
        queue.append(node.id);
        discoveredFrom.set(node.id, node.id); // sentinel marking the start node

        bool foundRoot = false;
        uint64_t foundRootId = 0;
        while (queueIdx < queue.size() && !foundRoot) {
            uint64_t current = queue[queueIdx++];
            if (gcRootIds.contains(current) && current != node.id) {
                foundRoot = true;
                foundRootId = current;
                break;
            }
            auto it = incomingEdges.find(current);
            if (it != incomingEdges.end()) {
                for (size_t edgeIdx : it->value) {
                    uint64_t retainerId = edges[edgeIdx].fromId;
                    if (!discoveredFrom.contains(retainerId)) {
                        discoveredFrom.set(retainerId, current);
                        discoveryEdge.set(retainerId, edgeIdx);
                        queue.append(retainerId);
                    }
                }
            }
        }
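        // Because the search is breadth-first, the chain reported for each
        // object is a shortest path (fewest edges) from a GC root to it.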

        output.append("```\n"_s);
        if (foundRoot) {
            // Walk back from the root toward node.id via discoveredFrom; the
            // resulting path is already ordered root-first.
            WTF::Vector<uint64_t> path;
            uint64_t current = foundRootId;
            while (current != node.id && discoveredFrom.contains(current)) {
                path.append(current);
                current = discoveredFrom.get(current);
            }
            path.append(node.id);

            // Print the path from root to object with increasing indentation.
            for (size_t j = 0; j < path.size(); j++) {
                uint64_t nodeId = path[j];
                auto nodeIt = idToIndex.find(nodeId);
                if (nodeIt == idToIndex.end())
                    continue;
                const auto& pathNode = nodes[nodeIt->value];

                for (size_t indent = 0; indent < j; indent++)
                    output.append("  "_s);

                output.append(getClassName(pathNode));
                output.append("#"_s);
                output.append(WTF::String::number(nodeId));
                if (pathNode.isGCRoot)
                    output.append(" [ROOT]"_s);
                output.append(" ("_s);
                output.append(formatBytes(pathNode.size));
                output.append(")"_s);

                // Show the edge from this node to its child on the path.
                if (j + 1 < path.size()) {
                    auto edgeIt = discoveryEdge.find(nodeId);
                    if (edgeIt != discoveryEdge.end()) {
                        WTF::String edgeName = getEdgeName(edges[edgeIt->value]);
                        if (!edgeName.isEmpty()) {
                            output.append(" ."_s);
                            output.append(edgeName);
                        }
                        output.append(" -> "_s);
                    }
                }
                output.append("\n"_s);
            }
        } else if (node.isGCRoot) {
            output.append(getClassName(node));
            output.append("#"_s);
            output.append(WTF::String::number(node.id));
            output.append(" [ROOT] (this object is a GC root)\n"_s);
        } else {
            output.append("(no path to GC root found)\n"_s);
        }
        output.append("```\n\n"_s);
    }
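    // A rendered chain looks like, e.g. (illustrative names and sizes):
    //   GlobalObject#1 [ROOT] (2.1 KB) .require ->
    //     Module#4821 (640 B) .cache ->
    //       Map#4823 (48.2 MB)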

    // ==================== GC ROOTS ====================
    output.append("## GC Roots\n\n"_s);
    output.append("Objects directly held by the runtime (prevent garbage collection):\n\n"_s);
    output.append("| ID | Type | Size | Retained | Label |\n"_s);
    output.append("|---:|------|-----:|---------:|-------|\n"_s);

    size_t rootCount = 0;
    for (const auto& node : nodes) {
        if (node.isGCRoot && rootCount < 100) {
            output.append("| "_s);
            output.append(WTF::String::number(node.id));
            output.append(" | `"_s);
            output.append(escapeString(getClassName(node)));
            output.append("` | "_s);
            output.append(formatBytes(node.size));
            output.append(" | "_s);
            output.append(formatBytes(node.retainedSize));
            output.append(" | "_s);
            WTF::String label = getNodeLabel(node);
            if (!label.isEmpty())
                output.append(escapeString(label.left(50)));
            output.append(" |\n"_s);
            rootCount++;
        }
    }
    if (gcRootIds.size() > 100) {
        output.append("\n*... and "_s);
        output.append(WTF::String::number(gcRootIds.size() - 100));
        output.append(" more GC roots*\n"_s);
    }
    output.append("\n"_s);

    // ==================== ALL NODES ====================
    output.append("## All Objects\n\n"_s);
    output.append("<details>\n<summary>Click to expand "_s);
    output.append(WTF::String::number(nodes.size()));
    output.append(" objects (searchable with grep)</summary>\n\n"_s);
    output.append("| ID | Type | Size | Retained | Flags | Label |\n"_s);
    output.append("|---:|------|-----:|---------:|-------|-------|\n"_s);

    for (const auto& node : nodes) {
        output.append("| "_s);
        output.append(WTF::String::number(node.id));
        output.append(" | `"_s);
        output.append(escapeString(getClassName(node)));
        output.append("` | "_s);
        output.append(WTF::String::number(node.size));
        output.append(" | "_s);
        output.append(WTF::String::number(node.retainedSize));
        output.append(" | "_s);
        if (node.isGCRoot)
            output.append("gcroot=1 "_s);
        if (node.isInternal)
            output.append("internal=1"_s);
        output.append(" | "_s);
        WTF::String label = getNodeLabel(node);
        if (!label.isEmpty()) {
            WTF::String displayLabel = label.length() > 40 ? makeString(label.left(37), "..."_s) : label;
            output.append(escapeString(displayLabel));
        }
        output.append(" |\n"_s);
    }
    output.append("\n</details>\n\n"_s);

    // ==================== ALL EDGES ====================
    output.append("## All Edges\n\n"_s);
    output.append("<details>\n<summary>Click to expand "_s);
    output.append(WTF::String::number(edges.size()));
    output.append(" edges (object reference graph)</summary>\n\n"_s);
    output.append("| From | To | Type | Name |\n"_s);
    output.append("|-----:|---:|------|------|\n"_s);

    for (const auto& edge : edges) {
        output.append("| "_s);
        output.append(WTF::String::number(edge.fromId));
        output.append(" | "_s);
        output.append(WTF::String::number(edge.toId));
        output.append(" | "_s);
        output.append(getEdgeType(edge));
        output.append(" | "_s);
        WTF::String edgeName = getEdgeName(edge);
        if (!edgeName.isEmpty())
            output.append(escapeString(edgeName));
        output.append(" |\n"_s);
    }
    output.append("\n</details>\n\n"_s);

    // ==================== STRING VALUES ====================
    output.append("## String Values\n\n"_s);
    output.append("String objects (useful for identifying leak sources by content):\n\n"_s);
    output.append("<details>\n<summary>Click to expand string values</summary>\n\n"_s);
    output.append("| ID | Size | Value |\n"_s);
    output.append("|---:|-----:|-------|\n"_s);

    for (const auto& node : nodes) {
        WTF::String className = getClassName(node);
        if (className == "string"_s || className == "String"_s) {
            WTF::String label = getNodeLabel(node);
            output.append("| "_s);
            output.append(WTF::String::number(node.id));
            output.append(" | "_s);
            output.append(WTF::String::number(node.size));
            output.append(" | "_s);
            if (!label.isEmpty()) {
                WTF::String displayLabel = label.length() > 100 ? makeString(label.left(97), "..."_s) : label;
                output.append("`"_s);
                output.append(escapeString(displayLabel));
                output.append("`"_s);
            }
            output.append(" |\n"_s);
        }
    }
    output.append("\n</details>\n\n"_s);

    // ==================== TYPE STATISTICS ====================
    output.append("## Complete Type Statistics\n\n"_s);
    output.append("<details>\n<summary>Click to expand all "_s);
    output.append(WTF::String::number(sortedTypes.size()));
    output.append(" types</summary>\n\n"_s);
    output.append("| Type | Count | Self Size | Retained Size | Largest ID |\n"_s);
    output.append("|------|------:|----------:|--------------:|-----------:|\n"_s);

    for (const auto& stats : sortedTypes) {
        output.append("| `"_s);
        output.append(escapeString(stats.name));
        output.append("` | "_s);
        output.append(WTF::String::number(stats.count));
        output.append(" | "_s);
        output.append(WTF::String::number(stats.totalSize));
        output.append(" | "_s);
        output.append(WTF::String::number(stats.totalRetainedSize));
        output.append(" | "_s);
        output.append(WTF::String::number(stats.largestInstanceId));
        output.append(" |\n"_s);
    }
    output.append("\n</details>\n\n"_s);

    // ==================== EDGE NAMES ====================
    output.append("## Property Names\n\n"_s);
    output.append("<details>\n<summary>Click to expand all "_s);
    output.append(WTF::String::number(edgeNames.size()));
    output.append(" property/variable names</summary>\n\n"_s);
    output.append("| Index | Name |\n"_s);
    output.append("|------:|------|\n"_s);

    for (size_t i = 0; i < edgeNames.size(); i++) {
        if (!edgeNames[i].isEmpty()) {
            output.append("| "_s);
            output.append(WTF::String::number(i));
            output.append(" | `"_s);
            output.append(escapeString(edgeNames[i]));
            output.append("` |\n"_s);
        }
    }
    output.append("\n</details>\n\n"_s);

    // ==================== FOOTER ====================
    output.append("---\n\n"_s);
    output.append("*End of heap profile*\n"_s);

    return output.toString();
}

WTF::String generateHeapSnapshotV8(JSC::VM& vm)
{
    vm.ensureHeapProfiler();
    auto& heapProfiler = *vm.heapProfiler();
    heapProfiler.clearSnapshots();

    JSC::BunV8HeapSnapshotBuilder builder(heapProfiler);
    return builder.json();
}

} // namespace Bun

extern "C" BunString Bun__generateHeapProfile(JSC::VM* vm)
{
    WTF::String result = Bun::generateHeapProfile(*vm);
    return Bun::toStringRef(result);
}

extern "C" BunString Bun__generateHeapSnapshotV8(JSC::VM* vm)
{
    WTF::String result = Bun::generateHeapSnapshotV8(*vm);
    return Bun::toStringRef(result);
}