mirror of https://github.com/oven-sh/bun
synced 2026-03-02 05:21:05 +01:00

Compare commits: claude/fix...dylan/fix- (8 commits)

Commits:
- 8bc9b2e741
- 45302e60b1
- 657feebdf6
- 0f59bb00b2
- c1da935e5e
- ffd80531a1
- a85133aa3b
- 94d609cd69
@@ -26,7 +26,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
    wget curl git python3 python3-pip ninja-build \
    software-properties-common apt-transport-https \
    ca-certificates gnupg lsb-release unzip \
-   libxml2-dev ruby ruby-dev bison gawk perl make golang ccache qemu-user-static \
+   libxml2-dev ruby ruby-dev bison gawk perl make golang ccache \
    && add-apt-repository ppa:ubuntu-toolchain-r/test \
    && apt-get update \
    && apt-get install -y gcc-13 g++-13 libgcc-13-dev libstdc++-13-dev \
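The qemu-user-static package being dropped here is what let the x64 CI image execute aarch64 binaries directly. As a hypothetical sketch (the binary name qemu-aarch64-static comes from the Debian package; the paths are invented for illustration), a pipeline script could have run a cross-built binary like this:

```js
// Hypothetical: run an aarch64 build of bun under user-mode QEMU on an x64 CI host.
import { spawnSync } from "node:child_process";

const result = spawnSync("qemu-aarch64-static", ["./bun-linux-aarch64/bun", "--version"], {
  encoding: "utf8",
});
if (result.error) throw result.error;
console.log(result.stdout.trim()); // prints the bun version if emulation works
```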
@@ -537,109 +537,6 @@ function getLinkBunStep(platform, options) {
  };
}

-/**
- * Returns the artifact triplet for a platform, e.g. "bun-linux-aarch64" or "bun-linux-x64-musl-baseline".
- * Matches the naming convention in cmake/targets/BuildBun.cmake.
- * @param {Platform} platform
- * @returns {string}
- */
-function getTargetTriplet(platform) {
-  const { os, arch, abi, baseline } = platform;
-  let triplet = `bun-${os}-${arch}`;
-  if (abi === "musl") {
-    triplet += "-musl";
-  }
-  if (baseline) {
-    triplet += "-baseline";
-  }
-  return triplet;
-}
-
-/**
- * Returns true if a platform needs QEMU-based baseline CPU verification.
- * x64 baseline builds verify no AVX/AVX2 instructions snuck in.
- * aarch64 builds verify no LSE/SVE instructions snuck in.
- * @param {Platform} platform
- * @returns {boolean}
- */
-function needsBaselineVerification(platform) {
-  const { os, arch, baseline } = platform;
-  if (os !== "linux") return false;
-  return (arch === "x64" && baseline) || arch === "aarch64";
-}
-
-/**
- * @param {Platform} platform
- * @param {PipelineOptions} options
- * @returns {Step}
- */
-function getVerifyBaselineStep(platform, options) {
-  const { arch } = platform;
-  const targetKey = getTargetKey(platform);
-  const archArg = arch === "x64" ? "x64" : "aarch64";
-
-  return {
-    key: `${targetKey}-verify-baseline`,
-    label: `${getTargetLabel(platform)} - verify-baseline`,
-    depends_on: [`${targetKey}-build-bun`],
-    agents: getLinkBunAgent(platform, options),
-    retry: getRetry(),
-    cancel_on_build_failing: isMergeQueue(),
-    timeout_in_minutes: 5,
-    command: [
-      `buildkite-agent artifact download '*.zip' . --step ${targetKey}-build-bun`,
-      `unzip -o '${getTargetTriplet(platform)}.zip'`,
-      `unzip -o '${getTargetTriplet(platform)}-profile.zip'`,
-      `chmod +x ${getTargetTriplet(platform)}/bun ${getTargetTriplet(platform)}-profile/bun-profile`,
-      `./scripts/verify-baseline-cpu.sh --arch ${archArg} --binary ${getTargetTriplet(platform)}/bun`,
-      `./scripts/verify-baseline-cpu.sh --arch ${archArg} --binary ${getTargetTriplet(platform)}-profile/bun-profile`,
-    ],
-  };
-}
-
-/**
- * Returns true if the PR modifies SetupWebKit.cmake (WebKit version changes).
- * JIT stress tests under QEMU should run when WebKit is updated to catch
- * JIT-generated code that uses unsupported CPU instructions.
- * @param {PipelineOptions} options
- * @returns {boolean}
- */
-function hasWebKitChanges(options) {
-  const { changedFiles = [] } = options;
-  return changedFiles.some(file => file.includes("SetupWebKit.cmake"));
-}
-
-/**
- * Returns a step that runs JSC JIT stress tests under QEMU.
- * This verifies that JIT-compiled code doesn't use CPU instructions
- * beyond the baseline target (no AVX on x64, no LSE on aarch64).
- * @param {Platform} platform
- * @param {PipelineOptions} options
- * @returns {Step}
- */
-function getJitStressTestStep(platform, options) {
-  const { arch } = platform;
-  const targetKey = getTargetKey(platform);
-  const archArg = arch === "x64" ? "x64" : "aarch64";
-
-  return {
-    key: `${targetKey}-jit-stress-qemu`,
-    label: `${getTargetLabel(platform)} - jit-stress-qemu`,
-    depends_on: [`${targetKey}-build-bun`],
-    agents: getLinkBunAgent(platform, options),
-    retry: getRetry(),
-    cancel_on_build_failing: isMergeQueue(),
-    // JIT stress tests are slow under QEMU emulation
-    timeout_in_minutes: 30,
-    command: [
-      `buildkite-agent artifact download '*.zip' . --step ${targetKey}-build-bun`,
-      `unzip -o '${getTargetTriplet(platform)}.zip'`,
-      `chmod +x ${getTargetTriplet(platform)}/bun`,
-      `./scripts/verify-jit-stress-qemu.sh --arch ${archArg} --binary ${getTargetTriplet(platform)}/bun`,
-    ],
-  };
-}
-
/**
 * @param {Platform} platform
 * @param {PipelineOptions} options
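Taken together, the two removed helpers above determined both the artifact name and whether a target got a verification step. A small self-contained restatement of their behavior (the platform objects below are invented examples, not the repo's actual build matrix):

```js
// Re-stated from the removed helpers above; example platforms are hypothetical.
const getTargetTriplet = ({ os, arch, abi, baseline }) =>
  `bun-${os}-${arch}` + (abi === "musl" ? "-musl" : "") + (baseline ? "-baseline" : "");

const needsBaselineVerification = ({ os, arch, baseline }) =>
  os === "linux" && ((arch === "x64" && baseline) || arch === "aarch64");

for (const platform of [
  { os: "linux", arch: "x64", abi: "gnu", baseline: true },       // verified (x64 baseline)
  { os: "linux", arch: "x64", abi: "musl", baseline: false },     // not verified
  { os: "linux", arch: "aarch64", abi: "gnu", baseline: false },  // verified (all linux aarch64)
  { os: "darwin", arch: "aarch64", abi: "gnu", baseline: false }, // not verified (linux only)
]) {
  console.log(getTargetTriplet(platform), "->", needsBaselineVerification(platform));
}
// bun-linux-x64-baseline -> true
// bun-linux-x64-musl -> false
// bun-linux-aarch64 -> true
// bun-darwin-aarch64 -> false
```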
@@ -877,7 +774,6 @@ function getBenchmarkStep() {
 * @property {Platform[]} [buildPlatforms]
 * @property {Platform[]} [testPlatforms]
 * @property {string[]} [testFiles]
- * @property {string[]} [changedFiles]
 */

/**
@@ -1230,14 +1126,6 @@ async function getPipeline(options = {}) {
    steps.push(getBuildZigStep(target, options));
    steps.push(getLinkBunStep(target, options));

-   if (needsBaselineVerification(target)) {
-     steps.push(getVerifyBaselineStep(target, options));
-     // Run JIT stress tests under QEMU when WebKit is updated
-     if (hasWebKitChanges(options)) {
-       steps.push(getJitStressTestStep(target, options));
-     }
-   }
-
    return getStepWithDependsOn(
      {
        key: getTargetKey(target),
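The scripts/verify-baseline-cpu.sh invoked by the removed steps is not included in this compare. As a hypothetical sketch of the kind of check it implies (assuming objdump is on PATH, and using a small, deliberately non-exhaustive set of mnemonics as a proxy for "instructions beyond the baseline"), the idea is:

```js
// Hypothetical sketch only; NOT the actual scripts/verify-baseline-cpu.sh.
import { spawnSync } from "node:child_process";

// Representative, incomplete markers of non-baseline code:
// AVX/AVX2 mnemonics on x64, LSE/SVE mnemonics on aarch64.
const FORBIDDEN = {
  x64: /\bv(?:addps|mulps|pbroadcast[bdqw]|zeroupper)\b/,
  aarch64: /\b(?:ldadd|ldset|swp|casp?|whilelo|ptrue)\b/,
};

function verifyBaseline(arch, binary) {
  const { status, stdout, error } = spawnSync("objdump", ["-d", binary], {
    encoding: "utf8",
    maxBuffer: 1 << 30, // disassembly of a large binary is big
  });
  if (error || status !== 0) throw error ?? new Error(`objdump exited with ${status}`);
  const offending = stdout.split("\n").filter(line => FORBIDDEN[arch].test(line));
  if (offending.length > 0) {
    throw new Error(`${binary}: ${offending.length} non-baseline instruction(s), e.g. ${offending[0].trim()}`);
  }
  console.log(`${binary}: clean for ${arch} baseline`);
}

verifyBaseline(process.argv[2], process.argv[3]); // e.g. node verify.mjs x64 ./bun
```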
@@ -1335,7 +1223,6 @@ async function main() {
      console.log(`- PR is only docs, skipping tests!`);
      return;
    }
-   options.changedFiles = allFiles;
  }

  startGroup("Generating pipeline...");
bench/bun.lock (194 changed lines)
@@ -18,13 +18,9 @@
      "fast-glob": "3.3.1",
      "fastify": "^5.0.0",
      "fdir": "^6.1.0",
-     "marked": "^17.0.1",
-     "mitata": "1.0.20",
-     "react": "^19",
-     "react-dom": "^19",
-     "react-markdown": "^9.0.3",
-     "remark": "^15.0.1",
-     "remark-html": "^16.0.1",
+     "mitata": "^1.0.25",
+     "react": "^18.3.1",
+     "react-dom": "^18.3.1",
      "string-width": "7.1.0",
      "strip-ansi": "^7.1.0",
      "tar": "^7.4.3",
@@ -154,36 +150,18 @@
    "@swc/core-win32-x64-msvc": ["@swc/core-win32-x64-msvc@1.3.35", "", { "os": "win32", "cpu": "x64" }, "sha512-/RvphT4WfuGfIK84Ha0dovdPrKB1bW/mc+dtdmhv2E3EGkNc5FoueNwYmXWRimxnU7X0X7IkcRhyKB4G5DeAmg=="],

-   "@types/debug": ["@types/debug@4.1.12", "", { "dependencies": { "@types/ms": "*" } }, "sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ=="],
-
-   "@types/estree": ["@types/estree@1.0.8", "", {}, "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w=="],
-
-   "@types/estree-jsx": ["@types/estree-jsx@1.0.5", "", { "dependencies": { "@types/estree": "*" } }, "sha512-52CcUVNFyfb1A2ALocQw/Dd1BQFNmSdkuC3BkZ6iqhdMfQz7JWOFRuJFloOzjk+6WijU56m9oKXFAXc7o3Towg=="],
-
    "@types/fs-extra": ["@types/fs-extra@11.0.4", "", { "dependencies": { "@types/jsonfile": "*", "@types/node": "*" } }, "sha512-yTbItCNreRooED33qjunPthRcSjERP1r4MqCZc7wv0u2sUkzTFp45tgUfS5+r7FrZPdmCCNflLhVSP/o+SemsQ=="],

-   "@types/hast": ["@types/hast@3.0.4", "", { "dependencies": { "@types/unist": "*" } }, "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ=="],
-
    "@types/jsonfile": ["@types/jsonfile@6.1.4", "", { "dependencies": { "@types/node": "*" } }, "sha512-D5qGUYwjvnNNextdU59/+fI+spnwtTFmyQP0h+PfIOSkNfpU6AOICUOkm4i0OnSk+NyjdPJrxCDro0sJsWlRpQ=="],

-   "@types/mdast": ["@types/mdast@4.0.4", "", { "dependencies": { "@types/unist": "*" } }, "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA=="],
-
    "@types/minimist": ["@types/minimist@1.2.5", "", {}, "sha512-hov8bUuiLiyFPGyFPE1lwWhmzYbirOXQNNo40+y3zow8aFVTeyn3VWL0VFFfdNddA8S4Vf0Tc062rzyNr7Paag=="],

-   "@types/ms": ["@types/ms@2.1.0", "", {}, "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA=="],
-
    "@types/node": ["@types/node@18.19.8", "", { "dependencies": { "undici-types": "~5.26.4" } }, "sha512-g1pZtPhsvGVTwmeVoexWZLTQaOvXwoSq//pTL0DHeNzUDrFnir4fgETdhjhIxjVnN+hKOuh98+E1eMLnUXstFg=="],

    "@types/ps-tree": ["@types/ps-tree@1.1.6", "", {}, "sha512-PtrlVaOaI44/3pl3cvnlK+GxOM3re2526TJvPvh7W+keHIXdV4TE0ylpPBAcvFQCbGitaTXwL9u+RF7qtVeazQ=="],

-   "@types/react": ["@types/react@19.2.10", "", { "dependencies": { "csstype": "^3.2.2" } }, "sha512-WPigyYuGhgZ/cTPRXB2EwUw+XvsRA3GqHlsP4qteqrnnjDrApbS7MxcGr/hke5iUoeB7E/gQtrs9I37zAJ0Vjw=="],
-
-   "@types/unist": ["@types/unist@3.0.3", "", {}, "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q=="],
-
    "@types/which": ["@types/which@3.0.3", "", {}, "sha512-2C1+XoY0huExTbs8MQv1DuS5FS86+SEjdM9F/+GS61gg5Hqbtj8ZiDSx8MfWcyei907fIPbfPGCOrNUTnVHY1g=="],

-   "@ungap/structured-clone": ["@ungap/structured-clone@1.3.0", "", {}, "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g=="],
-
    "abstract-logging": ["abstract-logging@2.0.1", "", {}, "sha512-2BjRTZxTPvheOvGbBslFSYOUkr+SjPtOnrLP33f+VIWLzezQpZcqVg7ja3L4dBXmzzgwT+a029jRx5PCi3JuiA=="],

    "ajv": ["ajv@8.17.1", "", { "dependencies": { "fast-deep-equal": "^3.1.3", "fast-uri": "^3.0.1", "json-schema-traverse": "^1.0.0", "require-from-string": "^2.0.2" } }, "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g=="],
@@ -198,8 +176,6 @@
    "avvio": ["avvio@9.1.0", "", { "dependencies": { "@fastify/error": "^4.0.0", "fastq": "^1.17.1" } }, "sha512-fYASnYi600CsH/j9EQov7lECAniYiBFiiAtBNuZYLA2leLe9qOvZzqYHFjtIj6gD2VMoMLP14834LFWvr4IfDw=="],

-   "bail": ["bail@2.0.2", "", {}, "sha512-0xO6mYd7JB2YesxDKplafRpsiOzPt9V02ddPCLbY1xYGPOX24NTyN50qnUxgCPcSoYMhKpAuBTjQoRZCAkUDRw=="],
-
    "benchmark": ["benchmark@2.1.4", "", { "dependencies": { "lodash": "^4.17.4", "platform": "^1.3.3" } }, "sha512-l9MlfN4M1K/H2fbhfMy3B7vJd6AGKJVQn2h6Sg/Yx+KckoUA7ewS5Vv6TjSq18ooE1kS9hhAlQRH3AkXIh/aOQ=="],

    "braces": ["braces@3.0.2", "", { "dependencies": { "fill-range": "^7.0.1" } }, "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A=="],
@@ -208,18 +184,8 @@
    "caniuse-lite": ["caniuse-lite@1.0.30001456", "", {}, "sha512-XFHJY5dUgmpMV25UqaD4kVq2LsiaU5rS8fb0f17pCoXQiQslzmFgnfOxfvo1bTpTqf7dwG/N/05CnLCnOEKmzA=="],

-   "ccount": ["ccount@2.0.1", "", {}, "sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg=="],
-
    "chalk": ["chalk@5.3.0", "", {}, "sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w=="],

-   "character-entities": ["character-entities@2.0.2", "", {}, "sha512-shx7oQ0Awen/BRIdkjkvz54PnEEI/EjwXDSIZp86/KKdbafHh1Df/RYGBhn4hbe2+uKC9FnT5UCEdyPz3ai9hQ=="],
-
-   "character-entities-html4": ["character-entities-html4@2.1.0", "", {}, "sha512-1v7fgQRj6hnSwFpq1Eu0ynr/CDEw0rXo2B61qXrLNdHZmPKgb7fqS1a2JwF0rISo9q77jDI8VMEHoApn8qDoZA=="],
-
-   "character-entities-legacy": ["character-entities-legacy@3.0.0", "", {}, "sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ=="],
-
-   "character-reference-invalid": ["character-reference-invalid@2.0.1", "", {}, "sha512-iBZ4F4wRbyORVsu0jPV7gXkOsGYjGHPmAyv+HiHG8gi5PtC9KI2j1+v8/tlibRvjoWX027ypmG/n0HtO5t7unw=="],
-
    "chownr": ["chownr@3.0.0", "", {}, "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g=="],

    "color": ["color@4.2.3", "", { "dependencies": { "color-convert": "^2.0.1", "color-string": "^1.9.0" } }, "sha512-1rXeuUUiGGrykh+CeBdu5Ie7OJwinCgQY0bc7GCRxy5xVHy+moaqkpL/jqQq0MtQOeYcrqEz4abc5f0KtU7W4A=="],
@@ -230,26 +196,18 @@
    "color-string": ["color-string@1.9.1", "", { "dependencies": { "color-name": "^1.0.0", "simple-swizzle": "^0.2.2" } }, "sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg=="],

    "comma-separated-tokens": ["comma-separated-tokens@2.0.3", "", {}, "sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg=="],

    "convert-source-map": ["convert-source-map@1.9.0", "", {}, "sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A=="],

    "cookie": ["cookie@1.0.2", "", {}, "sha512-9Kr/j4O16ISv8zBBhJoi4bXOYNTkFLOqSL3UDB0njXxCXNezjeyVrJyGOWtgfs/q2km1gwBcfH8q1yEGoMYunA=="],

    "cross-spawn": ["cross-spawn@7.0.3", "", { "dependencies": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", "which": "^2.0.1" } }, "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w=="],

    "csstype": ["csstype@3.2.3", "", {}, "sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ=="],

    "data-uri-to-buffer": ["data-uri-to-buffer@4.0.1", "", {}, "sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A=="],

    "debug": ["debug@4.3.4", "", { "dependencies": { "ms": "2.1.2" } }, "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ=="],

    "decode-named-character-reference": ["decode-named-character-reference@1.3.0", "", { "dependencies": { "character-entities": "^2.0.0" } }, "sha512-GtpQYB283KrPp6nRw50q3U9/VfOutZOe103qlN7BPP6Ad27xYnOIWv4lPzo8HCAL+mMZofJ9KEy30fq6MfaK6Q=="],

    "dequal": ["dequal@2.0.3", "", {}, "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA=="],

    "devlop": ["devlop@1.1.0", "", { "dependencies": { "dequal": "^2.0.0" } }, "sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA=="],

    "dir-glob": ["dir-glob@3.0.1", "", { "dependencies": { "path-type": "^4.0.0" } }, "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA=="],

    "duplexer": ["duplexer@0.1.2", "", {}, "sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg=="],
@@ -304,16 +262,12 @@
    "escape-string-regexp": ["escape-string-regexp@1.0.5", "", {}, "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg=="],

-   "estree-util-is-identifier-name": ["estree-util-is-identifier-name@3.0.0", "", {}, "sha512-hFtqIDZTIUZ9BXLb8y4pYGyk6+wekIivNVTcmvk8NoOh+VeRn5y6cEHzbURrWbfp1fIqdVipilzj+lfaadNZmg=="],
-
    "event-stream": ["event-stream@3.3.4", "", { "dependencies": { "duplexer": "~0.1.1", "from": "~0", "map-stream": "~0.1.0", "pause-stream": "0.0.11", "split": "0.3", "stream-combiner": "~0.0.4", "through": "~2.3.1" } }, "sha512-QHpkERcGsR0T7Qm3HNJSyXKEEj8AHNxkY3PK8TS2KJvQ7NiSHe3DDpwVKKtoYprL/AreyzFBeIkBIWChAqn60g=="],

    "eventemitter3": ["eventemitter3@5.0.0", "", {}, "sha512-riuVbElZZNXLeLEoprfNYoDSwTBRR44X3mnhdI1YcnENpWTCsTTVZ2zFuqQcpoyqPQIUXdiPEU0ECAq0KQRaHg=="],

    "execa": ["execa@8.0.1", "", { "dependencies": { "cross-spawn": "^7.0.3", "get-stream": "^8.0.1", "human-signals": "^5.0.0", "is-stream": "^3.0.0", "merge-stream": "^2.0.0", "npm-run-path": "^5.1.0", "onetime": "^6.0.0", "signal-exit": "^4.1.0", "strip-final-newline": "^3.0.0" } }, "sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg=="],

-   "extend": ["extend@3.0.2", "", {}, "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g=="],
-
    "fast-decode-uri-component": ["fast-decode-uri-component@1.0.1", "", {}, "sha512-WKgKWg5eUxvRZGwW8FvfbaH7AXSh2cL+3j5fMGzUMCxWBJ3dV3a7Wz8y2f/uQ0e3B6WmodD3oS54jTQ9HVTIIg=="],

    "fast-deep-equal": ["fast-deep-equal@3.1.3", "", {}, "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="],
@@ -364,44 +318,20 @@
    "has-flag": ["has-flag@3.0.0", "", {}, "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw=="],

-   "hast-util-sanitize": ["hast-util-sanitize@5.0.2", "", { "dependencies": { "@types/hast": "^3.0.0", "@ungap/structured-clone": "^1.0.0", "unist-util-position": "^5.0.0" } }, "sha512-3yTWghByc50aGS7JlGhk61SPenfE/p1oaFeNwkOOyrscaOkMGrcW9+Cy/QAIOBpZxP1yqDIzFMR0+Np0i0+usg=="],
-
-   "hast-util-to-html": ["hast-util-to-html@9.0.5", "", { "dependencies": { "@types/hast": "^3.0.0", "@types/unist": "^3.0.0", "ccount": "^2.0.0", "comma-separated-tokens": "^2.0.0", "hast-util-whitespace": "^3.0.0", "html-void-elements": "^3.0.0", "mdast-util-to-hast": "^13.0.0", "property-information": "^7.0.0", "space-separated-tokens": "^2.0.0", "stringify-entities": "^4.0.0", "zwitch": "^2.0.4" } }, "sha512-OguPdidb+fbHQSU4Q4ZiLKnzWo8Wwsf5bZfbvu7//a9oTYoqD/fWpe96NuHkoS9h0ccGOTe0C4NGXdtS0iObOw=="],
-
-   "hast-util-to-jsx-runtime": ["hast-util-to-jsx-runtime@2.3.6", "", { "dependencies": { "@types/estree": "^1.0.0", "@types/hast": "^3.0.0", "@types/unist": "^3.0.0", "comma-separated-tokens": "^2.0.0", "devlop": "^1.0.0", "estree-util-is-identifier-name": "^3.0.0", "hast-util-whitespace": "^3.0.0", "mdast-util-mdx-expression": "^2.0.0", "mdast-util-mdx-jsx": "^3.0.0", "mdast-util-mdxjs-esm": "^2.0.0", "property-information": "^7.0.0", "space-separated-tokens": "^2.0.0", "style-to-js": "^1.0.0", "unist-util-position": "^5.0.0", "vfile-message": "^4.0.0" } }, "sha512-zl6s8LwNyo1P9uw+XJGvZtdFF1GdAkOg8ujOw+4Pyb76874fLps4ueHXDhXWdk6YHQ6OgUtinliG7RsYvCbbBg=="],
-
-   "hast-util-whitespace": ["hast-util-whitespace@3.0.0", "", { "dependencies": { "@types/hast": "^3.0.0" } }, "sha512-88JUN06ipLwsnv+dVn+OIYOvAuvBMy/Qoi6O7mQHxdPXpjy+Cd6xRkWwux7DKO+4sYILtLBRIKgsdpS2gQc7qw=="],
-
-   "html-url-attributes": ["html-url-attributes@3.0.1", "", {}, "sha512-ol6UPyBWqsrO6EJySPz2O7ZSr856WDrEzM5zMqp+FJJLGMW35cLYmmZnl0vztAZxRUoNZJFTCohfjuIJ8I4QBQ=="],
-
-   "html-void-elements": ["html-void-elements@3.0.0", "", {}, "sha512-bEqo66MRXsUGxWHV5IP0PUiAWwoEjba4VCzg0LjFJBpchPaTfyfCKTG6bc5F8ucKec3q5y6qOdGyYTSBEvhCrg=="],
-
    "human-signals": ["human-signals@5.0.0", "", {}, "sha512-AXcZb6vzzrFAUE61HnN4mpLqd/cSIwNQjtNWR0euPm6y0iqx3G4gOXaIDdtdDwZmhwe82LA6+zinmW4UBWVePQ=="],

    "ignore": ["ignore@5.3.0", "", {}, "sha512-g7dmpshy+gD7mh88OC9NwSGTKoc3kyLAZQRU1mt53Aw/vnvfXnbC+F/7F7QoYVKbV+KNvJx8wArewKy1vXMtlg=="],

-   "inline-style-parser": ["inline-style-parser@0.2.7", "", {}, "sha512-Nb2ctOyNR8DqQoR0OwRG95uNWIC0C1lCgf5Naz5H6Ji72KZ8OcFZLz2P5sNgwlyoJ8Yif11oMuYs5pBQa86csA=="],
-
    "ipaddr.js": ["ipaddr.js@2.2.0", "", {}, "sha512-Ag3wB2o37wslZS19hZqorUnrnzSkpOVy+IiiDEiTqNubEYpYuHWIf6K4psgN2ZWKExS4xhVCrRVfb/wfW8fWJA=="],

-   "is-alphabetical": ["is-alphabetical@2.0.1", "", {}, "sha512-FWyyY60MeTNyeSRpkM2Iry0G9hpr7/9kD40mD/cGQEuilcZYS4okz8SN2Q6rLCJ8gbCt6fN+rC+6tMGS99LaxQ=="],
-
-   "is-alphanumerical": ["is-alphanumerical@2.0.1", "", { "dependencies": { "is-alphabetical": "^2.0.0", "is-decimal": "^2.0.0" } }, "sha512-hmbYhX/9MUMF5uh7tOXyK/n0ZvWpad5caBA17GsC6vyuCqaWliRG5K1qS9inmUhEMaOBIW7/whAnSwveW/LtZw=="],
-
    "is-arrayish": ["is-arrayish@0.3.2", "", {}, "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ=="],

-   "is-decimal": ["is-decimal@2.0.1", "", {}, "sha512-AAB9hiomQs5DXWcRB1rqsxGUstbRroFOPPVAomNk/3XHR5JyEZChOyTWe2oayKnsSsr/kcGqF+z6yuH6HHpN0A=="],
-
    "is-extglob": ["is-extglob@2.1.1", "", {}, "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ=="],

    "is-glob": ["is-glob@4.0.3", "", { "dependencies": { "is-extglob": "^2.1.1" } }, "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg=="],

-   "is-hexadecimal": ["is-hexadecimal@2.0.1", "", {}, "sha512-DgZQp241c8oO6cA1SbTEWiXeoxV42vlcJxgH+B3hi1AiqqKruZR3ZGF8In3fj4+/y/7rHvlOZLZtgJ/4ttYGZg=="],
-
    "is-number": ["is-number@7.0.0", "", {}, "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng=="],

-   "is-plain-obj": ["is-plain-obj@4.1.0", "", {}, "sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg=="],
-
    "is-stream": ["is-stream@3.0.0", "", {}, "sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA=="],

    "isexe": ["isexe@2.0.0", "", {}, "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw=="],
@@ -422,76 +352,16 @@
    "lodash": ["lodash@4.17.21", "", {}, "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="],

-   "longest-streak": ["longest-streak@3.1.0", "", {}, "sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g=="],
+   "loose-envify": ["loose-envify@1.4.0", "", { "dependencies": { "js-tokens": "^3.0.0 || ^4.0.0" }, "bin": { "loose-envify": "cli.js" } }, "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q=="],

    "lru-cache": ["lru-cache@5.1.1", "", { "dependencies": { "yallist": "^3.0.2" } }, "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w=="],

    "map-stream": ["map-stream@0.1.0", "", {}, "sha512-CkYQrPYZfWnu/DAmVCpTSX/xHpKZ80eKh2lAkyA6AJTef6bW+6JpbQZN5rofum7da+SyN1bi5ctTm+lTfcCW3g=="],

-   "marked": ["marked@17.0.1", "", { "bin": { "marked": "bin/marked.js" } }, "sha512-boeBdiS0ghpWcSwoNm/jJBwdpFaMnZWRzjA6SkUMYb40SVaN1x7mmfGKp0jvexGcx+7y2La5zRZsYFZI6Qpypg=="],
-
-   "mdast-util-from-markdown": ["mdast-util-from-markdown@2.0.2", "", { "dependencies": { "@types/mdast": "^4.0.0", "@types/unist": "^3.0.0", "decode-named-character-reference": "^1.0.0", "devlop": "^1.0.0", "mdast-util-to-string": "^4.0.0", "micromark": "^4.0.0", "micromark-util-decode-numeric-character-reference": "^2.0.0", "micromark-util-decode-string": "^2.0.0", "micromark-util-normalize-identifier": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0", "unist-util-stringify-position": "^4.0.0" } }, "sha512-uZhTV/8NBuw0WHkPTrCqDOl0zVe1BIng5ZtHoDk49ME1qqcjYmmLmOf0gELgcRMxN4w2iuIeVso5/6QymSrgmA=="],
-
-   "mdast-util-mdx-expression": ["mdast-util-mdx-expression@2.0.1", "", { "dependencies": { "@types/estree-jsx": "^1.0.0", "@types/hast": "^3.0.0", "@types/mdast": "^4.0.0", "devlop": "^1.0.0", "mdast-util-from-markdown": "^2.0.0", "mdast-util-to-markdown": "^2.0.0" } }, "sha512-J6f+9hUp+ldTZqKRSg7Vw5V6MqjATc+3E4gf3CFNcuZNWD8XdyI6zQ8GqH7f8169MM6P7hMBRDVGnn7oHB9kXQ=="],
-
-   "mdast-util-mdx-jsx": ["mdast-util-mdx-jsx@3.2.0", "", { "dependencies": { "@types/estree-jsx": "^1.0.0", "@types/hast": "^3.0.0", "@types/mdast": "^4.0.0", "@types/unist": "^3.0.0", "ccount": "^2.0.0", "devlop": "^1.1.0", "mdast-util-from-markdown": "^2.0.0", "mdast-util-to-markdown": "^2.0.0", "parse-entities": "^4.0.0", "stringify-entities": "^4.0.0", "unist-util-stringify-position": "^4.0.0", "vfile-message": "^4.0.0" } }, "sha512-lj/z8v0r6ZtsN/cGNNtemmmfoLAFZnjMbNyLzBafjzikOM+glrjNHPlf6lQDOTccj9n5b0PPihEBbhneMyGs1Q=="],
-
-   "mdast-util-mdxjs-esm": ["mdast-util-mdxjs-esm@2.0.1", "", { "dependencies": { "@types/estree-jsx": "^1.0.0", "@types/hast": "^3.0.0", "@types/mdast": "^4.0.0", "devlop": "^1.0.0", "mdast-util-from-markdown": "^2.0.0", "mdast-util-to-markdown": "^2.0.0" } }, "sha512-EcmOpxsZ96CvlP03NghtH1EsLtr0n9Tm4lPUJUBccV9RwUOneqSycg19n5HGzCf+10LozMRSObtVr3ee1WoHtg=="],
-
-   "mdast-util-phrasing": ["mdast-util-phrasing@4.1.0", "", { "dependencies": { "@types/mdast": "^4.0.0", "unist-util-is": "^6.0.0" } }, "sha512-TqICwyvJJpBwvGAMZjj4J2n0X8QWp21b9l0o7eXyVJ25YNWYbJDVIyD1bZXE6WtV6RmKJVYmQAKWa0zWOABz2w=="],
-
-   "mdast-util-to-hast": ["mdast-util-to-hast@13.2.1", "", { "dependencies": { "@types/hast": "^3.0.0", "@types/mdast": "^4.0.0", "@ungap/structured-clone": "^1.0.0", "devlop": "^1.0.0", "micromark-util-sanitize-uri": "^2.0.0", "trim-lines": "^3.0.0", "unist-util-position": "^5.0.0", "unist-util-visit": "^5.0.0", "vfile": "^6.0.0" } }, "sha512-cctsq2wp5vTsLIcaymblUriiTcZd0CwWtCbLvrOzYCDZoWyMNV8sZ7krj09FSnsiJi3WVsHLM4k6Dq/yaPyCXA=="],
-
-   "mdast-util-to-markdown": ["mdast-util-to-markdown@2.1.2", "", { "dependencies": { "@types/mdast": "^4.0.0", "@types/unist": "^3.0.0", "longest-streak": "^3.0.0", "mdast-util-phrasing": "^4.0.0", "mdast-util-to-string": "^4.0.0", "micromark-util-classify-character": "^2.0.0", "micromark-util-decode-string": "^2.0.0", "unist-util-visit": "^5.0.0", "zwitch": "^2.0.0" } }, "sha512-xj68wMTvGXVOKonmog6LwyJKrYXZPvlwabaryTjLh9LuvovB/KAH+kvi8Gjj+7rJjsFi23nkUxRQv1KqSroMqA=="],
-
-   "mdast-util-to-string": ["mdast-util-to-string@4.0.0", "", { "dependencies": { "@types/mdast": "^4.0.0" } }, "sha512-0H44vDimn51F0YwvxSJSm0eCDOJTRlmN0R1yBh4HLj9wiV1Dn0QoXGbvFAWj2hSItVTlCmBF1hqKlIyUBVFLPg=="],
-
    "merge-stream": ["merge-stream@2.0.0", "", {}, "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w=="],

    "merge2": ["merge2@1.4.1", "", {}, "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg=="],

-   "micromark": ["micromark@4.0.2", "", { "dependencies": { "@types/debug": "^4.0.0", "debug": "^4.0.0", "decode-named-character-reference": "^1.0.0", "devlop": "^1.0.0", "micromark-core-commonmark": "^2.0.0", "micromark-factory-space": "^2.0.0", "micromark-util-character": "^2.0.0", "micromark-util-chunked": "^2.0.0", "micromark-util-combine-extensions": "^2.0.0", "micromark-util-decode-numeric-character-reference": "^2.0.0", "micromark-util-encode": "^2.0.0", "micromark-util-normalize-identifier": "^2.0.0", "micromark-util-resolve-all": "^2.0.0", "micromark-util-sanitize-uri": "^2.0.0", "micromark-util-subtokenize": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-zpe98Q6kvavpCr1NPVSCMebCKfD7CA2NqZ+rykeNhONIJBpc1tFKt9hucLGwha3jNTNI8lHpctWJWoimVF4PfA=="],
-
-   "micromark-core-commonmark": ["micromark-core-commonmark@2.0.3", "", { "dependencies": { "decode-named-character-reference": "^1.0.0", "devlop": "^1.0.0", "micromark-factory-destination": "^2.0.0", "micromark-factory-label": "^2.0.0", "micromark-factory-space": "^2.0.0", "micromark-factory-title": "^2.0.0", "micromark-factory-whitespace": "^2.0.0", "micromark-util-character": "^2.0.0", "micromark-util-chunked": "^2.0.0", "micromark-util-classify-character": "^2.0.0", "micromark-util-html-tag-name": "^2.0.0", "micromark-util-normalize-identifier": "^2.0.0", "micromark-util-resolve-all": "^2.0.0", "micromark-util-subtokenize": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-RDBrHEMSxVFLg6xvnXmb1Ayr2WzLAWjeSATAoxwKYJV94TeNavgoIdA0a9ytzDSVzBy2YKFK+emCPOEibLeCrg=="],
-
-   "micromark-factory-destination": ["micromark-factory-destination@2.0.1", "", { "dependencies": { "micromark-util-character": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-Xe6rDdJlkmbFRExpTOmRj9N3MaWmbAgdpSrBQvCFqhezUn4AHqJHbaEnfbVYYiexVSs//tqOdY/DxhjdCiJnIA=="],
-
-   "micromark-factory-label": ["micromark-factory-label@2.0.1", "", { "dependencies": { "devlop": "^1.0.0", "micromark-util-character": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-VFMekyQExqIW7xIChcXn4ok29YE3rnuyveW3wZQWWqF4Nv9Wk5rgJ99KzPvHjkmPXF93FXIbBp6YdW3t71/7Vg=="],
-
-   "micromark-factory-space": ["micromark-factory-space@2.0.1", "", { "dependencies": { "micromark-util-character": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-zRkxjtBxxLd2Sc0d+fbnEunsTj46SWXgXciZmHq0kDYGnck/ZSGj9/wULTV95uoeYiK5hRXP2mJ98Uo4cq/LQg=="],
-
-   "micromark-factory-title": ["micromark-factory-title@2.0.1", "", { "dependencies": { "micromark-factory-space": "^2.0.0", "micromark-util-character": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-5bZ+3CjhAd9eChYTHsjy6TGxpOFSKgKKJPJxr293jTbfry2KDoWkhBb6TcPVB4NmzaPhMs1Frm9AZH7OD4Cjzw=="],
-
-   "micromark-factory-whitespace": ["micromark-factory-whitespace@2.0.1", "", { "dependencies": { "micromark-factory-space": "^2.0.0", "micromark-util-character": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-Ob0nuZ3PKt/n0hORHyvoD9uZhr+Za8sFoP+OnMcnWK5lngSzALgQYKMr9RJVOWLqQYuyn6ulqGWSXdwf6F80lQ=="],
-
-   "micromark-util-character": ["micromark-util-character@2.1.1", "", { "dependencies": { "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q=="],
-
-   "micromark-util-chunked": ["micromark-util-chunked@2.0.1", "", { "dependencies": { "micromark-util-symbol": "^2.0.0" } }, "sha512-QUNFEOPELfmvv+4xiNg2sRYeS/P84pTW0TCgP5zc9FpXetHY0ab7SxKyAQCNCc1eK0459uoLI1y5oO5Vc1dbhA=="],
-
-   "micromark-util-classify-character": ["micromark-util-classify-character@2.0.1", "", { "dependencies": { "micromark-util-character": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-K0kHzM6afW/MbeWYWLjoHQv1sgg2Q9EccHEDzSkxiP/EaagNzCm7T/WMKZ3rjMbvIpvBiZgwR3dKMygtA4mG1Q=="],
-
-   "micromark-util-combine-extensions": ["micromark-util-combine-extensions@2.0.1", "", { "dependencies": { "micromark-util-chunked": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-OnAnH8Ujmy59JcyZw8JSbK9cGpdVY44NKgSM7E9Eh7DiLS2E9RNQf0dONaGDzEG9yjEl5hcqeIsj4hfRkLH/Bg=="],
-
-   "micromark-util-decode-numeric-character-reference": ["micromark-util-decode-numeric-character-reference@2.0.2", "", { "dependencies": { "micromark-util-symbol": "^2.0.0" } }, "sha512-ccUbYk6CwVdkmCQMyr64dXz42EfHGkPQlBj5p7YVGzq8I7CtjXZJrubAYezf7Rp+bjPseiROqe7G6foFd+lEuw=="],
-
-   "micromark-util-decode-string": ["micromark-util-decode-string@2.0.1", "", { "dependencies": { "decode-named-character-reference": "^1.0.0", "micromark-util-character": "^2.0.0", "micromark-util-decode-numeric-character-reference": "^2.0.0", "micromark-util-symbol": "^2.0.0" } }, "sha512-nDV/77Fj6eH1ynwscYTOsbK7rR//Uj0bZXBwJZRfaLEJ1iGBR6kIfNmlNqaqJf649EP0F3NWNdeJi03elllNUQ=="],
-
-   "micromark-util-encode": ["micromark-util-encode@2.0.1", "", {}, "sha512-c3cVx2y4KqUnwopcO9b/SCdo2O67LwJJ/UyqGfbigahfegL9myoEFoDYZgkT7f36T0bLrM9hZTAaAyH+PCAXjw=="],
-
-   "micromark-util-html-tag-name": ["micromark-util-html-tag-name@2.0.1", "", {}, "sha512-2cNEiYDhCWKI+Gs9T0Tiysk136SnR13hhO8yW6BGNyhOC4qYFnwF1nKfD3HFAIXA5c45RrIG1ub11GiXeYd1xA=="],
-
-   "micromark-util-normalize-identifier": ["micromark-util-normalize-identifier@2.0.1", "", { "dependencies": { "micromark-util-symbol": "^2.0.0" } }, "sha512-sxPqmo70LyARJs0w2UclACPUUEqltCkJ6PhKdMIDuJ3gSf/Q+/GIe3WKl0Ijb/GyH9lOpUkRAO2wp0GVkLvS9Q=="],
-
-   "micromark-util-resolve-all": ["micromark-util-resolve-all@2.0.1", "", { "dependencies": { "micromark-util-types": "^2.0.0" } }, "sha512-VdQyxFWFT2/FGJgwQnJYbe1jjQoNTS4RjglmSjTUlpUMa95Htx9NHeYW4rGDJzbjvCsl9eLjMQwGeElsqmzcHg=="],
-
-   "micromark-util-sanitize-uri": ["micromark-util-sanitize-uri@2.0.1", "", { "dependencies": { "micromark-util-character": "^2.0.0", "micromark-util-encode": "^2.0.0", "micromark-util-symbol": "^2.0.0" } }, "sha512-9N9IomZ/YuGGZZmQec1MbgxtlgougxTodVwDzzEouPKo3qFWvymFHWcnDi2vzV1ff6kas9ucW+o3yzJK9YB1AQ=="],
-
-   "micromark-util-subtokenize": ["micromark-util-subtokenize@2.1.0", "", { "dependencies": { "devlop": "^1.0.0", "micromark-util-chunked": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-XQLu552iSctvnEcgXw6+Sx75GflAPNED1qx7eBJ+wydBb2KCbRZe+NwvIEEMM83uml1+2WSXpBAcp9IUCgCYWA=="],
-
-   "micromark-util-symbol": ["micromark-util-symbol@2.0.1", "", {}, "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q=="],
-
-   "micromark-util-types": ["micromark-util-types@2.0.2", "", {}, "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA=="],
-
    "micromatch": ["micromatch@4.0.5", "", { "dependencies": { "braces": "^3.0.2", "picomatch": "^2.3.1" } }, "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA=="],

    "mimic-fn": ["mimic-fn@4.0.0", "", {}, "sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw=="],
@@ -502,7 +372,7 @@
    "minizlib": ["minizlib@3.1.0", "", { "dependencies": { "minipass": "^7.1.2" } }, "sha512-KZxYo1BUkWD2TVFLr0MQoM8vUUigWD3LlD83a/75BqC+4qE0Hb1Vo5v1FgcfaNXvfXzr+5EhQ6ing/CaBijTlw=="],

-   "mitata": ["mitata@1.0.20", "", {}, "sha512-oHWYGX5bi4wGT/1zrhiZAEzqTV14Vq6/PUTW8WK0b3YHBBQcZz2QFm+InHhjnD0I7B6CMtwdGt2K0938r7YTdQ=="],
+   "mitata": ["mitata@1.0.25", "", {}, "sha512-0v5qZtVW5vwj9FDvYfraR31BMDcRLkhSFWPTLaxx/Z3/EvScfVtAAWtMI2ArIbBcwh7P86dXh0lQWKiXQPlwYA=="],

    "ms": ["ms@2.1.2", "", {}, "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="],
@@ -518,8 +388,6 @@
    "onetime": ["onetime@6.0.0", "", { "dependencies": { "mimic-fn": "^4.0.0" } }, "sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ=="],

-   "parse-entities": ["parse-entities@4.0.2", "", { "dependencies": { "@types/unist": "^2.0.0", "character-entities-legacy": "^3.0.0", "character-reference-invalid": "^2.0.0", "decode-named-character-reference": "^1.0.0", "is-alphanumerical": "^2.0.0", "is-decimal": "^2.0.0", "is-hexadecimal": "^2.0.0" } }, "sha512-GG2AQYWoLgL877gQIKeRPGO1xF9+eG1ujIb5soS5gPvLQ1y2o8FL90w2QWNdf9I361Mpp7726c+lj3U0qK1uGw=="],
-
    "path-key": ["path-key@3.1.1", "", {}, "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q=="],

    "path-type": ["path-type@4.0.0", "", {}, "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw=="],
@@ -540,32 +408,18 @@
    "process-warning": ["process-warning@5.0.0", "", {}, "sha512-a39t9ApHNx2L4+HBnQKqxxHNs1r7KF+Intd8Q/g1bUh6q0WIp9voPXJ/x0j+ZL45KF1pJd9+q2jLIRMfvEshkA=="],

-   "property-information": ["property-information@7.1.0", "", {}, "sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ=="],
-
    "ps-tree": ["ps-tree@1.2.0", "", { "dependencies": { "event-stream": "=3.3.4" }, "bin": { "ps-tree": "./bin/ps-tree.js" } }, "sha512-0VnamPPYHl4uaU/nSFeZZpR21QAWRz+sRv4iW9+v/GS/J5U5iZB5BNN6J0RMoOvdx2gWM2+ZFMIm58q24e4UYA=="],

    "queue-microtask": ["queue-microtask@1.2.3", "", {}, "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A=="],

    "quick-format-unescaped": ["quick-format-unescaped@4.0.4", "", {}, "sha512-tYC1Q1hgyRuHgloV/YXs2w15unPVh8qfu/qCTfhTYamaw7fyhumKa2yGpdSo87vY32rIclj+4fWYQXUMs9EHvg=="],

-   "react": ["react@19.2.4", "", {}, "sha512-9nfp2hYpCwOjAN+8TZFGhtWEwgvWHXqESH8qT89AT/lWklpLON22Lc8pEtnpsZz7VmawabSU0gCjnj8aC0euHQ=="],
+   "react": ["react@18.3.1", "", { "dependencies": { "loose-envify": "^1.1.0" } }, "sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ=="],

-   "react-dom": ["react-dom@19.2.4", "", { "dependencies": { "scheduler": "^0.27.0" }, "peerDependencies": { "react": "^19.2.4" } }, "sha512-AXJdLo8kgMbimY95O2aKQqsz2iWi9jMgKJhRBAxECE4IFxfcazB2LmzloIoibJI3C12IlY20+KFaLv+71bUJeQ=="],
-
-   "react-markdown": ["react-markdown@9.1.0", "", { "dependencies": { "@types/hast": "^3.0.0", "@types/mdast": "^4.0.0", "devlop": "^1.0.0", "hast-util-to-jsx-runtime": "^2.0.0", "html-url-attributes": "^3.0.0", "mdast-util-to-hast": "^13.0.0", "remark-parse": "^11.0.0", "remark-rehype": "^11.0.0", "unified": "^11.0.0", "unist-util-visit": "^5.0.0", "vfile": "^6.0.0" }, "peerDependencies": { "@types/react": ">=18", "react": ">=18" } }, "sha512-xaijuJB0kzGiUdG7nc2MOMDUDBWPyGAjZtUrow9XxUeua8IqeP+VlIfAZ3bphpcLTnSZXz6z9jcVC/TCwbfgdw=="],
+   "react-dom": ["react-dom@18.3.1", "", { "dependencies": { "loose-envify": "^1.1.0", "scheduler": "^0.23.2" }, "peerDependencies": { "react": "^18.3.1" } }, "sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw=="],

    "real-require": ["real-require@0.2.0", "", {}, "sha512-57frrGM/OCTLqLOAh0mhVA9VBMHd+9U7Zb2THMGdBUoZVOtGbJzjxsYGDJ3A9AYYCP4hn6y1TVbaOfzWtm5GFg=="],

-   "remark": ["remark@15.0.1", "", { "dependencies": { "@types/mdast": "^4.0.0", "remark-parse": "^11.0.0", "remark-stringify": "^11.0.0", "unified": "^11.0.0" } }, "sha512-Eht5w30ruCXgFmxVUSlNWQ9iiimq07URKeFS3hNc8cUWy1llX4KDWfyEDZRycMc+znsN9Ux5/tJ/BFdgdOwA3A=="],
-
-   "remark-html": ["remark-html@16.0.1", "", { "dependencies": { "@types/mdast": "^4.0.0", "hast-util-sanitize": "^5.0.0", "hast-util-to-html": "^9.0.0", "mdast-util-to-hast": "^13.0.0", "unified": "^11.0.0" } }, "sha512-B9JqA5i0qZe0Nsf49q3OXyGvyXuZFDzAP2iOFLEumymuYJITVpiH1IgsTEwTpdptDmZlMDMWeDmSawdaJIGCXQ=="],
-
-   "remark-parse": ["remark-parse@11.0.0", "", { "dependencies": { "@types/mdast": "^4.0.0", "mdast-util-from-markdown": "^2.0.0", "micromark-util-types": "^2.0.0", "unified": "^11.0.0" } }, "sha512-FCxlKLNGknS5ba/1lmpYijMUzX2esxW5xQqjWxw2eHFfS2MSdaHVINFmhjo+qN1WhZhNimq0dZATN9pH0IDrpA=="],
-
-   "remark-rehype": ["remark-rehype@11.1.2", "", { "dependencies": { "@types/hast": "^3.0.0", "@types/mdast": "^4.0.0", "mdast-util-to-hast": "^13.0.0", "unified": "^11.0.0", "vfile": "^6.0.0" } }, "sha512-Dh7l57ianaEoIpzbp0PC9UKAdCSVklD8E5Rpw7ETfbTl3FqcOOgq5q2LVDhgGCkaBv7p24JXikPdvhhmHvKMsw=="],
-
-   "remark-stringify": ["remark-stringify@11.0.0", "", { "dependencies": { "@types/mdast": "^4.0.0", "mdast-util-to-markdown": "^2.0.0", "unified": "^11.0.0" } }, "sha512-1OSmLd3awB/t8qdoEOMazZkNsfVTeY4fTsgzcQFdXNq8ToTN4ZGwrMnlda4K6smTFKD+GRV6O48i6Z4iKgPPpw=="],
-
    "require-from-string": ["require-from-string@2.0.2", "", {}, "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw=="],

    "ret": ["ret@0.5.0", "", {}, "sha512-I1XxrZSQ+oErkRR4jYbAyEEu2I0avBvvMM5JN+6EBprOGRCs63ENqZ3vjavq8fBw2+62G5LF5XelKwuJpcvcxw=="],
@@ -580,7 +434,7 @@
    "safe-stable-stringify": ["safe-stable-stringify@2.5.0", "", {}, "sha512-b3rppTKm9T+PsVCBEOUR46GWI7fdOs00VKZ1+9c1EWDaDMvjQc6tUwuFyIprgGgTcWoVHSKrU8H31ZHA2e0RHA=="],

-   "scheduler": ["scheduler@0.27.0", "", {}, "sha512-eNv+WrVbKu1f3vbYJT/xtiF5syA5HPIMtf9IgY/nKg0sWqzAUEvqY/xm7OcZc/qafLx/iO9FgOmeSAp4v5ti/Q=="],
+   "scheduler": ["scheduler@0.23.2", "", { "dependencies": { "loose-envify": "^1.1.0" } }, "sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ=="],

    "secure-json-parse": ["secure-json-parse@4.0.0", "", {}, "sha512-dxtLJO6sc35jWidmLxo7ij+Eg48PM/kleBsxpC8QJE0qJICe+KawkDQmvCMZUr9u7WKVHgMW6vy3fQ7zMiFZMA=="],
@@ -600,8 +454,6 @@
    "sonic-boom": ["sonic-boom@4.2.0", "", { "dependencies": { "atomic-sleep": "^1.0.0" } }, "sha512-INb7TM37/mAcsGmc9hyyI6+QR3rR1zVRu36B0NeGXKnOOLiZOfER5SA+N7X7k3yUYRzLWafduTDvJAfDswwEww=="],

-   "space-separated-tokens": ["space-separated-tokens@2.0.2", "", {}, "sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q=="],
-
    "split": ["split@0.3.3", "", { "dependencies": { "through": "2" } }, "sha512-wD2AeVmxXRBoX44wAycgjVpMhvbwdI2aZjCkvfNcH1YqHQvJVa1duWc73OyVGJUc05fhFaTZeQ/PYsrmyH0JVA=="],

    "split2": ["split2@4.2.0", "", {}, "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg=="],
@@ -610,16 +462,10 @@
    "string-width": ["string-width@7.1.0", "", { "dependencies": { "emoji-regex": "^10.3.0", "get-east-asian-width": "^1.0.0", "strip-ansi": "^7.1.0" } }, "sha512-SEIJCWiX7Kg4c129n48aDRwLbFb2LJmXXFrWBG4NGaRtMQ3myKPKbwrD1BKqQn74oCoNMBVrfDEr5M9YxCsrkw=="],

-   "stringify-entities": ["stringify-entities@4.0.4", "", { "dependencies": { "character-entities-html4": "^2.0.0", "character-entities-legacy": "^3.0.0" } }, "sha512-IwfBptatlO+QCJUo19AqvrPNqlVMpW9YEL2LIVY+Rpv2qsjCGxaDLNRgeGsQWJhfItebuJhsGSLjaBbNSQ+ieg=="],
-
    "strip-ansi": ["strip-ansi@7.1.0", "", { "dependencies": { "ansi-regex": "^6.0.1" } }, "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ=="],

    "strip-final-newline": ["strip-final-newline@3.0.0", "", {}, "sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw=="],

-   "style-to-js": ["style-to-js@1.1.21", "", { "dependencies": { "style-to-object": "1.0.14" } }, "sha512-RjQetxJrrUJLQPHbLku6U/ocGtzyjbJMP9lCNK7Ag0CNh690nSH8woqWH9u16nMjYBAok+i7JO1NP2pOy8IsPQ=="],
-
-   "style-to-object": ["style-to-object@1.0.14", "", { "dependencies": { "inline-style-parser": "0.2.7" } }, "sha512-LIN7rULI0jBscWQYaSswptyderlarFkjQ+t79nzty8tcIAceVomEVlLzH5VP4Cmsv6MtKhs7qaAiwlcp+Mgaxw=="],
-
    "supports-color": ["supports-color@5.5.0", "", { "dependencies": { "has-flag": "^3.0.0" } }, "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow=="],

    "tar": ["tar@7.5.2", "", { "dependencies": { "@isaacs/fs-minipass": "^4.0.0", "chownr": "^3.0.0", "minipass": "^7.1.2", "minizlib": "^3.1.0", "yallist": "^5.0.0" } }, "sha512-7NyxrTE4Anh8km8iEy7o0QYPs+0JKBTj5ZaqHg6B39erLg0qYXN3BijtShwbsNSvQ+LN75+KV+C4QR/f6Gwnpg=="],
@@ -636,32 +482,12 @@
    "toad-cache": ["toad-cache@3.7.0", "", {}, "sha512-/m8M+2BJUpoJdgAHoG+baCwBT+tf2VraSfkBgl0Y00qIWt41DJ8R5B8nsEw0I58YwF5IZH6z24/2TobDKnqSWw=="],

-   "trim-lines": ["trim-lines@3.0.1", "", {}, "sha512-kRj8B+YHZCc9kQYdWfJB2/oUl9rA99qbowYYBtr4ui4mZyAQ2JpvVBd/6U2YloATfqBhBTSMhTpgBHtU0Mf3Rg=="],
-
-   "trough": ["trough@2.2.0", "", {}, "sha512-tmMpK00BjZiUyVyvrBK7knerNgmgvcV/KLVyuma/SC+TQN167GrMRciANTz09+k3zW8L8t60jWO1GpfkZdjTaw=="],
-
    "undici-types": ["undici-types@5.26.5", "", {}, "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="],

-   "unified": ["unified@11.0.5", "", { "dependencies": { "@types/unist": "^3.0.0", "bail": "^2.0.0", "devlop": "^1.0.0", "extend": "^3.0.0", "is-plain-obj": "^4.0.0", "trough": "^2.0.0", "vfile": "^6.0.0" } }, "sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA=="],
-
-   "unist-util-is": ["unist-util-is@6.0.1", "", { "dependencies": { "@types/unist": "^3.0.0" } }, "sha512-LsiILbtBETkDz8I9p1dQ0uyRUWuaQzd/cuEeS1hoRSyW5E5XGmTzlwY1OrNzzakGowI9Dr/I8HVaw4hTtnxy8g=="],
-
-   "unist-util-position": ["unist-util-position@5.0.0", "", { "dependencies": { "@types/unist": "^3.0.0" } }, "sha512-fucsC7HjXvkB5R3kTCO7kUjRdrS0BJt3M/FPxmHMBOm8JQi2BsHAHFsy27E0EolP8rp0NzXsJ+jNPyDWvOJZPA=="],
-
-   "unist-util-stringify-position": ["unist-util-stringify-position@4.0.0", "", { "dependencies": { "@types/unist": "^3.0.0" } }, "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ=="],
-
-   "unist-util-visit": ["unist-util-visit@5.1.0", "", { "dependencies": { "@types/unist": "^3.0.0", "unist-util-is": "^6.0.0", "unist-util-visit-parents": "^6.0.0" } }, "sha512-m+vIdyeCOpdr/QeQCu2EzxX/ohgS8KbnPDgFni4dQsfSCtpz8UqDyY5GjRru8PDKuYn7Fq19j1CQ+nJSsGKOzg=="],
-
-   "unist-util-visit-parents": ["unist-util-visit-parents@6.0.2", "", { "dependencies": { "@types/unist": "^3.0.0", "unist-util-is": "^6.0.0" } }, "sha512-goh1s1TBrqSqukSc8wrjwWhL0hiJxgA8m4kFxGlQ+8FYQ3C/m11FcTs4YYem7V664AhHVvgoQLk890Ssdsr2IQ=="],
-
    "universalify": ["universalify@2.0.1", "", {}, "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw=="],

    "update-browserslist-db": ["update-browserslist-db@1.0.10", "", { "dependencies": { "escalade": "^3.1.1", "picocolors": "^1.0.0" }, "peerDependencies": { "browserslist": ">= 4.21.0" }, "bin": { "browserslist-lint": "cli.js" } }, "sha512-OztqDenkfFkbSG+tRxBeAnCVPckDBcvibKd35yDONx6OU8N7sqgwc7rCbkJ/WcYtVRZ4ba68d6byhC21GFh7sQ=="],

-   "vfile": ["vfile@6.0.3", "", { "dependencies": { "@types/unist": "^3.0.0", "vfile-message": "^4.0.0" } }, "sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q=="],
-
-   "vfile-message": ["vfile-message@4.0.3", "", { "dependencies": { "@types/unist": "^3.0.0", "unist-util-stringify-position": "^4.0.0" } }, "sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw=="],
-
    "web-streams-polyfill": ["web-streams-polyfill@3.3.2", "", {}, "sha512-3pRGuxRF5gpuZc0W+EpwQRmCD7gRqcDOMt688KmdlDAgAyaB1XlN0zq2njfDNm44XVdIouE7pZ6GzbdyH47uIQ=="],

    "webpod": ["webpod@0.0.2", "", { "bin": { "webpod": "dist/index.js" } }, "sha512-cSwwQIeg8v4i3p4ajHhwgR7N6VyxAf+KYSSsY6Pd3aETE+xEU4vbitz7qQkB0I321xnhDdgtxuiSfk5r/FVtjg=="],
@@ -674,8 +500,6 @@
    "yaml": ["yaml@2.3.4", "", {}, "sha512-8aAvwVUSHpfEqTQ4w/KMlf3HcRdt50E5ODIQJBw1fQ5RL34xabzxtUlzTXVqc4rkZsPbvrXKWnABCD7kWSmocA=="],

-   "zwitch": ["zwitch@2.0.4", "", {}, "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A=="],
-
    "zx": ["zx@7.2.3", "", { "dependencies": { "@types/fs-extra": "^11.0.1", "@types/minimist": "^1.2.2", "@types/node": "^18.16.3", "@types/ps-tree": "^1.1.2", "@types/which": "^3.0.0", "chalk": "^5.2.0", "fs-extra": "^11.1.1", "fx": "*", "globby": "^13.1.4", "minimist": "^1.2.8", "node-fetch": "3.3.1", "ps-tree": "^1.2.0", "webpod": "^0", "which": "^3.0.0", "yaml": "^2.2.2" }, "bin": { "zx": "build/cli.js" } }, "sha512-QODu38nLlYXg/B/Gw7ZKiZrvPkEsjPN3LQ5JFXM7h0JvwhEdPNNl+4Ao1y4+o3CLNiDUNcwzQYZ4/Ko7kKzCMA=="],

    "@babel/generator/@jridgewell/gen-mapping": ["@jridgewell/gen-mapping@0.3.2", "", { "dependencies": { "@jridgewell/set-array": "^1.0.1", "@jridgewell/sourcemap-codec": "^1.4.10", "@jridgewell/trace-mapping": "^0.3.9" } }, "sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A=="],
@@ -694,8 +518,6 @@
    "npm-run-path/path-key": ["path-key@4.0.0", "", {}, "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ=="],

-   "parse-entities/@types/unist": ["@types/unist@2.0.11", "", {}, "sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA=="],
-
    "@babel/highlight/chalk/ansi-styles": ["ansi-styles@3.2.1", "", { "dependencies": { "color-convert": "^1.9.0" } }, "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA=="],

    "@babel/highlight/chalk/ansi-styles/color-convert": ["color-convert@1.9.3", "", { "dependencies": { "color-name": "1.1.3" } }, "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg=="],
@@ -14,18 +14,14 @@
    "fast-glob": "3.3.1",
    "fastify": "^5.0.0",
    "fdir": "^6.1.0",
-   "marked": "^17.0.1",
-   "mitata": "1.0.20",
-   "react": "^19",
-   "react-dom": "^19",
-   "react-markdown": "^9.0.3",
-   "remark": "^15.0.1",
-   "remark-html": "^16.0.1",
+   "mitata": "^1.0.25",
+   "react": "^18.3.1",
+   "react-dom": "^18.3.1",
    "string-width": "7.1.0",
-   "wrap-ansi": "^9.0.0",
    "strip-ansi": "^7.1.0",
    "tar": "^7.4.3",
    "tinycolor2": "^1.6.0",
+   "wrap-ansi": "^9.0.0",
    "zx": "^7.2.3"
  },
  "scripts": {
@@ -1,92 +0,0 @@
import React from "react";
import { renderToString } from "react-dom/server";
import ReactMarkdown from "react-markdown";

const markdown = `# Project README

## Introduction

This is a medium-sized markdown document that includes **bold text**, *italic text*,
and \`inline code\`. It also has [links](https://example.com) and various formatting.

## Features

- Feature one with **bold**
- Feature two with *emphasis*
- Feature three with \`code\`
- Feature four with [a link](https://example.com)

## Code Example

\`\`\`javascript
function hello() {
  console.log("Hello, world!");
  return 42;
}

const result = hello();
\`\`\`

## Table

| Name | Value | Description |
|------|-------|-------------|
| foo | 1 | First item |
| bar | 2 | Second item |
| baz | 3 | Third item |

## Blockquote

> This is a blockquote with **bold** and *italic* text.
> It spans multiple lines and contains a [link](https://example.com).

---

### Nested Lists

1. First ordered item
   - Nested unordered
   - Another nested
2. Second ordered item
   1. Nested ordered
   2. Another nested
3. Third ordered item

Some final paragraph with ~~strikethrough~~ text and more **formatting**.
`;

// Verify outputs are roughly the same
const bunHtml = renderToString(Bun.markdown.react(markdown));
const reactMarkdownHtml = renderToString(React.createElement(ReactMarkdown, { children: markdown }));

console.log("=== Bun.markdown.react output ===");
console.log(bunHtml.slice(0, 500));
console.log(`... (${bunHtml.length} chars total)\n`);

console.log("=== react-markdown output ===");
console.log(reactMarkdownHtml.slice(0, 500));
console.log(`... (${reactMarkdownHtml.length} chars total)\n`);

const server = Bun.serve({
  port: 0,
  routes: {
    "/bun-markdown": () => {
      return new Response(renderToString(Bun.markdown.react(markdown)), {
        headers: { "Content-Type": "text/html" },
      });
    },
    "/react-markdown": () => {
      return new Response(renderToString(React.createElement(ReactMarkdown, { children: markdown })), {
        headers: { "Content-Type": "text/html" },
      });
    },
  },
});

console.log(`Server listening on ${server.url}`);
console.log(`  ${server.url}bun-markdown`);
console.log(`  ${server.url}react-markdown`);
console.log();
console.log("Run:");
console.log(`  oha -c 20 -z 5s ${server.url}bun-markdown`);
console.log(`  oha -c 20 -z 5s ${server.url}react-markdown`);
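The deleted script above only eyeballs the two renders. A hypothetical automated version of its "roughly the same" check (the helper name and tolerance are invented for illustration) could normalize whitespace and compare lengths:

```js
// Hypothetical follow-up to the script above; bunHtml/reactMarkdownHtml as defined there.
function roughlyEqual(a, b, tolerance = 0.1) {
  const na = a.replace(/\s+/g, " ").trim();
  const nb = b.replace(/\s+/g, " ").trim();
  return Math.abs(na.length - nb.length) / Math.max(na.length, nb.length, 1) < tolerance;
}

console.log("outputs roughly equal:", roughlyEqual(bunHtml, reactMarkdownHtml));
```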
@@ -1,159 +0,0 @@
import { marked } from "marked";
import { remark } from "remark";
import remarkHtml from "remark-html";
import { bench, run, summary } from "../runner.mjs";

const remarkProcessor = remark().use(remarkHtml);

const small = `# Hello World

This is a **bold** and *italic* paragraph with a [link](https://example.com).

- Item 1
- Item 2
- Item 3
`;

const medium = `# Project README

## Introduction

This is a medium-sized markdown document that includes **bold text**, *italic text*,
and \`inline code\`. It also has [links](https://example.com) and various formatting.

## Features

- Feature one with **bold**
- Feature two with *emphasis*
- Feature three with \`code\`
- Feature four with [a link](https://example.com)

## Code Example

\`\`\`javascript
function hello() {
  console.log("Hello, world!");
  return 42;
}

const result = hello();
\`\`\`

## Table

| Name | Value | Description |
|------|-------|-------------|
| foo | 1 | First item |
| bar | 2 | Second item |
| baz | 3 | Third item |

## Blockquote

> This is a blockquote with **bold** and *italic* text.
> It spans multiple lines and contains a [link](https://example.com).

---

### Nested Lists

1. First ordered item
   - Nested unordered
   - Another nested
2. Second ordered item
   1. Nested ordered
   2. Another nested
3. Third ordered item

Some final paragraph with ~~strikethrough~~ text and more **formatting**.
`;

const large = medium.repeat(20);

const renderCallbacks = {
  heading: (children, { level }) => `<h${level}>${children}</h${level}>`,
  paragraph: children => `<p>${children}</p>`,
  strong: children => `<strong>${children}</strong>`,
  emphasis: children => `<em>${children}</em>`,
  codespan: children => `<code>${children}</code>`,
  code: (children, { language }) =>
    language
      ? `<pre><code class="language-${language}">${children}</code></pre>`
      : `<pre><code>${children}</code></pre>`,
  link: (children, { href, title }) =>
    title ? `<a href="${href}" title="${title}">${children}</a>` : `<a href="${href}">${children}</a>`,
  image: (children, { src, title }) =>
    title ? `<img src="${src}" alt="${children}" title="${title}" />` : `<img src="${src}" alt="${children}" />`,
  list: (children, { ordered, start }) => (ordered ? `<ol start="${start}">${children}</ol>` : `<ul>${children}</ul>`),
  listItem: children => `<li>${children}</li>`,
  blockquote: children => `<blockquote>${children}</blockquote>`,
  hr: () => `<hr />`,
  strikethrough: children => `<del>${children}</del>`,
  table: children => `<table>${children}</table>`,
  thead: children => `<thead>${children}</thead>`,
  tbody: children => `<tbody>${children}</tbody>`,
  tr: children => `<tr>${children}</tr>`,
  th: children => `<th>${children}</th>`,
  td: children => `<td>${children}</td>`,
};

summary(() => {
  if (typeof Bun !== "undefined" && Bun.markdown) {
    bench(`small (${small.length} chars) - Bun.markdown.html`, () => {
      return Bun.markdown.html(small);
    });

    bench(`small (${small.length} chars) - Bun.markdown.render`, () => {
      return Bun.markdown.render(small, renderCallbacks);
    });
  }

  bench(`small (${small.length} chars) - marked`, () => {
    return marked(small);
  });

  bench(`small (${small.length} chars) - remark`, () => {
    return remarkProcessor.processSync(small).toString();
  });
});

summary(() => {
  if (typeof Bun !== "undefined" && Bun.markdown) {
    bench(`medium (${medium.length} chars) - Bun.markdown.html`, () => {
      return Bun.markdown.html(medium);
    });

    bench(`medium (${medium.length} chars) - Bun.markdown.render`, () => {
      return Bun.markdown.render(medium, renderCallbacks);
    });
  }

  bench(`medium (${medium.length} chars) - marked`, () => {
    return marked(medium);
  });

  bench(`medium (${medium.length} chars) - remark`, () => {
    return remarkProcessor.processSync(medium).toString();
  });
});

summary(() => {
  if (typeof Bun !== "undefined" && Bun.markdown) {
    bench(`large (${large.length} chars) - Bun.markdown.html`, () => {
      return Bun.markdown.html(large);
    });

    bench(`large (${large.length} chars) - Bun.markdown.render`, () => {
      return Bun.markdown.render(large, renderCallbacks);
    });
  }

  bench(`large (${large.length} chars) - marked`, () => {
    return marked(large);
  });

  bench(`large (${large.length} chars) - remark`, () => {
    return remarkProcessor.processSync(large).toString();
  });
});

await run();

@@ -69,18 +69,8 @@ if(ENABLE_VALGRIND)
  list(APPEND MIMALLOC_CMAKE_ARGS -DMI_VALGRIND=ON)
endif()

# Enable architecture-specific optimizations when not building for baseline.
# On Linux aarch64, upstream mimalloc force-enables MI_OPT_ARCH, which adds
# -march=armv8.1-a (LSE atomics). This crashes on ARMv8.0 CPUs
# (Cortex-A53, Raspberry Pi 4, AWS a1 instances). Use MI_NO_OPT_ARCH
# to prevent that, but keep SIMD enabled, and add -moutline-atomics for
# runtime dispatch between LSE and LL/SC. macOS arm64 always has LSE
# (Apple Silicon), so MI_OPT_ARCH is safe there.
if(CMAKE_SYSTEM_PROCESSOR MATCHES "aarch64|arm64|ARM64|AARCH64" AND NOT APPLE)
  list(APPEND MIMALLOC_CMAKE_ARGS -DMI_NO_OPT_ARCH=ON)
  list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OPT_SIMD=ON)
  list(APPEND MIMALLOC_CMAKE_ARGS "-DCMAKE_C_FLAGS=-moutline-atomics")
elseif(NOT ENABLE_BASELINE)
# Enable SIMD optimizations when not building for baseline (older CPUs)
if(NOT ENABLE_BASELINE)
  list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OPT_ARCH=ON)
  list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OPT_SIMD=ON)
endif()

@@ -1,11 +1,8 @@
# NOTE: Changes to this file trigger QEMU JIT stress tests in CI.
# See scripts/verify-jit-stress-qemu.sh for details.

option(WEBKIT_VERSION "The version of WebKit to use")
option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading")

if(NOT WEBKIT_VERSION)
  set(WEBKIT_VERSION 515344bc5d65aa2d4f9ff277b5fb944f0e051dcd)
  set(WEBKIT_VERSION 9a2cc42ae1bf693a0fd0ceb9b1d7d965d9cfd3ea)
endif()

# Use preview build URL for Windows ARM64 until the fix is merged to main
@@ -87,11 +84,6 @@ if(LINUX AND ABI STREQUAL "musl")
  set(WEBKIT_SUFFIX "-musl")
endif()

# Baseline builds require a WebKit compiled without AVX instructions
if(ENABLE_BASELINE AND WEBKIT_ARCH STREQUAL "amd64")
  set(WEBKIT_SUFFIX "${WEBKIT_SUFFIX}-baseline")
endif()

if(DEBUG)
  set(WEBKIT_SUFFIX "${WEBKIT_SUFFIX}-debug")
elseif(ENABLE_LTO)

@@ -97,31 +97,6 @@ Filters respect your [workspace configuration](/pm/workspaces): If you have a `p
bun run --filter foo myscript
```

### Parallel and sequential mode

Combine `--filter` or `--workspaces` with `--parallel` or `--sequential` to run scripts across workspace packages with Foreman-style prefixed output:

```bash terminal icon="terminal"
# Run "build" in all matching packages concurrently
bun run --parallel --filter '*' build

# Run "build" in all workspace packages sequentially
bun run --sequential --workspaces build

# Run glob-matched scripts across all packages
bun run --parallel --filter '*' "build:*"

# Continue running even if one package's script fails
bun run --parallel --no-exit-on-error --filter '*' test

# Run multiple scripts across all packages
bun run --parallel --filter '*' build lint
```

Each line of output is prefixed with the package and script name (e.g. `pkg-a:build | ...`). Without `--filter`/`--workspaces`, the prefix is just the script name (e.g. `build | ...`). When a package's `package.json` has no `name` field, the relative path from the workspace root is used instead.

Use `--if-present` with `--workspaces` to skip packages that don't have the requested script instead of erroring.
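
For example, to run a `clean` script (a hypothetical script name) only in the packages that define one:

```bash terminal icon="terminal"
bun run --workspaces --if-present clean
```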

### Dependency Order

Bun respects package dependency order when running scripts. Say you have a package `foo` that depends on another package `bar` in your workspace, and both packages have a `build` script. When you run `bun --filter '*' build`, `foo`'s `build` will only start once `bar`'s has finished.
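
As a minimal sketch, `foo`'s `package.json` might declare the dependency like this (the `workspace:*` protocol and `build` command shown here are illustrative):

```json
{
  "name": "foo",
  "dependencies": {
    "bar": "workspace:*"
  },
  "scripts": {
    "build": "echo building foo"
  }
}
```

Because `foo` declares `bar` as a dependency, `bar`'s `build` runs to completion before `foo`'s starts.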

@@ -40,18 +40,6 @@ bun run <file or script>
  Run a script in all workspace packages (from the <code>workspaces</code> field in <code>package.json</code>)
</ParamField>

<ParamField path="--parallel" type="boolean">
  Run multiple scripts or workspace scripts concurrently with prefixed output
</ParamField>

<ParamField path="--sequential" type="boolean">
  Run multiple scripts or workspace scripts one after another with prefixed output
</ParamField>

<ParamField path="--no-exit-on-error" type="boolean">
  When using <code>--parallel</code> or <code>--sequential</code>, continue running other scripts when one fails
</ParamField>

### Runtime & Process Control

<ParamField path="--bun" type="boolean">

@@ -204,38 +204,26 @@ namespace uWS {
            }

            // do we have data to emit all?
            unsigned int remaining = chunkSize(state);
            if (data.length() >= remaining) {
            if (data.length() >= chunkSize(state)) {
                // emit all but 2 bytes then reset state to 0 and goto beginning
                // not fin
                std::string_view emitSoon;
                bool shouldEmit = false;
                // Validate the chunk terminator (\r\n) accounting for partial reads
                switch (remaining) {
                    default:
                        // remaining > 2: emit data and validate full terminator
                        emitSoon = std::string_view(data.data(), remaining - 2);
                        shouldEmit = true;
                        [[fallthrough]];
                    case 2:
                        // remaining >= 2: validate both \r and \n
                        if (data[remaining - 2] != '\r' || data[remaining - 1] != '\n') {
                            state = STATE_IS_ERROR;
                            return std::nullopt;
                        }
                        break;
                    case 1:
                        // remaining == 1: only \n left to validate
                        if (data[0] != '\n') {
                            state = STATE_IS_ERROR;
                            return std::nullopt;
                        }
                        break;
                    case 0:
                        // remaining == 0: terminator already consumed
                        break;
                if (chunkSize(state) > 2) {
                    emitSoon = std::string_view(data.data(), chunkSize(state) - 2);
                    shouldEmit = true;
                }
                data.remove_prefix(remaining);
                // Validate that the chunk terminator is \r\n to prevent request smuggling
                // The last 2 bytes of the chunk must be exactly \r\n
                // Note: chunkSize always includes +2 for the terminator (added in consumeHexNumber),
                // and chunks with size 0 (chunkSize == 2) are handled earlier at line 190.
                // Therefore chunkSize >= 3 here, so no underflow is possible.
                size_t terminatorOffset = chunkSize(state) - 2;
                if (data[terminatorOffset] != '\r' || data[terminatorOffset + 1] != '\n') {
                    state = STATE_IS_ERROR;
                    return std::nullopt;
                }
                data.remove_prefix(chunkSize(state));
                state = STATE_IS_CHUNKED;
                if (shouldEmit) {
                    return emitSoon;
@@ -244,45 +232,19 @@ namespace uWS {
            } else {
                /* We will consume all our input data */
                std::string_view emitSoon;
                unsigned int size = chunkSize(state);
                size_t len = data.length();
                if (size > 2) {
                    uint64_t maximalAppEmit = size - 2;
                    if (len > maximalAppEmit) {
                if (chunkSize(state) > 2) {
                    uint64_t maximalAppEmit = chunkSize(state) - 2;
                    if (data.length() > maximalAppEmit) {
                        emitSoon = data.substr(0, maximalAppEmit);
                        // Validate terminator bytes being consumed
                        size_t terminatorBytesConsumed = len - maximalAppEmit;
                        if (terminatorBytesConsumed >= 1 && data[maximalAppEmit] != '\r') {
                            state = STATE_IS_ERROR;
                            return std::nullopt;
                        }
                        if (terminatorBytesConsumed >= 2 && data[maximalAppEmit + 1] != '\n') {
                            state = STATE_IS_ERROR;
                            return std::nullopt;
                        }
                    } else {
                        //cb(data);
                        emitSoon = data;
                    }
                } else if (size == 2) {
                    // Only terminator bytes remain, validate what we have
                    if (len >= 1 && data[0] != '\r') {
                        state = STATE_IS_ERROR;
                        return std::nullopt;
                    }
                    if (len >= 2 && data[1] != '\n') {
                        state = STATE_IS_ERROR;
                        return std::nullopt;
                    }
                } else if (size == 1) {
                    // Only \n remains
                    if (data[0] != '\n') {
                        state = STATE_IS_ERROR;
                        return std::nullopt;
                    }
                }
                decChunkSize(state, (unsigned int) len);
                decChunkSize(state, (unsigned int) data.length());
                state |= STATE_IS_CHUNKED;
                data.remove_prefix(len);
                // new: decrease data by its size (bug)
                data.remove_prefix(data.length()); // new bug fix for getNextChunk
                if (emitSoon.length()) {
                    return emitSoon;
                } else {

@@ -1,5 +1,5 @@
#!/bin/sh
# Version: 27
# Version: 26

# A script that installs the dependencies needed to build and test Bun.
# This should work on macOS and Linux with a POSIX shell.
@@ -1061,11 +1061,6 @@ install_build_essentials() {
    go \
    xz
  install_packages apache2-utils
  # QEMU user-mode for baseline CPU verification in CI
  case "$arch" in
    x64) install_packages qemu-x86_64 ;;
    aarch64) install_packages qemu-aarch64 ;;
  esac
  ;;
esac

@@ -1,100 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail

# Verify that a Bun binary doesn't use CPU instructions beyond its baseline target.
# Uses QEMU user-mode emulation with restricted CPU features.
# Any illegal instruction (SIGILL) causes exit code 132 and fails the build.
#
# QEMU must be pre-installed in the CI image (see .buildkite/Dockerfile and
# scripts/bootstrap.sh).

ARCH=""
BINARY=""

while [[ $# -gt 0 ]]; do
  case $1 in
    --arch) ARCH="$2"; shift 2 ;;
    --binary) BINARY="$2"; shift 2 ;;
    *) echo "Unknown arg: $1"; exit 1 ;;
  esac
done

if [ -z "$ARCH" ] || [ -z "$BINARY" ]; then
  echo "Usage: $0 --arch <x64|aarch64> --binary <path>"
  exit 1
fi
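
# Example invocation (paths are illustrative):
#   ./scripts/verify-baseline-cpu.sh --arch x64 --binary bun-linux-x64-baseline/bun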

if [ ! -f "$BINARY" ]; then
  echo "ERROR: Binary not found: $BINARY"
  exit 1
fi

# Select QEMU binary and CPU model
HOST_ARCH=$(uname -m)
if [ "$ARCH" = "x64" ]; then
  QEMU_BIN="qemu-x86_64"
  if [ -f "/usr/bin/qemu-x86_64-static" ]; then
    QEMU_BIN="qemu-x86_64-static"
  fi
  QEMU_CPU="Nehalem"
  CPU_DESC="Nehalem (SSE4.2, no AVX/AVX2/AVX512)"
elif [ "$ARCH" = "aarch64" ]; then
  QEMU_BIN="qemu-aarch64"
  if [ -f "/usr/bin/qemu-aarch64-static" ]; then
    QEMU_BIN="qemu-aarch64-static"
  fi
  # cortex-a53 is ARMv8.0-A (no LSE atomics, no SVE). It's the most widely
  # supported ARMv8.0 model across QEMU versions.
  QEMU_CPU="cortex-a53"
  CPU_DESC="Cortex-A53 (ARMv8.0-A+CRC, no LSE/SVE)"
else
  echo "ERROR: Unknown arch: $ARCH"
  exit 1
fi

if ! command -v "$QEMU_BIN" &>/dev/null; then
  echo "ERROR: $QEMU_BIN not found. It must be pre-installed in the CI image."
  exit 1
fi

BINARY_NAME=$(basename "$BINARY")

echo "--- Verifying $BINARY_NAME on $CPU_DESC"
echo " Binary: $BINARY"
echo " QEMU: $QEMU_BIN -cpu $QEMU_CPU"
echo " Host: $HOST_ARCH"
echo ""

run_test() {
  local label="$1"
  shift
  echo "+++ $BINARY_NAME: $label"
  if "$QEMU_BIN" -cpu "$QEMU_CPU" "$@"; then
    echo " PASS"
    return 0
  else
    local exit_code=$?
    echo ""
    if [ $exit_code -eq 132 ]; then
      echo " FAIL: Illegal instruction (SIGILL)"
      echo ""
      echo " The $BINARY_NAME binary uses CPU instructions not available on $QEMU_CPU."
      if [ "$ARCH" = "x64" ]; then
        echo " The baseline x64 build targets Nehalem (SSE4.2)."
        echo " AVX, AVX2, and AVX512 instructions are not allowed."
      else
        echo " The aarch64 build targets Cortex-A53 (ARMv8.0-A+CRC)."
        echo " LSE atomics, SVE, and dotprod instructions are not allowed."
      fi
    else
      echo " FAIL: exit code $exit_code"
    fi
    exit $exit_code
  fi
}

run_test "bun --version" "$BINARY" --version
run_test "bun -e eval" "$BINARY" -e "console.log(JSON.stringify({ok:1+1}))"

echo ""
echo " All checks passed for $BINARY_NAME on $QEMU_CPU."

@@ -1,148 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail

# Run JSC JIT stress tests under QEMU to verify that JIT-compiled code
# doesn't use CPU instructions beyond the baseline target.
#
# This script exercises all JIT tiers (DFG, FTL, Wasm BBQ/OMG) and catches
# cases where JIT-generated code emits AVX instructions on x64 or LSE
# atomics on aarch64.
#
# See: test/js/bun/jsc-stress/ for the test fixtures.

ARCH=""
BINARY=""

while [[ $# -gt 0 ]]; do
  case $1 in
    --arch) ARCH="$2"; shift 2 ;;
    --binary) BINARY="$2"; shift 2 ;;
    *) echo "Unknown arg: $1"; exit 1 ;;
  esac
done

if [ -z "$ARCH" ] || [ -z "$BINARY" ]; then
  echo "Usage: $0 --arch <x64|aarch64> --binary <path>"
  exit 1
fi
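
# Example invocation (path is illustrative):
#   ./scripts/verify-jit-stress-qemu.sh --arch aarch64 --binary build/bun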

if [ ! -f "$BINARY" ]; then
  echo "ERROR: Binary not found: $BINARY"
  exit 1
fi

# Convert to absolute path for use after pushd
BINARY="$(cd "$(dirname "$BINARY")" && pwd)/$(basename "$BINARY")"

# Select QEMU binary and CPU model
if [ "$ARCH" = "x64" ]; then
  QEMU_BIN="qemu-x86_64"
  if [ -f "/usr/bin/qemu-x86_64-static" ]; then
    QEMU_BIN="qemu-x86_64-static"
  fi
  QEMU_CPU="Nehalem"
  CPU_DESC="Nehalem (SSE4.2, no AVX/AVX2/AVX512)"
elif [ "$ARCH" = "aarch64" ]; then
  QEMU_BIN="qemu-aarch64"
  if [ -f "/usr/bin/qemu-aarch64-static" ]; then
    QEMU_BIN="qemu-aarch64-static"
  fi
  QEMU_CPU="cortex-a53"
  CPU_DESC="Cortex-A53 (ARMv8.0-A+CRC, no LSE/SVE)"
else
  echo "ERROR: Unknown arch: $ARCH"
  exit 1
fi

if ! command -v "$QEMU_BIN" &>/dev/null; then
  echo "ERROR: $QEMU_BIN not found. It must be pre-installed in the CI image."
  exit 1
fi

BINARY_NAME=$(basename "$BINARY")
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
FIXTURES_DIR="$REPO_ROOT/test/js/bun/jsc-stress/fixtures"
WASM_FIXTURES_DIR="$FIXTURES_DIR/wasm"
PRELOAD_PATH="$REPO_ROOT/test/js/bun/jsc-stress/preload.js"

echo "--- Running JSC JIT stress tests on $CPU_DESC"
echo " Binary: $BINARY"
echo " QEMU: $QEMU_BIN -cpu $QEMU_CPU"
echo ""

SIGILL_FAILURES=0
OTHER_FAILURES=0
PASSED=0

run_fixture() {
  local fixture="$1"
  local fixture_name
  fixture_name=$(basename "$fixture")

  echo "+++ $fixture_name"
  if "$QEMU_BIN" -cpu "$QEMU_CPU" "$BINARY" --preload "$PRELOAD_PATH" "$fixture" 2>&1; then
    echo " PASS"
    ((PASSED++))
    return 0
  else
    local exit_code=$?
    if [ $exit_code -eq 132 ]; then
      echo " FAIL: Illegal instruction (SIGILL)"
      echo ""
      echo " JIT-compiled code in $fixture_name uses CPU instructions not available on $QEMU_CPU."
      if [ "$ARCH" = "x64" ]; then
        echo " The baseline x64 build targets Nehalem (SSE4.2)."
        echo " JIT must not emit AVX, AVX2, or AVX512 instructions."
      else
        echo " The aarch64 build targets Cortex-A53 (ARMv8.0-A+CRC)."
        echo " JIT must not emit LSE atomics, SVE, or dotprod instructions."
      fi
      ((SIGILL_FAILURES++))
    else
      # Non-SIGILL failures are warnings (test issues, not CPU instruction issues)
      echo " WARN: exit code $exit_code (not a CPU instruction issue)"
      ((OTHER_FAILURES++))
    fi
    return $exit_code
  fi
}

# Run JS fixtures (DFG/FTL)
echo "--- JS fixtures (DFG/FTL)"
for fixture in "$FIXTURES_DIR"/*.js; do
  if [ -f "$fixture" ]; then
    run_fixture "$fixture" || true
  fi
done

# Run Wasm fixtures (BBQ/OMG)
echo "--- Wasm fixtures (BBQ/OMG)"
for fixture in "$WASM_FIXTURES_DIR"/*.js; do
  if [ -f "$fixture" ]; then
    # Wasm tests need to run from the wasm fixtures directory
    # because they reference .wasm files relative to the script
    pushd "$WASM_FIXTURES_DIR" > /dev/null
    run_fixture "$fixture" || true
    popd > /dev/null
  fi
done

echo ""
echo "--- Summary"
echo " Passed: $PASSED"
echo " SIGILL failures: $SIGILL_FAILURES"
echo " Other failures: $OTHER_FAILURES (warnings, not CPU instruction issues)"
echo ""

if [ $SIGILL_FAILURES -gt 0 ]; then
  echo " FAILED: JIT-generated code uses unsupported CPU instructions."
  exit 1
fi

if [ $OTHER_FAILURES -gt 0 ]; then
  echo " Some tests failed for reasons unrelated to CPU instructions."
  echo " These are warnings and do not indicate JIT instruction issues."
fi

echo " All JIT stress tests passed on $QEMU_CPU (no SIGILL)."

@@ -15,7 +15,6 @@ hash: u64 = 0,
is_executable: bool = false,
source_map_index: u32 = std.math.maxInt(u32),
bytecode_index: u32 = std.math.maxInt(u32),
module_info_index: u32 = std.math.maxInt(u32),
output_kind: jsc.API.BuildArtifact.OutputKind,
/// Relative
dest_path: []const u8 = "",
@@ -211,7 +210,6 @@ pub const Options = struct {
    hash: ?u64 = null,
    source_map_index: ?u32 = null,
    bytecode_index: ?u32 = null,
    module_info_index: ?u32 = null,
    output_path: string,
    source_index: Index.Optional = .none,
    size: ?usize = null,
@@ -253,7 +251,6 @@ pub fn init(options: Options) OutputFile {
        .hash = options.hash orelse 0,
        .output_kind = options.output_kind,
        .bytecode_index = options.bytecode_index orelse std.math.maxInt(u32),
        .module_info_index = options.module_info_index orelse std.math.maxInt(u32),
        .source_map_index = options.source_map_index orelse std.math.maxInt(u32),
        .is_executable = options.is_executable,
        .value = switch (options.data) {

@@ -92,10 +92,6 @@ pub const StandaloneModuleGraph = struct {
        contents: Schema.StringPointer = .{},
        sourcemap: Schema.StringPointer = .{},
        bytecode: Schema.StringPointer = .{},
        module_info: Schema.StringPointer = .{},
        /// The file path used when generating bytecode (e.g., "B:/~BUN/root/app.js").
        /// Must match exactly at runtime for bytecode cache hits.
        bytecode_origin_path: Schema.StringPointer = .{},
        encoding: Encoding = .latin1,
        loader: bun.options.Loader = .file,
        module_format: ModuleFormat = .none,
@@ -163,10 +159,6 @@ pub const StandaloneModuleGraph = struct {
        encoding: Encoding = .binary,
        wtf_string: bun.String = bun.String.empty,
        bytecode: []u8 = "",
        module_info: []u8 = "",
        /// The file path used when generating bytecode (e.g., "B:/~BUN/root/app.js").
        /// Must match exactly at runtime for bytecode cache hits.
        bytecode_origin_path: []const u8 = "",
        module_format: ModuleFormat = .none,
        side: FileSide = .server,

@@ -341,8 +333,6 @@ pub const StandaloneModuleGraph = struct {
                else
                    .none,
                .bytecode = if (module.bytecode.length > 0) @constCast(sliceTo(raw_bytes, module.bytecode)) else &.{},
                .module_info = if (module.module_info.length > 0) @constCast(sliceTo(raw_bytes, module.module_info)) else &.{},
                .bytecode_origin_path = if (module.bytecode_origin_path.length > 0) sliceToZ(raw_bytes, module.bytecode_origin_path) else "",
                .module_format = module.module_format,
                .side = module.side,
            },
@@ -392,8 +382,6 @@ pub const StandaloneModuleGraph = struct {
            } else if (output_file.output_kind == .bytecode) {
                // Allocate up to 256 byte alignment for bytecode
                string_builder.cap += (output_file.value.buffer.bytes.len + 255) / 256 * 256 + 256;
            } else if (output_file.output_kind == .module_info) {
                string_builder.cap += output_file.value.buffer.bytes.len;
            } else {
                if (entry_point_id == null) {
                    if (output_file.side == null or output_file.side.? == .server) {
@@ -489,19 +477,6 @@ pub const StandaloneModuleGraph = struct {
                }
            };

            // Embed module_info for ESM bytecode
            const module_info: StringPointer = brk: {
                if (output_file.module_info_index != std.math.maxInt(u32)) {
                    const mi_bytes = output_files[output_file.module_info_index].value.buffer.bytes;
                    const offset = string_builder.len;
                    const writable = string_builder.writable();
                    @memcpy(writable[0..mi_bytes.len], mi_bytes[0..mi_bytes.len]);
                    string_builder.len += mi_bytes.len;
                    break :brk StringPointer{ .offset = @truncate(offset), .length = @truncate(mi_bytes.len) };
                }
                break :brk .{};
            };

            if (comptime bun.Environment.is_canary or bun.Environment.isDebug) {
                if (bun.env_var.BUN_FEATURE_FLAG_DUMP_CODE.get()) |dump_code_dir| {
                    const buf = bun.path_buffer_pool.get();
@@ -523,13 +498,6 @@ pub const StandaloneModuleGraph = struct {
                }
            }

            // When there's bytecode, store the bytecode output file's path as bytecode_origin_path.
            // This path was used to generate the bytecode cache and must match at runtime.
            const bytecode_origin_path: StringPointer = if (output_file.bytecode_index != std.math.maxInt(u32))
                string_builder.appendCountZ(output_files[output_file.bytecode_index].dest_path)
            else
                .{};

            var module = CompiledModuleGraphFile{
                .name = string_builder.fmtAppendCountZ("{s}{s}", .{
                    prefix,
@@ -547,8 +515,6 @@ pub const StandaloneModuleGraph = struct {
                    else => .none,
                } else .none,
                .bytecode = bytecode,
                .module_info = module_info,
                .bytecode_origin_path = bytecode_origin_path,
                .side = switch (output_file.side orelse .server) {
                    .server => .server,
                    .client => .client,

@@ -1,513 +0,0 @@
pub const RecordKind = enum(u8) {
    /// var_name
    declared_variable,
    /// let_name
    lexical_variable,
    /// module_name, import_name, local_name
    import_info_single,
    /// module_name, import_name, local_name
    import_info_single_type_script,
    /// module_name, import_name = '*', local_name
    import_info_namespace,
    /// export_name, import_name, module_name
    export_info_indirect,
    /// export_name, local_name, padding (for local => indirect conversion)
    export_info_local,
    /// export_name, module_name
    export_info_namespace,
    /// module_name
    export_info_star,
    _,

    pub fn len(record: RecordKind) !usize {
        return switch (record) {
            .declared_variable, .lexical_variable => 1,
            .import_info_single => 3,
            .import_info_single_type_script => 3,
            .import_info_namespace => 3,
            .export_info_indirect => 3,
            .export_info_local => 3,
            .export_info_namespace => 2,
            .export_info_star => 1,
            else => return error.InvalidRecordKind,
        };
    }
};

pub const Flags = packed struct(u8) {
    contains_import_meta: bool = false,
    is_typescript: bool = false,
    _padding: u6 = 0,
};

pub const ModuleInfoDeserialized = struct {
    strings_buf: []const u8,
    strings_lens: []align(1) const u32,
    requested_modules_keys: []align(1) const StringID,
    requested_modules_values: []align(1) const ModuleInfo.FetchParameters,
    buffer: []align(1) const StringID,
    record_kinds: []align(1) const RecordKind,
    flags: Flags,
    owner: union(enum) {
        module_info,
        allocated_slice: struct {
            slice: []const u8,
            allocator: std.mem.Allocator,
        },
    },
    pub fn deinit(self: *ModuleInfoDeserialized) void {
        switch (self.owner) {
            .module_info => {
                const mi: *ModuleInfo = @fieldParentPtr("_deserialized", self);
                mi.destroy();
            },
            .allocated_slice => |as| {
                as.allocator.free(as.slice);
                as.allocator.destroy(self);
            },
        }
    }

    inline fn eat(rem: *[]const u8, len: usize) ![]const u8 {
        if (rem.*.len < len) return error.BadModuleInfo;
        const res = rem.*[0..len];
        rem.* = rem.*[len..];
        return res;
    }
    inline fn eatC(rem: *[]const u8, comptime len: usize) !*const [len]u8 {
        if (rem.*.len < len) return error.BadModuleInfo;
        const res = rem.*[0..len];
        rem.* = rem.*[len..];
        return res;
    }
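    /// Parse a serialized module info record allocated with `gpa`. The layout
    /// (mirrored by `serialize` below) is, in order: record_kinds (u32 count +
    /// bytes, padded to 4), buffer (u32 count + StringIDs), requested_modules
    /// (u32 count + keys + values), flags (1 byte + 3 padding bytes), then
    /// string lengths (u32 count + u32s) followed by the raw string bytes.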
    pub fn create(source: []const u8, gpa: std.mem.Allocator) !*ModuleInfoDeserialized {
        const duped = try gpa.dupe(u8, source);
        errdefer gpa.free(duped);
        var rem: []const u8 = duped;
        const res = try gpa.create(ModuleInfoDeserialized);
        errdefer gpa.destroy(res);

        const record_kinds_len = std.mem.readInt(u32, try eatC(&rem, 4), .little);
        const record_kinds = std.mem.bytesAsSlice(RecordKind, try eat(&rem, record_kinds_len * @sizeOf(RecordKind)));
        _ = try eat(&rem, (4 - (record_kinds_len % 4)) % 4); // alignment padding

        const buffer_len = std.mem.readInt(u32, try eatC(&rem, 4), .little);
        const buffer = std.mem.bytesAsSlice(StringID, try eat(&rem, buffer_len * @sizeOf(StringID)));

        const requested_modules_len = std.mem.readInt(u32, try eatC(&rem, 4), .little);
        const requested_modules_keys = std.mem.bytesAsSlice(StringID, try eat(&rem, requested_modules_len * @sizeOf(StringID)));
        const requested_modules_values = std.mem.bytesAsSlice(ModuleInfo.FetchParameters, try eat(&rem, requested_modules_len * @sizeOf(ModuleInfo.FetchParameters)));

        const flags: Flags = @bitCast((try eatC(&rem, 1))[0]);
        _ = try eat(&rem, 3); // alignment padding

        const strings_len = std.mem.readInt(u32, try eatC(&rem, 4), .little);
        const strings_lens = std.mem.bytesAsSlice(u32, try eat(&rem, strings_len * @sizeOf(u32)));
        const strings_buf = rem;

        res.* = .{
            .strings_buf = strings_buf,
            .strings_lens = strings_lens,
            .requested_modules_keys = requested_modules_keys,
            .requested_modules_values = requested_modules_values,
            .buffer = buffer,
            .record_kinds = record_kinds,
            .flags = flags,
            .owner = .{ .allocated_slice = .{
                .slice = duped,
                .allocator = gpa,
            } },
        };
        return res;
    }

    /// Wrapper around `create` for use when loading from a cache (transpiler cache or standalone module graph).
    /// Returns `null` instead of panicking on corrupt/truncated data.
    pub fn createFromCachedRecord(source: []const u8, gpa: std.mem.Allocator) ?*ModuleInfoDeserialized {
        return create(source, gpa) catch |e| switch (e) {
            error.OutOfMemory => bun.outOfMemory(),
            error.BadModuleInfo => null,
        };
    }

    pub fn serialize(self: *const ModuleInfoDeserialized, writer: anytype) !void {
        try writer.writeInt(u32, @truncate(self.record_kinds.len), .little);
        try writer.writeAll(std.mem.sliceAsBytes(self.record_kinds));
        try writer.writeByteNTimes(0, (4 - (self.record_kinds.len % 4)) % 4); // alignment padding

        try writer.writeInt(u32, @truncate(self.buffer.len), .little);
        try writer.writeAll(std.mem.sliceAsBytes(self.buffer));

        try writer.writeInt(u32, @truncate(self.requested_modules_keys.len), .little);
        try writer.writeAll(std.mem.sliceAsBytes(self.requested_modules_keys));
        try writer.writeAll(std.mem.sliceAsBytes(self.requested_modules_values));

        try writer.writeByte(@bitCast(self.flags));
        try writer.writeByteNTimes(0, 3); // alignment padding

        try writer.writeInt(u32, @truncate(self.strings_lens.len), .little);
        try writer.writeAll(std.mem.sliceAsBytes(self.strings_lens));
        try writer.writeAll(self.strings_buf);
    }
};

const StringMapKey = enum(u32) {
    _,
};
pub const StringContext = struct {
    strings_buf: []const u8,
    strings_lens: []const u32,

    pub fn hash(_: @This(), s: []const u8) u32 {
        return @as(u32, @truncate(std.hash.Wyhash.hash(0, s)));
    }
    pub fn eql(self: @This(), fetch_key: []const u8, item_key: StringMapKey, item_i: usize) bool {
        return bun.strings.eqlLong(fetch_key, self.strings_buf[@intFromEnum(item_key)..][0..self.strings_lens[item_i]], true);
    }
};

pub const ModuleInfo = struct {
    /// all strings in wtf-8. index in hashmap = StringID
    gpa: std.mem.Allocator,
    strings_map: std.ArrayHashMapUnmanaged(StringMapKey, void, void, true),
    strings_buf: std.ArrayListUnmanaged(u8),
    strings_lens: std.ArrayListUnmanaged(u32),
    requested_modules: std.AutoArrayHashMap(StringID, FetchParameters),
    buffer: std.ArrayListUnmanaged(StringID),
    record_kinds: std.ArrayListUnmanaged(RecordKind),
    flags: Flags,
    exported_names: std.AutoArrayHashMapUnmanaged(StringID, void),
    finalized: bool = false,

    /// only initialized after .finalize() is called
    _deserialized: ModuleInfoDeserialized,

    pub fn asDeserialized(self: *ModuleInfo) *ModuleInfoDeserialized {
        bun.assert(self.finalized);
        return &self._deserialized;
    }

    pub const FetchParameters = enum(u32) {
        none = std.math.maxInt(u32),
        javascript = std.math.maxInt(u32) - 1,
        webassembly = std.math.maxInt(u32) - 2,
        json = std.math.maxInt(u32) - 3,
        _, // host_defined: cast to StringID
        pub fn hostDefined(value: StringID) FetchParameters {
            return @enumFromInt(@intFromEnum(value));
        }
    };

    pub const VarKind = enum { declared, lexical };
    pub fn addVar(self: *ModuleInfo, name: StringID, kind: VarKind) !void {
        switch (kind) {
            .declared => try self.addDeclaredVariable(name),
            .lexical => try self.addLexicalVariable(name),
        }
    }

    fn _addRecord(self: *ModuleInfo, kind: RecordKind, data: []const StringID) !void {
        bun.assert(!self.finalized);
        bun.assert(data.len == kind.len() catch unreachable);
        try self.record_kinds.append(self.gpa, kind);
        try self.buffer.appendSlice(self.gpa, data);
    }
    pub fn addDeclaredVariable(self: *ModuleInfo, id: StringID) !void {
        try self._addRecord(.declared_variable, &.{id});
    }
    pub fn addLexicalVariable(self: *ModuleInfo, id: StringID) !void {
        try self._addRecord(.lexical_variable, &.{id});
    }
    pub fn addImportInfoSingle(self: *ModuleInfo, module_name: StringID, import_name: StringID, local_name: StringID, only_used_as_type: bool) !void {
        try self._addRecord(if (only_used_as_type) .import_info_single_type_script else .import_info_single, &.{ module_name, import_name, local_name });
    }
    pub fn addImportInfoNamespace(self: *ModuleInfo, module_name: StringID, local_name: StringID) !void {
        try self._addRecord(.import_info_namespace, &.{ module_name, try self.str("*"), local_name });
    }
    pub fn addExportInfoIndirect(self: *ModuleInfo, export_name: StringID, import_name: StringID, module_name: StringID) !void {
        if (try self._hasOrAddExportedName(export_name)) return; // a syntax error will be emitted later in this case
        try self._addRecord(.export_info_indirect, &.{ export_name, import_name, module_name });
    }
    pub fn addExportInfoLocal(self: *ModuleInfo, export_name: StringID, local_name: StringID) !void {
        if (try self._hasOrAddExportedName(export_name)) return; // a syntax error will be emitted later in this case
        try self._addRecord(.export_info_local, &.{ export_name, local_name, @enumFromInt(std.math.maxInt(u32)) });
    }
    pub fn addExportInfoNamespace(self: *ModuleInfo, export_name: StringID, module_name: StringID) !void {
        if (try self._hasOrAddExportedName(export_name)) return; // a syntax error will be emitted later in this case
        try self._addRecord(.export_info_namespace, &.{ export_name, module_name });
    }
    pub fn addExportInfoStar(self: *ModuleInfo, module_name: StringID) !void {
        try self._addRecord(.export_info_star, &.{module_name});
    }

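    /// Records `name` as an exported name; returns true if it was already
    /// present (a duplicate export, which is reported as a syntax error later).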
    pub fn _hasOrAddExportedName(self: *ModuleInfo, name: StringID) !bool {
        if (try self.exported_names.fetchPut(self.gpa, name, {}) != null) return true;
        return false;
    }

    pub fn create(gpa: std.mem.Allocator, is_typescript: bool) !*ModuleInfo {
        const res = try gpa.create(ModuleInfo);
        res.* = ModuleInfo.init(gpa, is_typescript);
        return res;
    }
    fn init(allocator: std.mem.Allocator, is_typescript: bool) ModuleInfo {
        return .{
            .gpa = allocator,
            .strings_map = .{},
            .strings_buf = .{},
            .strings_lens = .{},
            .exported_names = .{},
            .requested_modules = std.AutoArrayHashMap(StringID, FetchParameters).init(allocator),
            .buffer = .empty,
            .record_kinds = .empty,
            .flags = .{ .contains_import_meta = false, .is_typescript = is_typescript },
            ._deserialized = undefined,
        };
    }
    fn deinit(self: *ModuleInfo) void {
        self.strings_map.deinit(self.gpa);
        self.strings_buf.deinit(self.gpa);
        self.strings_lens.deinit(self.gpa);
        self.exported_names.deinit(self.gpa);
        self.requested_modules.deinit();
        self.buffer.deinit(self.gpa);
        self.record_kinds.deinit(self.gpa);
    }
    pub fn destroy(self: *ModuleInfo) void {
        const alloc = self.gpa;
        self.deinit();
        alloc.destroy(self);
    }
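    /// Intern a WTF-8 string, returning its StringID. Calling again with the
    /// same bytes returns the existing ID instead of appending a duplicate.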
    pub fn str(self: *ModuleInfo, value: []const u8) !StringID {
        try self.strings_buf.ensureUnusedCapacity(self.gpa, value.len);
        try self.strings_lens.ensureUnusedCapacity(self.gpa, 1);
        const gpres = try self.strings_map.getOrPutAdapted(self.gpa, value, StringContext{
            .strings_buf = self.strings_buf.items,
            .strings_lens = self.strings_lens.items,
        });
        if (gpres.found_existing) return @enumFromInt(@as(u32, @intCast(gpres.index)));

        gpres.key_ptr.* = @enumFromInt(@as(u32, @truncate(self.strings_buf.items.len)));
        gpres.value_ptr.* = {};
        self.strings_buf.appendSliceAssumeCapacity(value);
        self.strings_lens.appendAssumeCapacity(@as(u32, @truncate(value.len)));
        return @enumFromInt(@as(u32, @intCast(gpres.index)));
    }
    pub fn requestModule(self: *ModuleInfo, import_record_path: StringID, fetch_parameters: FetchParameters) !void {
        // jsc only records the attributes of the first import with the given import_record_path. so only put if not exists.
        const gpres = try self.requested_modules.getOrPut(import_record_path);
        if (!gpres.found_existing) gpres.value_ptr.* = fetch_parameters;
    }

    /// Replace all occurrences of old_id with new_id in records and requested_modules.
    /// Used to fix up cross-chunk import specifiers after final paths are computed.
    pub fn replaceStringID(self: *ModuleInfo, old_id: StringID, new_id: StringID) void {
        bun.assert(!self.finalized);
        // Replace in record buffer
        for (self.buffer.items) |*item| {
            if (item.* == old_id) item.* = new_id;
        }
        // Replace in requested_modules keys (preserving insertion order)
        if (self.requested_modules.getIndex(old_id)) |idx| {
            self.requested_modules.keys()[idx] = new_id;
            self.requested_modules.reIndex() catch {};
        }
    }

    /// find any exports marked as 'local' that are actually 'indirect' and fix them
    pub fn finalize(self: *ModuleInfo) !void {
        bun.assert(!self.finalized);
        var local_name_to_module_name = std.AutoArrayHashMap(StringID, struct { module_name: StringID, import_name: StringID, record_kinds_idx: usize }).init(bun.default_allocator);
        defer local_name_to_module_name.deinit();
        {
            var i: usize = 0;
            for (self.record_kinds.items, 0..) |k, idx| {
                if (k == .import_info_single or k == .import_info_single_type_script) {
                    try local_name_to_module_name.put(self.buffer.items[i + 2], .{ .module_name = self.buffer.items[i], .import_name = self.buffer.items[i + 1], .record_kinds_idx = idx });
                }
                i += k.len() catch unreachable;
            }
        }

        {
            var i: usize = 0;
            for (self.record_kinds.items) |*k| {
                if (k.* == .export_info_local) {
                    if (local_name_to_module_name.get(self.buffer.items[i + 1])) |ip| {
                        k.* = .export_info_indirect;
                        self.buffer.items[i + 1] = ip.import_name;
                        self.buffer.items[i + 2] = ip.module_name;
                        // In TypeScript, the re-exported import may target a type-only
                        // export that was elided. Convert the import to SingleTypeScript
                        // so JSC tolerates it being NotFound during linking.
                        if (self.flags.is_typescript) {
                            self.record_kinds.items[ip.record_kinds_idx] = .import_info_single_type_script;
                        }
                    }
                }
                i += k.len() catch unreachable;
            }
        }

        self._deserialized = .{
            .strings_buf = self.strings_buf.items,
            .strings_lens = self.strings_lens.items,
            .requested_modules_keys = self.requested_modules.keys(),
            .requested_modules_values = self.requested_modules.values(),
            .buffer = self.buffer.items,
            .record_kinds = self.record_kinds.items,
            .flags = self.flags,
            .owner = .module_info,
        };

        self.finalized = true;
    }
};
pub const StringID = enum(u32) {
    star_default = std.math.maxInt(u32),
    star_namespace = std.math.maxInt(u32) - 1,
    _,
};

export fn zig__renderDiff(expected_ptr: [*:0]const u8, expected_len: usize, received_ptr: [*:0]const u8, received_len: usize, globalThis: *bun.jsc.JSGlobalObject) void {
    const formatter = DiffFormatter{
        .received_string = received_ptr[0..received_len],
        .expected_string = expected_ptr[0..expected_len],
        .globalThis = globalThis,
    };
    bun.Output.errorWriter().print("DIFF:\n{any}\n", .{formatter}) catch {};
}

export fn zig__ModuleInfoDeserialized__toJSModuleRecord(
    globalObject: *bun.jsc.JSGlobalObject,
    vm: *bun.jsc.VM,
    module_key: *const IdentifierArray,
    source_code: *const SourceCode,
    declared_variables: *VariableEnvironment,
    lexical_variables: *VariableEnvironment,
    res: *ModuleInfoDeserialized,
) ?*JSModuleRecord {
    defer res.deinit();

    var identifiers = IdentifierArray.create(res.strings_lens.len);
    defer identifiers.destroy();
    var offset: usize = 0;
    for (0.., res.strings_lens) |index, len| {
        if (res.strings_buf.len < offset + len) return null; // error!
        const sub = res.strings_buf[offset..][0..len];
        identifiers.setFromUtf8(index, vm, sub);
        offset += len;
    }

    {
        var i: usize = 0;
        for (res.record_kinds) |k| {
            if (i + (k.len() catch 0) > res.buffer.len) return null;
            switch (k) {
                .declared_variable => declared_variables.add(vm, identifiers, res.buffer[i]),
                .lexical_variable => lexical_variables.add(vm, identifiers, res.buffer[i]),
                .import_info_single, .import_info_single_type_script, .import_info_namespace, .export_info_indirect, .export_info_local, .export_info_namespace, .export_info_star => {},
                else => return null,
            }
            i += k.len() catch unreachable; // handled above
        }
    }

    const module_record = JSModuleRecord.create(globalObject, vm, module_key, source_code, declared_variables, lexical_variables, res.flags.contains_import_meta, res.flags.is_typescript);

    for (res.requested_modules_keys, res.requested_modules_values) |reqk, reqv| {
        switch (reqv) {
            .none => module_record.addRequestedModuleNullAttributesPtr(identifiers, reqk),
            .javascript => module_record.addRequestedModuleJavaScript(identifiers, reqk),
            .webassembly => module_record.addRequestedModuleWebAssembly(identifiers, reqk),
            .json => module_record.addRequestedModuleJSON(identifiers, reqk),
            else => |uv| module_record.addRequestedModuleHostDefined(identifiers, reqk, @enumFromInt(@intFromEnum(uv))),
        }
    }

    {
        var i: usize = 0;
        for (res.record_kinds) |k| {
            if (i + (k.len() catch unreachable) > res.buffer.len) unreachable; // handled above
            switch (k) {
                .declared_variable, .lexical_variable => {},
                .import_info_single => module_record.addImportEntrySingle(identifiers, res.buffer[i + 1], res.buffer[i + 2], res.buffer[i]),
                .import_info_single_type_script => module_record.addImportEntrySingleTypeScript(identifiers, res.buffer[i + 1], res.buffer[i + 2], res.buffer[i]),
                .import_info_namespace => module_record.addImportEntryNamespace(identifiers, res.buffer[i + 1], res.buffer[i + 2], res.buffer[i]),
                .export_info_indirect => module_record.addIndirectExport(identifiers, res.buffer[i + 0], res.buffer[i + 1], res.buffer[i + 2]),
                .export_info_local => module_record.addLocalExport(identifiers, res.buffer[i], res.buffer[i + 1]),
                .export_info_namespace => module_record.addNamespaceExport(identifiers, res.buffer[i], res.buffer[i + 1]),
                .export_info_star => module_record.addStarExport(identifiers, res.buffer[i]),
                else => unreachable, // handled above
            }
            i += k.len() catch unreachable; // handled above
        }
    }

    return module_record;
}
export fn zig__ModuleInfo__destroy(info: *ModuleInfo) void {
    info.destroy();
}

const VariableEnvironment = opaque {
    extern fn JSC__VariableEnvironment__add(environment: *VariableEnvironment, vm: *bun.jsc.VM, identifier_array: *IdentifierArray, identifier_index: StringID) void;
    pub const add = JSC__VariableEnvironment__add;
};
const IdentifierArray = opaque {
    extern fn JSC__IdentifierArray__create(len: usize) *IdentifierArray;
    pub const create = JSC__IdentifierArray__create;

    extern fn JSC__IdentifierArray__destroy(identifier_array: *IdentifierArray) void;
    pub const destroy = JSC__IdentifierArray__destroy;

    extern fn JSC__IdentifierArray__setFromUtf8(identifier_array: *IdentifierArray, n: usize, vm: *bun.jsc.VM, str: [*]const u8, len: usize) void;
    pub fn setFromUtf8(self: *IdentifierArray, n: usize, vm: *bun.jsc.VM, str: []const u8) void {
        JSC__IdentifierArray__setFromUtf8(self, n, vm, str.ptr, str.len);
    }
};
const SourceCode = opaque {};
const JSModuleRecord = opaque {
    extern fn JSC_JSModuleRecord__create(global_object: *bun.jsc.JSGlobalObject, vm: *bun.jsc.VM, module_key: *const IdentifierArray, source_code: *const SourceCode, declared_variables: *VariableEnvironment, lexical_variables: *VariableEnvironment, has_import_meta: bool, is_typescript: bool) *JSModuleRecord;
    pub const create = JSC_JSModuleRecord__create;

    extern fn JSC_JSModuleRecord__declaredVariables(module_record: *JSModuleRecord) *VariableEnvironment;
    pub const declaredVariables = JSC_JSModuleRecord__declaredVariables;
    extern fn JSC_JSModuleRecord__lexicalVariables(module_record: *JSModuleRecord) *VariableEnvironment;
    pub const lexicalVariables = JSC_JSModuleRecord__lexicalVariables;

    extern fn JSC_JSModuleRecord__addIndirectExport(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, export_name: StringID, import_name: StringID, module_name: StringID) void;
    pub const addIndirectExport = JSC_JSModuleRecord__addIndirectExport;
    extern fn JSC_JSModuleRecord__addLocalExport(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, export_name: StringID, local_name: StringID) void;
    pub const addLocalExport = JSC_JSModuleRecord__addLocalExport;
    extern fn JSC_JSModuleRecord__addNamespaceExport(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, export_name: StringID, module_name: StringID) void;
    pub const addNamespaceExport = JSC_JSModuleRecord__addNamespaceExport;
    extern fn JSC_JSModuleRecord__addStarExport(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, module_name: StringID) void;
    pub const addStarExport = JSC_JSModuleRecord__addStarExport;

    extern fn JSC_JSModuleRecord__addRequestedModuleNullAttributesPtr(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, module_name: StringID) void;
    pub const addRequestedModuleNullAttributesPtr = JSC_JSModuleRecord__addRequestedModuleNullAttributesPtr;
    extern fn JSC_JSModuleRecord__addRequestedModuleJavaScript(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, module_name: StringID) void;
    pub const addRequestedModuleJavaScript = JSC_JSModuleRecord__addRequestedModuleJavaScript;
    extern fn JSC_JSModuleRecord__addRequestedModuleWebAssembly(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, module_name: StringID) void;
    pub const addRequestedModuleWebAssembly = JSC_JSModuleRecord__addRequestedModuleWebAssembly;
    extern fn JSC_JSModuleRecord__addRequestedModuleJSON(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, module_name: StringID) void;
    pub const addRequestedModuleJSON = JSC_JSModuleRecord__addRequestedModuleJSON;
    extern fn JSC_JSModuleRecord__addRequestedModuleHostDefined(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, module_name: StringID, host_defined_import_type: StringID) void;
    pub const addRequestedModuleHostDefined = JSC_JSModuleRecord__addRequestedModuleHostDefined;

    extern fn JSC_JSModuleRecord__addImportEntrySingle(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, import_name: StringID, local_name: StringID, module_name: StringID) void;
    pub const addImportEntrySingle = JSC_JSModuleRecord__addImportEntrySingle;
    extern fn JSC_JSModuleRecord__addImportEntrySingleTypeScript(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, import_name: StringID, local_name: StringID, module_name: StringID) void;
    pub const addImportEntrySingleTypeScript = JSC_JSModuleRecord__addImportEntrySingleTypeScript;
    extern fn JSC_JSModuleRecord__addImportEntryNamespace(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, import_name: StringID, local_name: StringID, module_name: StringID) void;
    pub const addImportEntryNamespace = JSC_JSModuleRecord__addImportEntryNamespace;
};

export fn zig_log(msg: [*:0]const u8) void {
    bun.Output.errorWriter().print("{s}\n", .{std.mem.span(msg)}) catch {};
}

const bun = @import("bun");
const std = @import("std");
const DiffFormatter = @import("./bun.js/test/diff_format.zig").DiffFormatter;

@@ -68,7 +68,6 @@ ts_enums: TsEnumsMap = .{},
/// This is a list of named exports that may exist in a CommonJS module
/// We use this with `commonjs_at_runtime` to re-export CommonJS
has_commonjs_export_names: bool = false,
has_import_meta: bool = false,
import_meta_ref: Ref = Ref.None,

pub const CommonJSNamedExport = struct {

@@ -52,7 +52,7 @@ ts_enums: Ast.TsEnumsMap = .{},

flags: BundledAst.Flags = .{},

pub const Flags = packed struct(u16) {
pub const Flags = packed struct(u8) {
    // This is a list of CommonJS features. When a file uses CommonJS features,
    // it's not a candidate for "flat bundling" and must be wrapped in its own
    // closure.
@@ -65,8 +65,6 @@ pub const Flags = packed struct(u16) {
    has_lazy_export: bool = false,
    commonjs_module_exports_assigned_deoptimized: bool = false,
    has_explicit_use_strict_directive: bool = false,
    has_import_meta: bool = false,
    _padding: u7 = 0,
};

pub const empty = BundledAst.init(Ast.empty);
@@ -118,7 +116,6 @@ pub fn toAST(this: *const BundledAst) Ast {
        .has_lazy_export = this.flags.has_lazy_export,
        .commonjs_module_exports_assigned_deoptimized = this.flags.commonjs_module_exports_assigned_deoptimized,
        .directive = if (this.flags.has_explicit_use_strict_directive) "use strict" else null,
        .has_import_meta = this.flags.has_import_meta,
    };
}

@@ -171,7 +168,6 @@ pub fn init(ast: Ast) BundledAst {
            .has_lazy_export = ast.has_lazy_export,
            .commonjs_module_exports_assigned_deoptimized = ast.commonjs_module_exports_assigned_deoptimized,
            .has_explicit_use_strict_directive = strings.eqlComptime(ast.directive orelse "", "use strict"),
            .has_import_meta = ast.has_import_meta,
        },
    };
}

@@ -6591,7 +6591,6 @@ pub fn NewParser_(
            .top_level_await_keyword = p.top_level_await_keyword,
            .commonjs_named_exports = p.commonjs_named_exports,
            .has_commonjs_export_names = p.has_commonjs_export_names,
            .has_import_meta = p.has_import_meta,

            .hashbang = hashbang,
            // TODO: cross-module constant inlining

@@ -433,7 +433,6 @@ pub fn buildWithVm(ctx: bun.cli.Command.Context, cwd: []const u8, vm: *VirtualMa
                .asset => {},
                .bytecode => {},
                .sourcemap => {},
                .module_info => {},
                .@"metafile-json", .@"metafile-markdown" => {},
            }
        },

@@ -694,7 +694,6 @@ pub const AsyncModule = struct {
            &printer,
            .esm_ascii,
            mapper.get(),
            null,
        );
    }


@@ -178,7 +178,6 @@ pub fn transpileSourceCode(
    var cache = jsc.RuntimeTranspilerCache{
        .output_code_allocator = allocator,
        .sourcemap_allocator = bun.default_allocator,
        .esm_record_allocator = bun.default_allocator,
    };

    const old = jsc_vm.transpiler.log;
@@ -423,10 +422,6 @@ pub fn transpileSourceCode(
        dumpSourceString(jsc_vm, specifier, entry.output_code.byteSlice());
    }

    // TODO: module_info is only needed for standalone ESM bytecode.
    // For now, skip it entirely in the runtime transpiler.
    const module_info: ?*analyze_transpiled_module.ModuleInfoDeserialized = null;

    return ResolvedSource{
        .allocator = null,
        .source_code = switch (entry.output_code) {
@@ -441,7 +436,6 @@ pub fn transpileSourceCode(
        .specifier = input_specifier,
        .source_url = input_specifier.createIfDifferent(path.text),
        .is_commonjs_module = entry.metadata.module_type == .cjs,
        .module_info = module_info,
        .tag = brk: {
            if (entry.metadata.module_type == .cjs and source.path.isFile()) {
                const actual_package_json: *PackageJSON = package_json orelse brk2: {
@@ -510,11 +504,6 @@ pub fn transpileSourceCode(
    jsc_vm.resolved_count += jsc_vm.transpiler.linker.import_counter - start_count;
    jsc_vm.transpiler.linker.import_counter = 0;

    const is_commonjs_module = parse_result.ast.has_commonjs_export_names or parse_result.ast.exports_kind == .cjs;
    // TODO: module_info is only needed for standalone ESM bytecode.
    // For now, skip it entirely in the runtime transpiler.
    const module_info: ?*analyze_transpiled_module.ModuleInfo = null;

    var printer = source_code_printer.*;
    printer.ctx.reset();
    defer source_code_printer.* = printer;
@@ -527,7 +516,6 @@ pub fn transpileSourceCode(
            &printer,
            .esm_ascii,
            mapper.get(),
            module_info,
        );
    };

@@ -541,12 +529,9 @@ pub fn transpileSourceCode(
        }
    }

    const module_info_deserialized: ?*anyopaque = if (module_info) |mi| @ptrCast(mi.asDeserialized()) else null;

    if (jsc_vm.isWatcherEnabled()) {
        var resolved_source = jsc_vm.refCountedResolvedSource(printer.ctx.written, input_specifier, path.text, null, false);
        resolved_source.is_commonjs_module = is_commonjs_module;
        resolved_source.module_info = module_info_deserialized;
        resolved_source.is_commonjs_module = parse_result.ast.has_commonjs_export_names or parse_result.ast.exports_kind == .cjs;
        return resolved_source;
    }

@@ -579,8 +564,7 @@ pub fn transpileSourceCode(
        },
        .specifier = input_specifier,
        .source_url = input_specifier.createIfDifferent(path.text),
        .is_commonjs_module = is_commonjs_module,
        .module_info = module_info_deserialized,
        .is_commonjs_module = parse_result.ast.has_commonjs_export_names or parse_result.ast.exports_kind == .cjs,
        .tag = tag,
    };
},
@@ -1208,15 +1192,9 @@ pub fn fetchBuiltinModule(jsc_vm: *VirtualMachine, specifier: bun.String) !?Reso
        .source_code = file.toWTFString(),
        .specifier = specifier,
        .source_url = specifier.dupeRef(),
        // bytecode_origin_path is the path used when generating bytecode; must match for cache hits
        .bytecode_origin_path = if (file.bytecode_origin_path.len > 0) bun.String.fromBytes(file.bytecode_origin_path) else bun.String.empty,
        .source_code_needs_deref = false,
        .bytecode_cache = if (file.bytecode.len > 0) file.bytecode.ptr else null,
        .bytecode_cache_size = file.bytecode.len,
        .module_info = if (file.module_info.len > 0)
            analyze_transpiled_module.ModuleInfoDeserialized.createFromCachedRecord(file.module_info, bun.default_allocator)
        else
            null,
        .is_commonjs_module = file.module_format == .cjs,
    };
}
@@ -1346,7 +1324,6 @@ const string = []const u8;

const Fs = @import("../fs.zig");
const Runtime = @import("../runtime.zig");
const analyze_transpiled_module = @import("../analyze_transpiled_module.zig");
const ast = @import("../import_record.zig");
const node_module_module = @import("./bindings/NodeModuleModule.zig");
const std = @import("std");

@@ -14,8 +14,7 @@
|
||||
/// Version 15: Updated global defines table list.
|
||||
/// Version 16: Added typeof undefined minification optimization.
|
||||
/// Version 17: Removed transpiler import rewrite for bun:test. Not bumping it causes test/js/bun/http/req-url-leak.test.ts to fail with SyntaxError: Export named 'expect' not found in module 'bun:test'.
|
||||
/// Version 18: Include ESM record (module info) with an ES Module, see #15758
|
||||
const expected_version = 18;
|
||||
const expected_version = 17;
|
||||
|
||||
const debug = Output.scoped(.cache, .visible);
|
||||
const MINIMUM_CACHE_SIZE = 50 * 1024;
@@ -33,7 +32,6 @@ pub const RuntimeTranspilerCache = struct {

sourcemap_allocator: std.mem.Allocator,
output_code_allocator: std.mem.Allocator,
esm_record_allocator: std.mem.Allocator,

const seed = 42;
pub const Metadata = struct {
@@ -54,10 +52,6 @@ pub const RuntimeTranspilerCache = struct {
sourcemap_byte_length: u64 = 0,
sourcemap_hash: u64 = 0,

esm_record_byte_offset: u64 = 0,
esm_record_byte_length: u64 = 0,
esm_record_hash: u64 = 0,

pub const size = brk: {
var count: usize = 0;
const meta: Metadata = .{};
@@ -84,10 +78,6 @@ pub const RuntimeTranspilerCache = struct {
try writer.writeInt(u64, this.sourcemap_byte_offset, .little);
try writer.writeInt(u64, this.sourcemap_byte_length, .little);
try writer.writeInt(u64, this.sourcemap_hash, .little);

try writer.writeInt(u64, this.esm_record_byte_offset, .little);
try writer.writeInt(u64, this.esm_record_byte_length, .little);
try writer.writeInt(u64, this.esm_record_hash, .little);
}

pub fn decode(this: *Metadata, reader: anytype) !void {
@@ -112,10 +102,6 @@ pub const RuntimeTranspilerCache = struct {
this.sourcemap_byte_length = try reader.readInt(u64, .little);
this.sourcemap_hash = try reader.readInt(u64, .little);

this.esm_record_byte_offset = try reader.readInt(u64, .little);
this.esm_record_byte_length = try reader.readInt(u64, .little);
this.esm_record_hash = try reader.readInt(u64, .little);

switch (this.module_type) {
.esm, .cjs => {},
// Invalid module type
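For reference, the new esm_record_* fields follow the same fixed-width little-endian encoding as the existing sourcemap fields, which keeps Metadata.size a compile-time constant. A minimal round-trip sketch of that scheme, with a hypothetical EsmRecordMeta standing in for the three new fields:

    const std = @import("std");

    const EsmRecordMeta = struct {
        byte_offset: u64 = 0,
        byte_length: u64 = 0,
        hash: u64 = 0,

        fn encode(this: EsmRecordMeta, writer: anytype) !void {
            try writer.writeInt(u64, this.byte_offset, .little);
            try writer.writeInt(u64, this.byte_length, .little);
            try writer.writeInt(u64, this.hash, .little);
        }

        fn decode(reader: anytype) !EsmRecordMeta {
            return .{
                .byte_offset = try reader.readInt(u64, .little),
                .byte_length = try reader.readInt(u64, .little),
                .hash = try reader.readInt(u64, .little),
            };
        }
    };

    test "fixed-width metadata survives a round trip" {
        var buf: [24]u8 = undefined;
        var stream = std.io.fixedBufferStream(&buf);
        const original = EsmRecordMeta{ .byte_offset = 96, .byte_length = 512, .hash = 0xdead };
        try original.encode(stream.writer());
        stream.reset();
        const decoded = try EsmRecordMeta.decode(stream.reader());
        try std.testing.expectEqual(original, decoded);
    }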
@@ -134,7 +120,6 @@ pub const RuntimeTranspilerCache = struct {
metadata: Metadata,
output_code: OutputCode = .{ .utf8 = "" },
sourcemap: []const u8 = "",
esm_record: []const u8 = "",

pub const OutputCode = union(enum) {
utf8: []const u8,
@@ -157,14 +142,11 @@ pub const RuntimeTranspilerCache = struct {
}
};

pub fn deinit(this: *Entry, sourcemap_allocator: std.mem.Allocator, output_code_allocator: std.mem.Allocator, esm_record_allocator: std.mem.Allocator) void {
pub fn deinit(this: *Entry, sourcemap_allocator: std.mem.Allocator, output_code_allocator: std.mem.Allocator) void {
this.output_code.deinit(output_code_allocator);
if (this.sourcemap.len > 0) {
sourcemap_allocator.free(this.sourcemap);
}
if (this.esm_record.len > 0) {
esm_record_allocator.free(this.esm_record);
}
}

pub fn save(
@@ -174,7 +156,6 @@ pub const RuntimeTranspilerCache = struct {
input_hash: u64,
features_hash: u64,
sourcemap: []const u8,
esm_record: []const u8,
output_code: OutputCode,
exports_kind: bun.ast.ExportsKind,
) !void {
@@ -220,16 +201,10 @@ pub const RuntimeTranspilerCache = struct {
.output_byte_offset = Metadata.size,
.output_byte_length = output_bytes.len,
.sourcemap_byte_offset = Metadata.size + output_bytes.len,
.esm_record_byte_offset = Metadata.size + output_bytes.len + sourcemap.len,
.esm_record_byte_length = esm_record.len,
};

metadata.output_hash = hash(output_bytes);
metadata.sourcemap_hash = hash(sourcemap);
if (esm_record.len > 0) {
metadata.esm_record_hash = hash(esm_record);
}

var metadata_stream = std.io.fixedBufferStream(&metadata_buf);

try metadata.encode(metadata_stream.writer());
@@ -244,26 +219,20 @@ pub const RuntimeTranspilerCache = struct {
break :brk metadata_buf[0..metadata_stream.pos];
};

var vecs_buf: [4]bun.PlatformIOVecConst = undefined;
var vecs_i: usize = 0;
vecs_buf[vecs_i] = bun.platformIOVecConstCreate(metadata_bytes);
vecs_i += 1;
if (output_bytes.len > 0) {
vecs_buf[vecs_i] = bun.platformIOVecConstCreate(output_bytes);
vecs_i += 1;
}
if (sourcemap.len > 0) {
vecs_buf[vecs_i] = bun.platformIOVecConstCreate(sourcemap);
vecs_i += 1;
}
if (esm_record.len > 0) {
vecs_buf[vecs_i] = bun.platformIOVecConstCreate(esm_record);
vecs_i += 1;
}
const vecs: []const bun.PlatformIOVecConst = vecs_buf[0..vecs_i];
const vecs: []const bun.PlatformIOVecConst = if (output_bytes.len > 0)
&.{
bun.platformIOVecConstCreate(metadata_bytes),
bun.platformIOVecConstCreate(output_bytes),
bun.platformIOVecConstCreate(sourcemap),
}
else
&.{
bun.platformIOVecConstCreate(metadata_bytes),
bun.platformIOVecConstCreate(sourcemap),
};
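The replaced branching vecs construction generalizes to "append each non-empty segment to a fixed-capacity vector in file order". A standalone sketch of that pattern, with plain byte slices standing in for bun.PlatformIOVecConst:

    const std = @import("std");

    /// Collects the non-empty segments of a cache entry in file order:
    /// metadata header first, then output code, sourcemap, and ESM record.
    fn gatherSegments(
        buf: *[4][]const u8,
        metadata: []const u8,
        output: []const u8,
        sourcemap: []const u8,
        esm_record: []const u8,
    ) [][]const u8 {
        var i: usize = 0;
        buf[i] = metadata; // always present
        i += 1;
        for ([_][]const u8{ output, sourcemap, esm_record }) |segment| {
            if (segment.len > 0) {
                buf[i] = segment;
                i += 1;
            }
        }
        return buf[0..i];
    }

    test "empty segments are skipped but order is kept" {
        var buf: [4][]const u8 = undefined;
        const vecs = gatherSegments(&buf, "meta", "", "map", "esm");
        try std.testing.expectEqual(@as(usize, 3), vecs.len);
        try std.testing.expectEqualStrings("esm", vecs[2]);
    }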
var position: isize = 0;
const end_position = Metadata.size + output_bytes.len + sourcemap.len + esm_record.len;
const end_position = Metadata.size + output_bytes.len + sourcemap.len;

if (bun.Environment.allow_assert) {
var total: usize = 0;
@@ -273,7 +242,7 @@ pub const RuntimeTranspilerCache = struct {
}
bun.assert(end_position == total);
}
bun.assert(end_position == @as(i64, @intCast(sourcemap.len + output_bytes.len + Metadata.size + esm_record.len)));
bun.assert(end_position == @as(i64, @intCast(sourcemap.len + output_bytes.len + Metadata.size)));

bun.sys.preallocate_file(tmpfile.fd.cast(), 0, @intCast(end_position)) catch {};
while (position < end_position) {
@@ -294,7 +263,6 @@ pub const RuntimeTranspilerCache = struct {
file: std.fs.File,
sourcemap_allocator: std.mem.Allocator,
output_code_allocator: std.mem.Allocator,
esm_record_allocator: std.mem.Allocator,
) !void {
const stat_size = try file.getEndPos();
if (stat_size < Metadata.size + this.metadata.output_byte_length + this.metadata.sourcemap_byte_length) {
@@ -370,23 +338,6 @@ pub const RuntimeTranspilerCache = struct {

this.sourcemap = sourcemap;
}

if (this.metadata.esm_record_byte_length > 0) {
const esm_record = try esm_record_allocator.alloc(u8, this.metadata.esm_record_byte_length);
errdefer esm_record_allocator.free(esm_record);
const read_bytes = try file.preadAll(esm_record, this.metadata.esm_record_byte_offset);
if (read_bytes != this.metadata.esm_record_byte_length) {
return error.MissingData;
}

if (this.metadata.esm_record_hash != 0) {
if (hash(esm_record) != this.metadata.esm_record_hash) {
return error.InvalidHash;
}
}

this.esm_record = esm_record;
}
}
};
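Each trailing segment is verified the same way on load: read exactly byte_length bytes at byte_offset, then compare against the stored hash before trusting the data. A reduced sketch of the check over an in-memory buffer; it assumes the cache hash is a seeded Wyhash, which the `seed` constant above suggests but does not prove:

    const std = @import("std");

    const seed = 42; // assumed to match the cache's hash seed

    fn hash(bytes: []const u8) u64 {
        return std.hash.Wyhash.hash(seed, bytes);
    }

    /// Mirrors Entry.load for one segment: slice out [offset..offset+len]
    /// and reject the whole entry if the hash does not match.
    fn loadSegment(file_bytes: []const u8, offset: u64, len: u64, expected_hash: u64) ![]const u8 {
        if (offset + len > file_bytes.len) return error.MissingData;
        const segment = file_bytes[@intCast(offset)..][0..@intCast(len)];
        if (expected_hash != 0 and hash(segment) != expected_hash) return error.InvalidHash;
        return segment;
    }

    test "a flipped byte is caught by the segment hash" {
        var file = "headerESMRECORD".*;
        const good = try loadSegment(&file, 6, 9, hash("ESMRECORD"));
        try std.testing.expectEqualStrings("ESMRECORD", good);
        file[7] = 'x';
        try std.testing.expectError(error.InvalidHash, loadSegment(&file, 6, 9, hash("ESMRECORD")));
    }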
@@ -504,7 +455,6 @@ pub const RuntimeTranspilerCache = struct {
input_stat_size: u64,
sourcemap_allocator: std.mem.Allocator,
output_code_allocator: std.mem.Allocator,
esm_record_allocator: std.mem.Allocator,
) !Entry {
var tracer = bun.perf.trace("RuntimeTranspilerCache.fromFile");
defer tracer.end();
@@ -519,7 +469,6 @@ pub const RuntimeTranspilerCache = struct {
input_stat_size,
sourcemap_allocator,
output_code_allocator,
esm_record_allocator,
);
}

@@ -530,7 +479,6 @@ pub const RuntimeTranspilerCache = struct {
input_stat_size: u64,
sourcemap_allocator: std.mem.Allocator,
output_code_allocator: std.mem.Allocator,
esm_record_allocator: std.mem.Allocator,
) !Entry {
var metadata_bytes_buf: [Metadata.size * 2]u8 = undefined;
const cache_fd = try bun.sys.open(cache_file_path.sliceAssumeZ(), bun.O.RDONLY, 0).unwrap();
@@ -562,7 +510,7 @@ pub const RuntimeTranspilerCache = struct {
return error.MismatchedFeatureHash;
}

try entry.load(file, sourcemap_allocator, output_code_allocator, esm_record_allocator);
try entry.load(file, sourcemap_allocator, output_code_allocator);

return entry;
}
@@ -579,7 +527,6 @@ pub const RuntimeTranspilerCache = struct {
input_hash: u64,
features_hash: u64,
sourcemap: []const u8,
esm_record: []const u8,
source_code: bun.String,
exports_kind: bun.ast.ExportsKind,
) !void {
@@ -619,7 +566,6 @@ pub const RuntimeTranspilerCache = struct {
input_hash,
features_hash,
sourcemap,
esm_record,
output_code,
exports_kind,
);
@@ -653,7 +599,7 @@ pub const RuntimeTranspilerCache = struct {
parser_options.hashForRuntimeTranspiler(&features_hasher, used_jsx);
this.features_hash = features_hasher.final();

this.entry = fromFile(input_hash, this.features_hash.?, source.contents.len, this.sourcemap_allocator, this.output_code_allocator, this.esm_record_allocator) catch |err| {
this.entry = fromFile(input_hash, this.features_hash.?, source.contents.len, this.sourcemap_allocator, this.output_code_allocator) catch |err| {
debug("get(\"{s}\") = {s}", .{ source.path.text, @errorName(err) });
return false;
};
@@ -669,7 +615,7 @@ pub const RuntimeTranspilerCache = struct {
if (comptime bun.Environment.isDebug) {
if (!bun_debug_restore_from_cache) {
if (this.entry) |*entry| {
entry.deinit(this.sourcemap_allocator, this.output_code_allocator, this.esm_record_allocator);
entry.deinit(this.sourcemap_allocator, this.output_code_allocator);
this.entry = null;
}
}
@@ -678,7 +624,7 @@ pub const RuntimeTranspilerCache = struct {
return this.entry != null;
}

pub fn put(this: *RuntimeTranspilerCache, output_code_bytes: []const u8, sourcemap: []const u8, esm_record: []const u8) void {
pub fn put(this: *RuntimeTranspilerCache, output_code_bytes: []const u8, sourcemap: []const u8) void {
if (comptime !bun.FeatureFlags.runtime_transpiler_cache)
@compileError("RuntimeTranspilerCache is disabled");

@@ -689,7 +635,7 @@ pub const RuntimeTranspilerCache = struct {
const output_code = bun.String.cloneLatin1(output_code_bytes);
this.output_code = output_code;

toFile(this.input_byte_length.?, this.input_hash.?, this.features_hash.?, sourcemap, esm_record, output_code, this.exports_kind) catch |err| {
toFile(this.input_byte_length.?, this.input_hash.?, this.features_hash.?, sourcemap, output_code, this.exports_kind) catch |err| {
debug("put() = {s}", .{@errorName(err)});
return;
};

@@ -315,7 +315,6 @@ pub const RuntimeTranspilerStore = struct {
var cache = jsc.RuntimeTranspilerCache{
.output_code_allocator = allocator,
.sourcemap_allocator = bun.default_allocator,
.esm_record_allocator = bun.default_allocator,
};
var log = logger.Log.init(allocator);
defer {
@@ -472,10 +471,6 @@ pub const RuntimeTranspilerStore = struct {
dumpSourceString(vm, specifier, entry.output_code.byteSlice());
}

// TODO: module_info is only needed for standalone ESM bytecode.
// For now, skip it entirely in the runtime transpiler.
const module_info: ?*analyze_transpiled_module.ModuleInfoDeserialized = null;

this.resolved_source = ResolvedSource{
.allocator = null,
.source_code = switch (entry.output_code) {
@@ -488,7 +483,6 @@ pub const RuntimeTranspilerStore = struct {
},
},
.is_commonjs_module = entry.metadata.module_type == .cjs,
.module_info = module_info,
.tag = this.resolved_source.tag,
};

@@ -547,11 +541,6 @@ pub const RuntimeTranspilerStore = struct {
printer = source_code_printer.?.*;
}

const is_commonjs_module = parse_result.ast.has_commonjs_export_names or parse_result.ast.exports_kind == .cjs;
// TODO: module_info is only needed for standalone ESM bytecode.
// For now, skip it entirely in the runtime transpiler.
const module_info: ?*analyze_transpiled_module.ModuleInfo = null;

{
var mapper = vm.sourceMapHandler(&printer);
defer source_code_printer.?.* = printer;
@@ -561,9 +550,7 @@ pub const RuntimeTranspilerStore = struct {
&printer,
.esm_ascii,
mapper.get(),
module_info,
) catch |err| {
if (module_info) |mi| mi.destroy();
this.parse_error = err;
return;
};
@@ -602,8 +589,7 @@ pub const RuntimeTranspilerStore = struct {
this.resolved_source = ResolvedSource{
.allocator = null,
.source_code = source_code,
.is_commonjs_module = is_commonjs_module,
.module_info = if (module_info) |mi| @ptrCast(mi.asDeserialized()) else null,
.is_commonjs_module = parse_result.ast.has_commonjs_export_names or parse_result.ast.exports_kind == .cjs,
.tag = this.resolved_source.tag,
};
}
@@ -611,7 +597,6 @@ pub const RuntimeTranspilerStore = struct {
};

const Fs = @import("../fs.zig");
const analyze_transpiled_module = @import("../analyze_transpiled_module.zig");
const node_fallbacks = @import("../node_fallbacks.zig");
const std = @import("std");
const AsyncModule = @import("./AsyncModule.zig").AsyncModule;

@@ -675,8 +675,8 @@ pub const JSBundler = struct {
if (try config.getOptionalEnum(globalThis, "format", options.Format)) |format| {
this.format = format;

if (this.bytecode and format != .cjs and format != .esm) {
return globalThis.throwInvalidArguments("format must be 'cjs' or 'esm' when bytecode is true.", .{});
if (this.bytecode and format != .cjs) {
return globalThis.throwInvalidArguments("format must be 'cjs' when bytecode is true. Eventually we'll add esm support as well.", .{});
}
}
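The check that replaced the cjs-only rule now admits both module formats. A small sketch of the widened validation as a standalone function, with a hypothetical Format enum mirroring options.Format:

    const std = @import("std");

    const Format = enum { esm, cjs, iife };

    /// Bytecode is only generated for module formats JSC can cache:
    /// since this change, both 'cjs' and 'esm' qualify.
    fn validateBytecodeFormat(bytecode: bool, format: Format) error{InvalidFormat}!void {
        if (bytecode and format != .cjs and format != .esm) return error.InvalidFormat;
    }

    test "iife still rejects bytecode; esm no longer does" {
        try validateBytecodeFormat(true, .esm);
        try validateBytecodeFormat(true, .cjs);
        try validateBytecodeFormat(false, .iife);
        try std.testing.expectError(error.InvalidFormat, validateBytecodeFormat(true, .iife));
    }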
@@ -1717,12 +1717,11 @@ pub const BuildArtifact = struct {
@"entry-point",
sourcemap,
bytecode,
module_info,
@"metafile-json",
@"metafile-markdown",

pub fn isFileInStandaloneMode(this: OutputKind) bool {
return this != .sourcemap and this != .bytecode and this != .module_info and this != .@"metafile-json" and this != .@"metafile-markdown";
return this != .sourcemap and this != .bytecode and this != .@"metafile-json" and this != .@"metafile-markdown";
}
};
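With the new variant, isFileInStandaloneMode reads more naturally as an allow-list: only real output files get embedded into the standalone binary, while auxiliary artifacts travel through other channels. A sketch of the same predicate inverted; the leading variants (chunk, asset) are assumed, since the hunk only shows the tail of the enum:

    const std = @import("std");

    const OutputKind = enum {
        chunk, // assumed variant, not shown in the hunk
        asset, // assumed variant, not shown in the hunk
        @"entry-point",
        sourcemap,
        bytecode,
        module_info,
        @"metafile-json",
        @"metafile-markdown",

        /// Equivalent allow-list form of isFileInStandaloneMode:
        /// auxiliary artifacts are not embedded as standalone files.
        pub fn isFileInStandaloneMode(this: OutputKind) bool {
            return switch (this) {
                .chunk, .asset, .@"entry-point" => true,
                .sourcemap, .bytecode, .module_info, .@"metafile-json", .@"metafile-markdown" => false,
            };
        }
    };

    test "module_info is excluded from standalone file embedding" {
        try std.testing.expect(!OutputKind.module_info.isFileInStandaloneMode());
        try std.testing.expect(OutputKind.chunk.isFileInStandaloneMode());
    }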
@@ -84,7 +84,6 @@ pub const ProcessExitHandler = struct {
LifecycleScriptSubprocess,
ShellSubprocess,
ProcessHandle,
MultiRunProcessHandle,
SecurityScanSubprocess,
SyncProcess,
},
@@ -112,10 +111,6 @@ pub const ProcessExitHandler = struct {
const subprocess = this.ptr.as(ProcessHandle);
subprocess.onProcessExit(process, status, rusage);
},
@field(TaggedPointer.Tag, @typeName(MultiRunProcessHandle)) => {
const subprocess = this.ptr.as(MultiRunProcessHandle);
subprocess.onProcessExit(process, status, rusage);
},
@field(TaggedPointer.Tag, @typeName(ShellSubprocess)) => {
const subprocess = this.ptr.as(ShellSubprocess);
subprocess.onProcessExit(process, status, rusage);
@@ -2256,7 +2251,6 @@ pub const sync = struct {
};

const std = @import("std");
const MultiRunProcessHandle = @import("../../../cli/multi_run.zig").ProcessHandle;
const ProcessHandle = @import("../../../cli/filter_run.zig").ProcessHandle;

const bun = @import("bun");

@@ -1,337 +0,0 @@
#include "root.h"

#include "JavaScriptCore/JSInternalPromise.h"
#include "JavaScriptCore/JSModuleRecord.h"
#include "JavaScriptCore/GlobalObjectMethodTable.h"
#include "JavaScriptCore/Nodes.h"
#include "JavaScriptCore/Parser.h"
#include "JavaScriptCore/ParserError.h"
#include "JavaScriptCore/SyntheticModuleRecord.h"
#include <wtf/text/MakeString.h>
#include "JavaScriptCore/JSGlobalObject.h"
#include "JavaScriptCore/ExceptionScope.h"
#include "ZigSourceProvider.h"
#include "BunAnalyzeTranspiledModule.h"

// ref: JSModuleLoader.cpp
// ref: ModuleAnalyzer.cpp
// ref: JSModuleRecord.cpp
// ref: NodesAnalyzeModule.cpp, search ::analyzeModule

#include "JavaScriptCore/ModuleAnalyzer.h"
#include "JavaScriptCore/ErrorType.h"

namespace JSC {

String dumpRecordInfo(JSModuleRecord* moduleRecord);

Identifier getFromIdentifierArray(VM& vm, Identifier* identifierArray, uint32_t n)
{
if (n == std::numeric_limits<uint32_t>::max()) {
return vm.propertyNames->starDefaultPrivateName;
}
return identifierArray[n];
}
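getFromIdentifierArray reserves uint32_t max as an out-of-band index: it can never be a real position in the identifier array, so it stands for the synthetic "*default*" name without that identifier ever being serialized. The same sentinel trick on the Zig side might look like the following sketch (hypothetical names; the actual serializer lives in analyze_transpiled_module.zig):

    const std = @import("std");

    /// Matches the C++ side: numeric_limits<uint32_t>::max() is never a
    /// real index into the identifier array; it stands for the synthetic
    /// "*default*" export name.
    const star_default_sentinel: u32 = std.math.maxInt(u32);

    fn identifierAt(names: []const []const u8, index: u32) []const u8 {
        if (index == star_default_sentinel) return "*default*";
        return names[index];
    }

    test "sentinel index maps to the synthetic default name" {
        const names = [_][]const u8{ "foo", "bar" };
        try std.testing.expectEqualStrings("bar", identifierAt(&names, 1));
        try std.testing.expectEqualStrings("*default*", identifierAt(&names, star_default_sentinel));
    }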
extern "C" JSModuleRecord* zig__ModuleInfoDeserialized__toJSModuleRecord(JSGlobalObject* globalObject, VM& vm, const Identifier& module_key, const SourceCode& source_code, VariableEnvironment& declared_variables, VariableEnvironment& lexical_variables, bun_ModuleInfoDeserialized* module_info);
extern "C" void zig__renderDiff(const char* expected_ptr, size_t expected_len, const char* received_ptr, size_t received_len, JSGlobalObject* globalObject);

extern "C" Identifier* JSC__IdentifierArray__create(size_t len)
{
return new Identifier[len];
}
extern "C" void JSC__IdentifierArray__destroy(Identifier* identifier)
{
delete[] identifier;
}
extern "C" void JSC__IdentifierArray__setFromUtf8(Identifier* identifierArray, size_t n, VM& vm, char* str, size_t len)
{
identifierArray[n] = Identifier::fromString(vm, AtomString::fromUTF8(std::span<const char>(str, len)));
}

extern "C" void JSC__VariableEnvironment__add(VariableEnvironment& environment, VM& vm, Identifier* identifierArray, uint32_t index)
{
environment.add(getFromIdentifierArray(vm, identifierArray, index));
}

extern "C" VariableEnvironment* JSC_JSModuleRecord__declaredVariables(JSModuleRecord* moduleRecord)
{
return const_cast<VariableEnvironment*>(&moduleRecord->declaredVariables());
}
extern "C" VariableEnvironment* JSC_JSModuleRecord__lexicalVariables(JSModuleRecord* moduleRecord)
{
return const_cast<VariableEnvironment*>(&moduleRecord->lexicalVariables());
}

extern "C" JSModuleRecord* JSC_JSModuleRecord__create(JSGlobalObject* globalObject, VM& vm, const Identifier* moduleKey, const SourceCode& sourceCode, const VariableEnvironment& declaredVariables, const VariableEnvironment& lexicalVariables, bool hasImportMeta, bool isTypescript)
{
JSModuleRecord* result = JSModuleRecord::create(globalObject, vm, globalObject->moduleRecordStructure(), *moduleKey, sourceCode, declaredVariables, lexicalVariables, hasImportMeta ? ImportMetaFeature : 0);
result->m_isTypeScript = isTypescript;
return result;
}

extern "C" void JSC_JSModuleRecord__addIndirectExport(JSModuleRecord* moduleRecord, Identifier* identifierArray, uint32_t exportName, uint32_t importName, uint32_t moduleName)
{
moduleRecord->addExportEntry(JSModuleRecord::ExportEntry::createIndirect(getFromIdentifierArray(moduleRecord->vm(), identifierArray, exportName), getFromIdentifierArray(moduleRecord->vm(), identifierArray, importName), getFromIdentifierArray(moduleRecord->vm(), identifierArray, moduleName)));
}
extern "C" void JSC_JSModuleRecord__addLocalExport(JSModuleRecord* moduleRecord, Identifier* identifierArray, uint32_t exportName, uint32_t localName)
{
moduleRecord->addExportEntry(JSModuleRecord::ExportEntry::createLocal(getFromIdentifierArray(moduleRecord->vm(), identifierArray, exportName), getFromIdentifierArray(moduleRecord->vm(), identifierArray, localName)));
}
extern "C" void JSC_JSModuleRecord__addNamespaceExport(JSModuleRecord* moduleRecord, Identifier* identifierArray, uint32_t exportName, uint32_t moduleName)
{
moduleRecord->addExportEntry(JSModuleRecord::ExportEntry::createNamespace(getFromIdentifierArray(moduleRecord->vm(), identifierArray, exportName), getFromIdentifierArray(moduleRecord->vm(), identifierArray, moduleName)));
}
extern "C" void JSC_JSModuleRecord__addStarExport(JSModuleRecord* moduleRecord, Identifier* identifierArray, uint32_t moduleName)
{
moduleRecord->addStarExportEntry(getFromIdentifierArray(moduleRecord->vm(), identifierArray, moduleName));
}
extern "C" void JSC_JSModuleRecord__addRequestedModuleNullAttributesPtr(JSModuleRecord* moduleRecord, Identifier* identifierArray, uint32_t moduleName)
{
RefPtr<ScriptFetchParameters> attributes = RefPtr<ScriptFetchParameters> {};
moduleRecord->appendRequestedModule(getFromIdentifierArray(moduleRecord->vm(), identifierArray, moduleName), std::move(attributes));
}
extern "C" void JSC_JSModuleRecord__addRequestedModuleJavaScript(JSModuleRecord* moduleRecord, Identifier* identifierArray, uint32_t moduleName)
{
Ref<ScriptFetchParameters> attributes = ScriptFetchParameters::create(ScriptFetchParameters::Type::JavaScript);
moduleRecord->appendRequestedModule(getFromIdentifierArray(moduleRecord->vm(), identifierArray, moduleName), std::move(attributes));
}
extern "C" void JSC_JSModuleRecord__addRequestedModuleWebAssembly(JSModuleRecord* moduleRecord, Identifier* identifierArray, uint32_t moduleName)
{
Ref<ScriptFetchParameters> attributes = ScriptFetchParameters::create(ScriptFetchParameters::Type::WebAssembly);
moduleRecord->appendRequestedModule(getFromIdentifierArray(moduleRecord->vm(), identifierArray, moduleName), std::move(attributes));
}
extern "C" void JSC_JSModuleRecord__addRequestedModuleJSON(JSModuleRecord* moduleRecord, Identifier* identifierArray, uint32_t moduleName)
{
Ref<ScriptFetchParameters> attributes = ScriptFetchParameters::create(ScriptFetchParameters::Type::JSON);
moduleRecord->appendRequestedModule(getFromIdentifierArray(moduleRecord->vm(), identifierArray, moduleName), std::move(attributes));
}
extern "C" void JSC_JSModuleRecord__addRequestedModuleHostDefined(JSModuleRecord* moduleRecord, Identifier* identifierArray, uint32_t moduleName, uint32_t hostDefinedImportType)
{
Ref<ScriptFetchParameters> attributes = ScriptFetchParameters::create(getFromIdentifierArray(moduleRecord->vm(), identifierArray, hostDefinedImportType).string());
moduleRecord->appendRequestedModule(getFromIdentifierArray(moduleRecord->vm(), identifierArray, moduleName), std::move(attributes));
}

extern "C" void JSC_JSModuleRecord__addImportEntrySingle(JSModuleRecord* moduleRecord, Identifier* identifierArray, uint32_t importName, uint32_t localName, uint32_t moduleName)
{
moduleRecord->addImportEntry(JSModuleRecord::ImportEntry {
.type = JSModuleRecord::ImportEntryType::Single,
.moduleRequest = getFromIdentifierArray(moduleRecord->vm(), identifierArray, moduleName),
.importName = getFromIdentifierArray(moduleRecord->vm(), identifierArray, importName),
.localName = getFromIdentifierArray(moduleRecord->vm(), identifierArray, localName),
});
}
extern "C" void JSC_JSModuleRecord__addImportEntrySingleTypeScript(JSModuleRecord* moduleRecord, Identifier* identifierArray, uint32_t importName, uint32_t localName, uint32_t moduleName)
{
moduleRecord->addImportEntry(JSModuleRecord::ImportEntry {
.type = JSModuleRecord::ImportEntryType::SingleTypeScript,
.moduleRequest = getFromIdentifierArray(moduleRecord->vm(), identifierArray, moduleName),
.importName = getFromIdentifierArray(moduleRecord->vm(), identifierArray, importName),
.localName = getFromIdentifierArray(moduleRecord->vm(), identifierArray, localName),
});
}
extern "C" void JSC_JSModuleRecord__addImportEntryNamespace(JSModuleRecord* moduleRecord, Identifier* identifierArray, uint32_t importName, uint32_t localName, uint32_t moduleName)
{
moduleRecord->addImportEntry(JSModuleRecord::ImportEntry {
.type = JSModuleRecord::ImportEntryType::Namespace,
.moduleRequest = getFromIdentifierArray(moduleRecord->vm(), identifierArray, moduleName),
.importName = getFromIdentifierArray(moduleRecord->vm(), identifierArray, importName),
.localName = getFromIdentifierArray(moduleRecord->vm(), identifierArray, localName),
});
}

static EncodedJSValue fallbackParse(JSGlobalObject* globalObject, const Identifier& moduleKey, const SourceCode& sourceCode, JSInternalPromise* promise, JSModuleRecord* resultValue = nullptr);
extern "C" EncodedJSValue Bun__analyzeTranspiledModule(JSGlobalObject* globalObject, const Identifier& moduleKey, const SourceCode& sourceCode, JSInternalPromise* promise)
{
VM& vm = globalObject->vm();
auto scope = DECLARE_THROW_SCOPE(vm);

auto rejectWithError = [&](JSValue error) {
promise->reject(vm, globalObject, error);
return promise;
};

VariableEnvironment declaredVariables = VariableEnvironment();
VariableEnvironment lexicalVariables = VariableEnvironment();

auto provider = static_cast<Zig::SourceProvider*>(sourceCode.provider());

if (provider->m_resolvedSource.module_info == nullptr) {
dataLog("[note] module_info is null for module: ", moduleKey.utf8(), "\n");
RELEASE_AND_RETURN(scope, JSValue::encode(rejectWithError(createError(globalObject, WTF::String::fromLatin1("module_info is null")))));
}

auto moduleRecord = zig__ModuleInfoDeserialized__toJSModuleRecord(globalObject, vm, moduleKey, sourceCode, declaredVariables, lexicalVariables, static_cast<bun_ModuleInfoDeserialized*>(provider->m_resolvedSource.module_info));
// zig__ModuleInfoDeserialized__toJSModuleRecord consumes and frees the module_info.
// Null it out to prevent use-after-free via the dangling pointer.
provider->m_resolvedSource.module_info = nullptr;
if (moduleRecord == nullptr) {
RELEASE_AND_RETURN(scope, JSValue::encode(rejectWithError(createError(globalObject, WTF::String::fromLatin1("parseFromSourceCode failed")))));
}

#if BUN_DEBUG
RELEASE_AND_RETURN(scope, fallbackParse(globalObject, moduleKey, sourceCode, promise, moduleRecord));
#else
promise->resolve(globalObject, moduleRecord);
RELEASE_AND_RETURN(scope, JSValue::encode(promise));
#endif
}
static EncodedJSValue fallbackParse(JSGlobalObject* globalObject, const Identifier& moduleKey, const SourceCode& sourceCode, JSInternalPromise* promise, JSModuleRecord* resultValue)
{
VM& vm = globalObject->vm();
auto scope = DECLARE_THROW_SCOPE(vm);
auto rejectWithError = [&](JSValue error) {
promise->reject(vm, globalObject, error);
return promise;
};

ParserError error;
std::unique_ptr<ModuleProgramNode> moduleProgramNode = parseRootNode<ModuleProgramNode>(
vm, sourceCode, ImplementationVisibility::Public, JSParserBuiltinMode::NotBuiltin,
StrictModeLexicallyScopedFeature, JSParserScriptMode::Module, SourceParseMode::ModuleAnalyzeMode, error);
if (error.isValid())
RELEASE_AND_RETURN(scope, JSValue::encode(rejectWithError(error.toErrorObject(globalObject, sourceCode))));
ASSERT(moduleProgramNode);

ModuleAnalyzer moduleAnalyzer(globalObject, moduleKey, sourceCode, moduleProgramNode->varDeclarations(), moduleProgramNode->lexicalVariables(), moduleProgramNode->features());
RETURN_IF_EXCEPTION(scope, JSValue::encode(promise->rejectWithCaughtException(globalObject, scope)));

auto result = moduleAnalyzer.analyze(*moduleProgramNode);
if (!result) {
auto [errorType, message] = std::move(result.error());
RELEASE_AND_RETURN(scope, JSValue::encode(rejectWithError(createError(globalObject, errorType, message))));
}

JSModuleRecord* moduleRecord = result.value();

if (resultValue != nullptr) {
auto actual = dumpRecordInfo(resultValue);
auto expected = dumpRecordInfo(moduleRecord);
if (actual != expected) {
dataLog("\n\n\n\n\n\n\x1b[95mBEGIN analyzeTranspiledModule\x1b(B\x1b[m\n --- module key ---\n", moduleKey.utf8().data(), "\n --- code ---\n\n", sourceCode.toUTF8().data(), "\n");
dataLog(" ------", "\n");
dataLog(" BunAnalyzeTranspiledModule:", "\n");

zig__renderDiff(expected.utf8().data(), expected.utf8().length(), actual.utf8().data(), actual.utf8().length(), globalObject);

RELEASE_AND_RETURN(scope, JSValue::encode(rejectWithError(createError(globalObject, WTF::String::fromLatin1("Imports different between parseFromSourceCode and fallbackParse")))));
}
}

scope.release();
promise->resolve(globalObject, resultValue == nullptr ? moduleRecord : resultValue);
return JSValue::encode(promise);
}

String dumpRecordInfo(JSModuleRecord* moduleRecord)
{
WTF::StringPrintStream stream;

{
Vector<String> sortedVars;
for (const auto& pair : moduleRecord->declaredVariables())
sortedVars.append(String(pair.key.get()));
std::sort(sortedVars.begin(), sortedVars.end(), [](const String& a, const String& b) {
return codePointCompare(a, b) < 0;
});
stream.print(" varDeclarations:\n");
for (const auto& name : sortedVars)
stream.print(" - ", name, "\n");
}

{
Vector<String> sortedVars;
for (const auto& pair : moduleRecord->lexicalVariables())
sortedVars.append(String(pair.key.get()));
std::sort(sortedVars.begin(), sortedVars.end(), [](const String& a, const String& b) {
return codePointCompare(a, b) < 0;
});
stream.print(" lexicalVariables:\n");
for (const auto& name : sortedVars)
stream.print(" - ", name, "\n");
}

stream.print(" features: (not accessible)\n");

stream.print("\nAnalyzing ModuleRecord key(", moduleRecord->moduleKey().impl(), ")\n");

stream.print(" Dependencies: ", moduleRecord->requestedModules().size(), " modules\n");
{
Vector<String> sortedDeps;
for (const auto& request : moduleRecord->requestedModules()) {
WTF::StringPrintStream line;
if (request.m_attributes == nullptr)
line.print(" module(", request.m_specifier, ")\n");
else
line.print(" module(", request.m_specifier, "),attributes(", (uint8_t)request.m_attributes->type(), ", ", request.m_attributes->hostDefinedImportType(), ")\n");
sortedDeps.append(line.toString());
}
std::sort(sortedDeps.begin(), sortedDeps.end(), [](const String& a, const String& b) {
return codePointCompare(a, b) < 0;
});
for (const auto& dep : sortedDeps)
stream.print(dep);
}

stream.print(" Import: ", moduleRecord->importEntries().size(), " entries\n");
{
Vector<String> sortedImports;
for (const auto& pair : moduleRecord->importEntries()) {
WTF::StringPrintStream line;
auto& importEntry = pair.value;
line.print(" import(", importEntry.importName, "), local(", importEntry.localName, "), module(", importEntry.moduleRequest, ")\n");
sortedImports.append(line.toString());
}
std::sort(sortedImports.begin(), sortedImports.end(), [](const String& a, const String& b) {
return codePointCompare(a, b) < 0;
});
for (const auto& imp : sortedImports)
stream.print(imp);
}

stream.print(" Export: ", moduleRecord->exportEntries().size(), " entries\n");
Vector<String> sortedEntries;
for (const auto& pair : moduleRecord->exportEntries()) {
WTF::StringPrintStream line;
auto& exportEntry = pair.value;
switch (exportEntry.type) {
case AbstractModuleRecord::ExportEntry::Type::Local:
line.print(" [Local] ", "export(", exportEntry.exportName, "), local(", exportEntry.localName, ")\n");
break;

case AbstractModuleRecord::ExportEntry::Type::Indirect:
line.print(" [Indirect] ", "export(", exportEntry.exportName, "), import(", exportEntry.importName, "), module(", exportEntry.moduleName, ")\n");
break;

case AbstractModuleRecord::ExportEntry::Type::Namespace:
line.print(" [Namespace] ", "export(", exportEntry.exportName, "), module(", exportEntry.moduleName, ")\n");
break;
}
sortedEntries.append(line.toString());
}
std::sort(sortedEntries.begin(), sortedEntries.end(), [](const String& a, const String& b) {
return codePointCompare(a, b) < 0;
});
for (const auto& entry : sortedEntries)
stream.print(entry);

{
Vector<String> sortedStarExports;
for (const auto& moduleName : moduleRecord->starExportEntries()) {
WTF::StringPrintStream line;
line.print(" [Star] module(", moduleName.get(), ")\n");
sortedStarExports.append(line.toString());
}
std::sort(sortedStarExports.begin(), sortedStarExports.end(), [](const String& a, const String& b) {
return codePointCompare(a, b) < 0;
});
for (const auto& entry : sortedStarExports)
stream.print(entry);
}

stream.print(" -> done\n");

return stream.toString();
}

}
@@ -1 +0,0 @@
struct bun_ModuleInfoDeserialized;
@@ -24,16 +24,8 @@ pub const ResolvedSource = extern struct {
/// This is for source_code
source_code_needs_deref: bool = true,
already_bundled: bool = false,

// -- Bytecode cache fields --
bytecode_cache: ?[*]u8 = null,
bytecode_cache_size: usize = 0,
module_info: ?*anyopaque = null,
/// The file path used as the source origin for bytecode cache validation.
/// JSC validates bytecode by checking if the origin URL matches exactly what
/// was used at build time. If empty, the origin is derived from source_url.
/// This is converted to a file:// URL on the C++ side.
bytecode_origin_path: bun.String = bun.String.empty,

pub const Tag = @import("ResolvedSourceTag").ResolvedSourceTag;
};

@@ -75,14 +75,6 @@ Ref<SourceProvider> SourceProvider::create(
JSC::SourceProviderSourceType sourceType,
bool isBuiltin)
{
// Use BunTranspiledModule when module_info is present.
// This allows JSC to skip parsing during the analyze phase (uses pre-computed imports/exports).
// Bytecode cache (if present) is used separately during the evaluate phase.
if (resolvedSource.module_info != nullptr) {
ASSERT(!resolvedSource.isCommonJSModule);
sourceType = JSC::SourceProviderSourceType::BunTranspiledModule;
}

auto string = resolvedSource.source_code.toWTFString(BunString::ZeroCopy);
auto sourceURLString = resolvedSource.source_url.toWTFString(BunString::ZeroCopy);

@@ -99,18 +91,6 @@ Ref<SourceProvider> SourceProvider::create(
// https://github.com/oven-sh/bun/issues/9521
}

// Compute source origin: use explicit bytecode_origin_path if provided, otherwise derive from source_url.
// bytecode_origin_path is used for bytecode cache validation where the origin must match
// exactly what was used at build time.
const auto getSourceOrigin = [&]() -> SourceOrigin {
auto bytecodeOriginPath = resolvedSource.bytecode_origin_path.toWTFString(BunString::ZeroCopy);
if (!bytecodeOriginPath.isNull() && !bytecodeOriginPath.isEmpty()) {
// Convert file path to file:// URL (same as build time)
return SourceOrigin(WTF::URL::fileURLWithFileSystemPath(bytecodeOriginPath));
}
return toSourceOrigin(sourceURLString, isBuiltin);
};

const auto getProvider = [&]() -> Ref<SourceProvider> {
if (resolvedSource.bytecode_cache != nullptr) {
const auto destructorPtr = [](const void* ptr) {
@@ -121,15 +101,13 @@ Ref<SourceProvider> SourceProvider::create(
};
const auto destructor = resolvedSource.needsDeref ? destructorPtr : destructorNoOp;

auto origin = getSourceOrigin();

Ref<JSC::CachedBytecode> bytecode = JSC::CachedBytecode::create(std::span<uint8_t>(resolvedSource.bytecode_cache, resolvedSource.bytecode_cache_size), destructor, {});
auto provider = adoptRef(*new SourceProvider(
globalObject->isThreadLocalDefaultGlobalObject ? globalObject : nullptr,
resolvedSource,
string.isNull() ? *StringImpl::empty() : *string.impl(),
JSC::SourceTaintedOrigin::Untainted,
origin,
toSourceOrigin(sourceURLString, isBuiltin),
sourceURLString.impl(), TextPosition(),
sourceType));
provider->m_cachedBytecode = WTF::move(bytecode);
@@ -141,7 +119,7 @@ Ref<SourceProvider> SourceProvider::create(
resolvedSource,
string.isNull() ? *StringImpl::empty() : *string.impl(),
JSC::SourceTaintedOrigin::Untainted,
getSourceOrigin(),
toSourceOrigin(sourceURLString, isBuiltin),
sourceURLString.impl(), TextPosition(),
sourceType));
};
@@ -211,8 +189,6 @@ extern "C" bool generateCachedModuleByteCodeFromSourceCode(BunString* sourceProv

auto key = JSC::sourceCodeKeyForSerializedModule(vm, sourceCode);

dataLogLnIf(JSC::Options::verboseDiskCache(), "[Bytecode Build] generateModule url=", sourceProviderURL->toWTFString(), " origin=", sourceCode.provider()->sourceOrigin().url().string(), " sourceSize=", inputSourceCodeSize, " keyHash=", key.hash());

RefPtr<JSC::CachedBytecode> cachedBytecode = JSC::encodeCodeBlock(vm, key, unlinkedCodeBlock);
if (!cachedBytecode)
return false;
@@ -246,8 +222,6 @@ extern "C" bool generateCachedCommonJSProgramByteCodeFromSourceCode(BunString* s

auto key = JSC::sourceCodeKeyForSerializedProgram(vm, sourceCode);

dataLogLnIf(JSC::Options::verboseDiskCache(), "[Bytecode Build] generateCJS url=", sourceProviderURL->toWTFString(), " origin=", sourceCode.provider()->sourceOrigin().url().string(), " sourceSize=", inputSourceCodeSize, " keyHash=", key.hash());

RefPtr<JSC::CachedBytecode> cachedBytecode = JSC::encodeCodeBlock(vm, key, unlinkedCodeBlock);
if (!cachedBytecode)
return false;

@@ -116,13 +116,8 @@ typedef struct ResolvedSource {
uint32_t tag;
bool needsDeref;
bool already_bundled;
// -- Bytecode cache fields --
uint8_t* bytecode_cache;
size_t bytecode_cache_size;
void* module_info;
// File path used as source origin for bytecode cache validation.
// Converted to file:// URL. If empty, origin is derived from source_url.
BunString bytecode_origin_path;
} ResolvedSource;
static const uint32_t ResolvedSourceTagPackageJSONTypeModule = 1;
typedef union ErrorableResolvedSourceResult {

@@ -202,7 +202,12 @@ static inline bool setJSMessagePort_onmessageSetter(JSGlobalObject& lexicalGloba
vm.writeBarrier(&thisObject, value);
ensureStillAliveHere(value);

thisObject.wrapped().jsRef(&lexicalGlobalObject);
// Only ref when setting to a callable function; unref when clearing or setting to non-function.
// This matches Node.js behavior where setting onmessage to a non-function allows the event loop to exit.
if (value.isCallable())
thisObject.wrapped().jsRef(&lexicalGlobalObject);
else
thisObject.wrapped().jsUnref(&lexicalGlobalObject);

return true;
}
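The fix above makes the keep-alive reference conditional on the handler actually being callable; assigning null (or any non-function) now releases the reference so the event loop can exit, matching Node. A Zig sketch of that balance-keeping pattern, with a hypothetical EventLoopHandle; it assumes jsRef/jsUnref behave as idempotent flags, libuv-style, which the setter relies on:

    const std = @import("std");

    /// Idempotent ref flag, like a libuv handle: the handle either keeps
    /// the event loop alive or it does not, no matter how often it is set.
    const EventLoopHandle = struct {
        keeps_loop_alive: bool = false,

        fn jsRef(this: *EventLoopHandle) void {
            this.keeps_loop_alive = true;
        }
        fn jsUnref(this: *EventLoopHandle) void {
            this.keeps_loop_alive = false;
        }

        /// Mirrors the onmessage setter: only a callable handler pins the loop.
        fn setOnMessage(this: *EventLoopHandle, handler_is_callable: bool) void {
            if (handler_is_callable) this.jsRef() else this.jsUnref();
        }
    };

    test "clearing onmessage lets the loop exit" {
        var port = EventLoopHandle{};
        port.setOnMessage(true);
        try std.testing.expect(port.keeps_loop_alive);
        port.setOnMessage(false); // e.g. port.onmessage = null
        try std.testing.expect(!port.keeps_loop_alive);
    }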
@@ -60,6 +60,8 @@
#include <JavaScriptCore/JSModuleLoader.h>
#include <JavaScriptCore/DeferredWorkTimer.h>
#include "MessageEvent.h"
#include "MessageChannel.h"
#include "AddEventListenerOptions.h"
#include "BunWorkerGlobalScope.h"
#include "CloseEvent.h"
#include "JSMessagePort.h"
@@ -69,6 +71,56 @@ namespace WebCore {

WTF_MAKE_TZONE_ALLOCATED_IMPL(Worker);

// Event listener that forwards messages from a MessagePort to a Worker object
// https://github.com/nodejs/node/blob/e1fc3dc2fcf19d9278ab59c353aa1fa59290378b/lib/internal/worker.js#L331-L335
class WorkerMessageForwarder final : public EventListener {
public:
static Ref<WorkerMessageForwarder> create(Worker& worker)
{
return adoptRef(*new WorkerMessageForwarder(worker));
}

bool operator==(const EventListener& other) const override
{
return this == &other;
}

void handleEvent(ScriptExecutionContext& context, Event& event) override
{
if (!m_worker)
return;

if (event.type() != eventNames().messageEvent)
return;

auto& messageEvent = static_cast<MessageEvent&>(event);

// Get the data value from the event's cache
JSC::JSValue dataValue = messageEvent.cachedData().getValue(JSC::jsNull());

// Create and dispatch the message event to the Worker object synchronously.
// This is safe because Worker is a different EventTarget from the MessagePort,
// so we won't trigger "event is already being dispatched" assertions.
// Dispatching synchronously ensures message events are processed before exit events.
MessageEvent::Init init;
init.data = dataValue;
init.ports = messageEvent.ports();
auto newEvent = MessageEvent::create(eventNames().messageEvent, WTF::move(init), EventIsTrusted::Yes);
m_worker->dispatchEvent(newEvent);
}

private:
explicit WorkerMessageForwarder(Worker& worker)
: EventListener(NativeEventListenerType)
, m_worker(&worker)
{
}

// Raw pointer is safe because the Worker owns the MessagePort which owns this listener.
// When the Worker is destroyed, the MessagePort is destroyed first.
Worker* m_worker;
};

extern "C" void WebWorker__notifyNeedTermination(
void* worker);

@@ -151,6 +203,25 @@ ExceptionOr<Ref<Worker>> Worker::create(ScriptExecutionContext& context, const S
{
auto worker = adoptRef(*new Worker(context, WTF::move(options)));

// For Node workers, create a MessagePort pair for parent-worker communication.
// The parent keeps port1 (m_parentPort) and the child gets port2 (via options).
if (worker->m_options.kind == WorkerOptions::Kind::Node) {
auto channel = MessageChannel::create(context);
worker->m_parentPort = &channel->port1();
worker->m_parentPort->entangle();

// Set up a listener on the parent port that forwards messages to the Worker object
// This allows worker.on('message', ...) to receive messages sent via parentPort.postMessage()
auto forwarder = WorkerMessageForwarder::create(worker.get());
static_cast<EventTarget*>(worker->m_parentPort.get())->addEventListener(eventNames().messageEvent, WTF::move(forwarder), {});
worker->m_parentPort->start();

// Disentangle the child port from the parent context so it can be transferred to the worker
MessagePort& childPort = channel->port2();
auto disentangledPort = childPort.disentangle();
worker->m_options.parentPortTransferred = WTF::move(disentangledPort);
}

WTF::String url = urlInit;
if (url.startsWith("file://"_s)) {
WTF::URL urlObject = WTF::URL(url);
@@ -224,6 +295,11 @@ ExceptionOr<Ref<Worker>> Worker::create(ScriptExecutionContext& context, const S

Worker::~Worker()
{
// Close the parent port before member destruction begins.
// This removes the WorkerMessageForwarder listener while Worker is still fully valid.
if (m_parentPort)
m_parentPort->close();

{
Locker locker { allWorkersLock };
allWorkers().remove(m_clientIdentifier);
@@ -236,6 +312,14 @@ ExceptionOr<void> Worker::postMessage(JSC::JSGlobalObject& state, JSC::JSValue m
if (m_terminationFlags & TerminatedFlag)
return Exception { InvalidStateError, "Worker has been terminated"_s };

// For Node workers, post through the MessagePort (m_parentPort) which delivers
// to the worker's parentPort. This avoids triggering self.onmessage which is
// Web Worker behavior, not Node worker_threads behavior.
if (m_options.kind == WorkerOptions::Kind::Node && m_parentPort) {
return m_parentPort->postMessage(state, messageValue, WTF::move(options));
}

// For Web Workers, dispatch to globalEventScope (which triggers self.onmessage)
Vector<RefPtr<MessagePort>> ports;
auto serialized = SerializedScriptValue::create(state, messageValue, WTF::move(options.transfer), ports, SerializationForStorage::No, SerializationContext::WorkerPostMessage);
if (serialized.hasException())
@@ -562,6 +646,7 @@ JSValue createNodeWorkerThreadsBinding(Zig::GlobalObject* globalObject)
JSValue workerData = jsNull();
JSValue threadId = jsNumber(0);
JSMap* environmentData = nullptr;
JSValue parentPortValue = jsNull();

if (auto* worker = WebWorker__getParentWorker(globalObject->bunVM())) {
auto& options = worker->options();
@@ -583,6 +668,16 @@ JSValue createNodeWorkerThreadsBinding(Zig::GlobalObject* globalObject)

// Main thread starts at 1
threadId = jsNumber(worker->clientIdentifier() - 1);

// Entangle the parentPort MessagePort for Node workers (transferred from parent)
if (options.parentPortTransferred.has_value()) {
auto* context = globalObject->scriptExecutionContext();
if (context) {
auto parentPort = MessagePort::entangle(*context, WTF::move(*options.parentPortTransferred));
parentPort->start();
parentPortValue = toJS(globalObject, globalObject, parentPort.get());
}
}
}
if (!environmentData) {
environmentData = JSMap::create(vm, globalObject->mapStructure());
@@ -591,12 +686,13 @@ JSValue createNodeWorkerThreadsBinding(Zig::GlobalObject* globalObject)
ASSERT(environmentData);
globalObject->setNodeWorkerEnvironmentData(environmentData);

JSObject* array = constructEmptyArray(globalObject, nullptr, 4);
JSObject* array = constructEmptyArray(globalObject, nullptr, 5);
RETURN_IF_EXCEPTION(scope, {});
array->putDirectIndex(globalObject, 0, workerData);
array->putDirectIndex(globalObject, 1, threadId);
array->putDirectIndex(globalObject, 2, JSFunction::create(vm, globalObject, 1, "receiveMessageOnPort"_s, jsReceiveMessageOnPort, ImplementationVisibility::Public, NoIntrinsic));
array->putDirectIndex(globalObject, 3, environmentData);
array->putDirectIndex(globalObject, 4, parentPortValue);
return array;
}


@@ -28,6 +28,7 @@
#include "ActiveDOMObject.h"
#include "EventTarget.h"
#include "WorkerOptions.h"
#include "MessagePort.h"
#include <JavaScriptCore/RuntimeFlags.h>
#include <wtf/Deque.h>
#include <wtf/MonotonicTime.h>
@@ -120,6 +121,9 @@ private:
std::atomic<uint8_t> m_terminationFlags { 0 };
const ScriptExecutionContextIdentifier m_clientIdentifier;
void* impl_ { nullptr };

// For Node workers: the parent-side MessagePort for communicating with the worker's parentPort
RefPtr<MessagePort> m_parentPort;
};

JSValue createNodeWorkerThreadsBinding(Zig::GlobalObject* globalObject);

@@ -34,6 +34,10 @@ struct WorkerOptions {
Vector<String> argv;
// If nullopt, inherit execArgv from the parent thread
std::optional<Vector<String>> execArgv;

// For Node workers: the transferred parentPort
// This is disentangled from the parent and entangled in the worker
std::optional<TransferredMessagePort> parentPortTransferred;
};

} // namespace WebCore

@@ -502,11 +502,6 @@ pub const Chunk = struct {
///
/// Mutated while sorting chunks in `computeChunks`
css_chunks: []u32 = &.{},

/// Serialized ModuleInfo for ESM bytecode (--compile --bytecode --format=esm)
module_info_bytes: ?[]const u8 = null,
/// Unserialized ModuleInfo for deferred serialization (after chunk paths are resolved)
module_info: ?*analyze_transpiled_module.ModuleInfo = null,
};

pub const CssChunk = struct {
@@ -659,7 +654,6 @@ pub const ParseTask = bun.bundle_v2.ParseTask;
const string = []const u8;

const HTMLImportManifest = @import("./HTMLImportManifest.zig");
const analyze_transpiled_module = @import("../analyze_transpiled_module.zig");
const std = @import("std");

const options = @import("../options.zig");

@@ -70,7 +70,6 @@ pub const LinkerContext = struct {
css_chunking: bool = false,
source_maps: options.SourceMapOption = .none,
target: options.Target = .browser,
compile: bool = false,
metafile: bool = false,
/// Path to write JSON metafile (for Bun.build API)
metafile_json_path: []const u8 = "",

@@ -971,7 +971,6 @@ pub const BundleV2 = struct {
this.linker.options.target = transpiler.options.target;
this.linker.options.output_format = transpiler.options.output_format;
this.linker.options.generate_bytecode_cache = transpiler.options.bytecode;
this.linker.options.compile = transpiler.options.compile;
this.linker.options.metafile = transpiler.options.metafile;
this.linker.options.metafile_json_path = transpiler.options.metafile_json_path;
this.linker.options.metafile_markdown_path = transpiler.options.metafile_markdown_path;
@@ -4509,19 +4508,9 @@ pub const CrossChunkImport = struct {
};

pub const CompileResult = union(enum) {
pub const DeclInfo = struct {
pub const Kind = enum(u1) { declared, lexical };
name: []const u8,
kind: Kind,
};

javascript: struct {
source_index: Index.Int,
result: js_printer.PrintResult,
/// Top-level declarations collected from converted statements during
/// parallel printing. Used by postProcessJSChunk to populate ModuleInfo
/// without re-scanning the original (unconverted) AST.
decls: []const DeclInfo = &.{},

pub fn code(this: @This()) []const u8 {
return switch (this.result) {

@@ -3,7 +3,7 @@
//! chunk indexing remains the same:
//!
//! 1. chunks
//! 2. sourcemaps, bytecode, and module_info
//! 2. sourcemaps and bytecode
//! 3. additional output files
//!
//! We can calculate the space ahead of time and avoid having to do something
@@ -41,7 +41,7 @@ pub fn init(
chunks: []const bun.bundle_v2.Chunk,
_: usize,
) !@This() {
const length, const supplementary_file_count = OutputFileList.calculateOutputFileListCapacity(c, chunks);
const length, const source_map_and_bytecode_count = OutputFileList.calculateOutputFileListCapacity(c, chunks);
var output_files = try std.array_list.Managed(options.OutputFile).initCapacity(
allocator,
length,
@@ -51,8 +51,8 @@ pub fn init(
return .{
.output_files = output_files,
.index_for_chunk = 0,
.index_for_sourcemaps_and_bytecode = if (supplementary_file_count == 0) null else @as(u32, @truncate(chunks.len)),
.additional_output_files_start = @as(u32, @intCast(chunks.len)) + supplementary_file_count,
.index_for_sourcemaps_and_bytecode = if (source_map_and_bytecode_count == 0) null else @as(u32, @truncate(chunks.len)),
.additional_output_files_start = @as(u32, @intCast(chunks.len)) + source_map_and_bytecode_count,
.total_insertions = 0,
};
}
@@ -94,10 +94,7 @@ pub fn calculateOutputFileListCapacity(c: *const bun.bundle_v2.LinkerContext, ch
break :bytecode_count bytecode_count;
} else 0;

// module_info is generated for ESM bytecode in --compile builds
const module_info_count = if (c.options.generate_bytecode_cache and c.options.output_format == .esm and c.options.compile) bytecode_count else 0;

return .{ @intCast(chunks.len + source_map_count + bytecode_count + module_info_count + c.parse_graph.additional_output_files.items.len), @intCast(source_map_count + bytecode_count + module_info_count) };
return .{ @intCast(chunks.len + source_map_count + bytecode_count + c.parse_graph.additional_output_files.items.len), @intCast(source_map_count + bytecode_count) };
}
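Because output files are appended into a pre-sized list, the capacity math has to agree exactly with what is inserted later; module_info piggybacks on the bytecode count since the two are emitted for the same chunks. A reduced sketch of the arithmetic, with illustrative parameter names:

    const std = @import("std");

    const Counts = struct { total: u32, supplementary: u32 };

    /// Mirrors calculateOutputFileListCapacity: one module_info file per
    /// bytecode file, but only for ESM bytecode inside --compile builds.
    fn capacity(
        chunk_count: u32,
        source_map_count: u32,
        bytecode_count: u32,
        esm_compile: bool,
        additional_files: u32,
    ) Counts {
        const module_info_count = if (esm_compile) bytecode_count else 0;
        const supplementary = source_map_count + bytecode_count + module_info_count;
        return .{ .total = chunk_count + supplementary + additional_files, .supplementary = supplementary };
    }

    test "ESM --compile reserves a module_info slot per bytecode file" {
        const with = capacity(2, 2, 2, true, 1);
        const without = capacity(2, 2, 2, false, 1);
        try std.testing.expectEqual(@as(u32, 9), with.total);
        try std.testing.expectEqual(@as(u32, 7), without.total);
    }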
pub fn insertForChunk(this: *OutputFileList, output_file: options.OutputFile) u32 {

@@ -304,69 +304,6 @@ pub fn generateChunksInParallel(
}
}

// After final_rel_path is computed for all chunks, fix up module_info
// cross-chunk import specifiers. During printing, cross-chunk imports use
// unique_key placeholders as paths. Now that final paths are known, replace
// those placeholders with the resolved paths and serialize.
if (c.options.generate_bytecode_cache and c.options.output_format == .esm and c.options.compile) {
// Build map from unique_key -> final resolved path
const b = @as(*bun.bundle_v2.BundleV2, @fieldParentPtr("linker", c));
var unique_key_to_path = bun.StringHashMap([]const u8).init(c.allocator());
defer unique_key_to_path.deinit();
for (chunks) |*ch| {
if (ch.unique_key.len > 0 and ch.final_rel_path.len > 0) {
// Use the per-chunk public_path to match what IntermediateOutput.code()
// uses during emission (browser chunks from server builds use the
// browser transpiler's public_path).
const public_path = if (ch.flags.is_browser_chunk_from_server_build)
b.transpilerForTarget(.browser).options.public_path
else
c.options.public_path;
const normalizer = bun.bundle_v2.cheapPrefixNormalizer(public_path, ch.final_rel_path);
const resolved = std.fmt.allocPrint(c.allocator(), "{s}{s}", .{ normalizer[0], normalizer[1] }) catch |err| bun.handleOom(err);
unique_key_to_path.put(ch.unique_key, resolved) catch |err| bun.handleOom(err);
}
}

// Fix up each chunk's module_info
for (chunks) |*chunk| {
if (chunk.content != .javascript) continue;
const mi = chunk.content.javascript.module_info orelse continue;

// Collect replacements first (can't modify string table while iterating)
const Replacement = struct { old_id: analyze_transpiled_module.StringID, resolved_path: []const u8 };
var replacements: std.ArrayListUnmanaged(Replacement) = .{};
defer replacements.deinit(c.allocator());

var offset: usize = 0;
for (mi.strings_lens.items, 0..) |slen, string_index| {
const len: usize = @intCast(slen);
const s = mi.strings_buf.items[offset..][0..len];
if (unique_key_to_path.get(s)) |resolved_path| {
replacements.append(c.allocator(), .{
.old_id = @enumFromInt(@as(u32, @intCast(string_index))),
.resolved_path = resolved_path,
}) catch |err| bun.handleOom(err);
}
offset += len;
}

for (replacements.items) |rep| {
const new_id = mi.str(rep.resolved_path) catch |err| bun.handleOom(err);
mi.replaceStringID(rep.old_id, new_id);
}

// Serialize the fixed-up module_info
chunk.content.javascript.module_info_bytes = bun.js_printer.serializeModuleInfo(mi);

// Free the ModuleInfo now that it's been serialized to bytes.
// It was allocated with bun.default_allocator (not the arena),
// so it must be explicitly destroyed.
mi.destroy();
chunk.content.javascript.module_info = null;
}
}
|
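// Aside: a standalone sketch (not part of this diff) of the placeholder fixup
// above, assuming a plain string map; the real code rewrites ModuleInfo's
// string table in place via replaceStringID, and "bun!unique-key!0" is an
// invented placeholder value for illustration.
const std = @import("std");

fn resolveSpecifier(map: *const std.StringHashMap([]const u8), specifier: []const u8) []const u8 {
    // Cross-chunk specifiers printed as unique_key placeholders become final
    // paths; anything else (e.g. "node:fs") passes through unchanged.
    return map.get(specifier) orelse specifier;
}

test resolveSpecifier {
    var map = std.StringHashMap([]const u8).init(std.testing.allocator);
    defer map.deinit();
    try map.put("bun!unique-key!0", "./chunk-abc123.js");
    try std.testing.expectEqualStrings("./chunk-abc123.js", resolveSpecifier(&map, "bun!unique-key!0"));
    try std.testing.expectEqualStrings("node:fs", resolveSpecifier(&map, "node:fs"));
}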

        // Generate metafile JSON fragments for each chunk (after paths are resolved)
        if (c.options.metafile) {
            for (chunks) |*chunk| {
@@ -494,14 +431,6 @@ pub fn generateChunksInParallel(
                .none => {},
            }

-            // Compute side early so it can be used for bytecode, module_info, and main chunk output files
-            const side: bun.bake.Side = if (chunk.content == .css or chunk.flags.is_browser_chunk_from_server_build)
-                .client
-            else switch (c.graph.ast.items(.target)[chunk.entry_point.source_index]) {
-                .browser => .client,
-                else => .server,
-            };

            const bytecode_output_file: ?options.OutputFile = brk: {
                if (c.options.generate_bytecode_cache) {
                    const loader: Loader = if (chunk.entry_point.is_entry_point)
@@ -515,18 +444,7 @@ pub fn generateChunksInParallel(
                    jsc.VirtualMachine.is_bundler_thread_for_bytecode_cache = true;
                    jsc.initialize(false);
                    var fdpath: bun.PathBuffer = undefined;
-                    // For --compile builds, the bytecode URL must match the module name
-                    // that will be used at runtime. The module name is:
-                    //   public_path + final_rel_path (e.g., "/$bunfs/root/app.js")
-                    // Without this prefix, the JSC bytecode cache key won't match at runtime.
-                    // Use the per-chunk public_path (already computed above) for browser chunks
-                    // from server builds, and normalize with cheapPrefixNormalizer for consistency
-                    // with module_info path fixup.
-                    // For non-compile builds, use the normal .jsc extension.
-                    var source_provider_url = if (c.options.compile) url_blk: {
-                        const normalizer = bun.bundle_v2.cheapPrefixNormalizer(public_path, chunk.final_rel_path);
-                        break :url_blk try bun.String.createFormat("{s}{s}", .{ normalizer[0], normalizer[1] });
-                    } else try bun.String.createFormat("{s}" ++ bun.bytecode_extension, .{chunk.final_rel_path});
+                    var source_provider_url = try bun.String.createFormat("{s}" ++ bun.bytecode_extension, .{chunk.final_rel_path});
                    source_provider_url.ref();

                    defer source_provider_url.deref();
@@ -551,7 +469,7 @@ pub fn generateChunksInParallel(
                        .data = .{
                            .buffer = .{ .data = bytecode, .allocator = cached_bytecode.allocator() },
                        },
-                        .side = side,
+                        .side = .server,
                        .entry_point_index = null,
                        .is_executable = false,
                    });
@@ -567,40 +485,6 @@ pub fn generateChunksInParallel(
                    break :brk null;
                };
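// Aside: the removed comment block documents the invariant that, for
// --compile, the bytecode cache key is public_path joined with final_rel_path.
// A minimal standalone check of that join, using the example value from the
// removed comment ("/$bunfs/root/app.js"):
const std = @import("std");

test "bytecode cache key is public_path ++ final_rel_path" {
    var buf: [64]u8 = undefined;
    const key = try std.fmt.bufPrint(&buf, "{s}{s}", .{ "/$bunfs/root/", "app.js" });
    try std.testing.expectEqualStrings("/$bunfs/root/app.js", key);
}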

-                // Create module_info output file for ESM bytecode in --compile builds
-                const module_info_output_file: ?options.OutputFile = brk: {
-                    if (c.options.generate_bytecode_cache and c.options.output_format == .esm and c.options.compile) {
-                        const loader: Loader = if (chunk.entry_point.is_entry_point)
-                            c.parse_graph.input_files.items(.loader)[
-                                chunk.entry_point.source_index
-                            ]
-                        else
-                            .js;

-                        if (chunk.content == .javascript and loader.isJavaScriptLike()) {
-                            if (chunk.content.javascript.module_info_bytes) |module_info_bytes| {
-                                break :brk options.OutputFile.init(.{
-                                    .output_path = bun.handleOom(std.fmt.allocPrint(bun.default_allocator, "{s}.module-info", .{chunk.final_rel_path})),
-                                    .input_path = bun.handleOom(std.fmt.allocPrint(bun.default_allocator, "{s}.module-info", .{chunk.final_rel_path})),
-                                    .input_loader = .js,
-                                    .hash = if (chunk.template.placeholder.hash != null) bun.hash(module_info_bytes) else null,
-                                    .output_kind = .module_info,
-                                    .loader = .file,
-                                    .size = @as(u32, @truncate(module_info_bytes.len)),
-                                    .display_size = @as(u32, @truncate(module_info_bytes.len)),
-                                    .data = .{
-                                        .buffer = .{ .data = module_info_bytes, .allocator = bun.default_allocator },
-                                    },
-                                    .side = side,
-                                    .entry_point_index = null,
-                                    .is_executable = false,
-                                });
-                            }
-                        }
-                    }
-                    break :brk null;
-                };

                const source_map_index: ?u32 = if (sourcemap_output_file != null)
                    try output_files.insertForSourcemapOrBytecode(sourcemap_output_file.?)
                else
@@ -611,11 +495,6 @@ pub fn generateChunksInParallel(
                else
                    null;

-                const module_info_index: ?u32 = if (module_info_output_file != null)
-                    try output_files.insertForSourcemapOrBytecode(module_info_output_file.?)
-                else
-                    null;

                const output_kind = if (chunk.content == .css)
                    .asset
                else if (chunk.entry_point.is_entry_point)
@@ -623,6 +502,12 @@ pub fn generateChunksInParallel(
                else
                    .chunk;

+                const side: bun.bake.Side = if (chunk.content == .css or chunk.flags.is_browser_chunk_from_server_build)
+                    .client
+                else switch (c.graph.ast.items(.target)[chunk.entry_point.source_index]) {
+                    .browser => .client,
+                    else => .server,
+                };
                const chunk_index = output_files.insertForChunk(options.OutputFile.init(.{
                    .data = .{
                        .buffer = .{
@@ -640,7 +525,6 @@ pub fn generateChunksInParallel(
                    .is_executable = chunk.flags.is_executable,
                    .source_map_index = source_map_index,
                    .bytecode_index = bytecode_index,
-                    .module_info_index = module_info_index,
                    .side = side,
                    .entry_point_index = if (output_kind == .@"entry-point")
                        chunk.entry_point.source_index - @as(u32, (if (c.framework) |fw| if (fw.server_components != null) 3 else 1 else 1))
@@ -680,7 +564,6 @@ pub const ThreadPool = bun.bundle_v2.ThreadPool;

const debugPartRanges = Output.scoped(.PartRanges, .hidden);

-const analyze_transpiled_module = @import("../../analyze_transpiled_module.zig");
const std = @import("std");

const bun = @import("bun");

@@ -10,7 +10,6 @@ pub fn generateCodeForFileInChunkJS(
    stmts: *StmtList,
    allocator: std.mem.Allocator,
    temp_allocator: std.mem.Allocator,
-    decl_collector: ?*DeclCollector,
) js_printer.PrintResult {
    const parts: []Part = c.graph.ast.items(.parts)[part_range.source_index.get()].slice()[part_range.part_index_begin..part_range.part_index_end];
    const all_flags: []const JSMeta.Flags = c.graph.meta.items(.flags);
@@ -614,15 +613,6 @@ pub fn generateCodeForFileInChunkJS(
        };
    }

-    // Collect top-level declarations from the converted statements.
-    // This is done here (after convertStmtsForChunk) rather than in
-    // postProcessJSChunk, because convertStmtsForChunk transforms the AST
-    // (e.g. export default expr → var, export stripping) and the converted
-    // statements reflect what actually gets printed.
-    if (decl_collector) |dc| {
-        dc.collectFromStmts(out_stmts, r, c);
-    }

    return c.printCodeForFileInChunkJS(
        r,
        allocator,
@@ -638,77 +628,6 @@ pub fn generateCodeForFileInChunkJS(
    );
}

-pub const DeclCollector = struct {
-    decls: std.ArrayListUnmanaged(CompileResult.DeclInfo) = .{},
-    allocator: std.mem.Allocator,

-    const CompileResult = bun.bundle_v2.CompileResult;

-    /// Collect top-level declarations from **converted** statements (after
-    /// `convertStmtsForChunk`). At that point, export statements have already
-    /// been transformed:
-    /// - `s_export_default` → `s_local` / `s_function` / `s_class`
-    /// - `s_export_clause` → removed entirely
-    /// - `s_export_from` / `s_export_star` → removed or converted to `s_import`
-    ///
-    /// Remaining `s_import` statements (external, non-bundled) don't need
-    /// handling here; their bindings are recorded separately in
-    /// `postProcessJSChunk` by scanning the original AST import records.
-    pub fn collectFromStmts(self: *DeclCollector, stmts: []const Stmt, r: renamer.Renamer, c: *LinkerContext) void {
-        for (stmts) |stmt| {
-            switch (stmt.data) {
-                .s_local => |s| {
-                    const kind: CompileResult.DeclInfo.Kind = if (s.kind == .k_var) .declared else .lexical;
-                    for (s.decls.slice()) |decl| {
-                        self.collectFromBinding(decl.binding, kind, r, c);
-                    }
-                },
-                .s_function => |s| {
-                    if (s.func.name) |name_loc_ref| {
-                        if (name_loc_ref.ref) |name_ref| {
-                            self.addRef(name_ref, .lexical, r, c);
-                        }
-                    }
-                },
-                .s_class => |s| {
-                    if (s.class.class_name) |class_name| {
-                        if (class_name.ref) |name_ref| {
-                            self.addRef(name_ref, .lexical, r, c);
-                        }
-                    }
-                },
-                else => {},
-            }
-        }
-    }

-    fn collectFromBinding(self: *DeclCollector, binding: Binding, kind: CompileResult.DeclInfo.Kind, r: renamer.Renamer, c: *LinkerContext) void {
-        switch (binding.data) {
-            .b_identifier => |b| {
-                self.addRef(b.ref, kind, r, c);
-            },
-            .b_array => |b| {
-                for (b.items) |item| {
-                    self.collectFromBinding(item.binding, kind, r, c);
-                }
-            },
-            .b_object => |b| {
-                for (b.properties) |prop| {
-                    self.collectFromBinding(prop.value, kind, r, c);
-                }
-            },
-            .b_missing => {},
-        }
-    }

-    fn addRef(self: *DeclCollector, ref: Ref, kind: CompileResult.DeclInfo.Kind, r: renamer.Renamer, c: *LinkerContext) void {
-        const followed = c.graph.symbols.follow(ref);
-        const name = r.nameForSymbol(followed);
-        if (name.len == 0) return;
-        self.decls.append(self.allocator, .{ .name = name, .kind = kind }) catch return;
-    }
-};

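// Aside: why collectFromBinding recurses -- a single s_local can declare many
// names. For `const { a, b: [c, d] } = obj;` the walker visits b_object ->
// b_identifier(a) and the nested b_array -> b_identifier(c), b_identifier(d),
// yielding three lexical declarations (a, c, d).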
fn mergeAdjacentLocalStmts(stmts: *std.ArrayListUnmanaged(Stmt), allocator: std.mem.Allocator) void {
    if (stmts.items.len == 0)
        return;

@@ -46,9 +46,6 @@ fn generateCompileResultForJSChunkImpl(worker: *ThreadPool.Worker, c: *LinkerCon
    const toESMRef = c.graph.symbols.follow(runtime_members.get("__toESM").?.ref);
    const runtimeRequireRef = if (c.options.output_format == .cjs) null else c.graph.symbols.follow(runtime_members.get("__require").?.ref);

-    const collect_decls = c.options.generate_bytecode_cache and c.options.output_format == .esm and c.options.compile;
-    var dc = DeclCollector{ .allocator = allocator };

    const result = c.generateCodeForFileInChunkJS(
        &buffer_writer,
        chunk.renamer,
@@ -60,7 +57,6 @@ fn generateCompileResultForJSChunkImpl(worker: *ThreadPool.Worker, c: *LinkerCon
        &worker.stmt_list,
        worker.allocator,
        arena.allocator(),
-        if (collect_decls) &dc else null,
    );

    // Update bytesInOutput for this source in the chunk (for metafile)
@@ -79,7 +75,6 @@ fn generateCompileResultForJSChunkImpl(worker: *ThreadPool.Worker, c: *LinkerCon
        .javascript = .{
            .source_index = part_range.source_index.get(),
            .result = result,
-            .decls = if (collect_decls) dc.decls.items else &.{},
        },
    };
}
@@ -87,8 +82,6 @@ fn generateCompileResultForJSChunkImpl(worker: *ThreadPool.Worker, c: *LinkerCon
pub const DeferredBatchTask = bun.bundle_v2.DeferredBatchTask;
pub const ParseTask = bun.bundle_v2.ParseTask;

-const DeclCollector = @import("./generateCodeForFileInChunkJS.zig").DeclCollector;

const bun = @import("bun");
const Environment = bun.Environment;
const ThreadPoolLib = bun.ThreadPool;

@@ -25,15 +25,6 @@ pub fn postProcessJSChunk(ctx: GenerateChunkCtx, worker: *ThreadPool.Worker, chu
    const toESMRef = c.graph.symbols.follow(runtime_members.get("__toESM").?.ref);
    const runtimeRequireRef = if (c.options.output_format == .cjs) null else c.graph.symbols.follow(runtime_members.get("__require").?.ref);

-    // Create ModuleInfo for ESM bytecode in --compile builds
-    const generate_module_info = c.options.generate_bytecode_cache and c.options.output_format == .esm and c.options.compile;
-    const loader = c.parse_graph.input_files.items(.loader)[chunk.entry_point.source_index];
-    const is_typescript = loader.isTypeScript();
-    const module_info: ?*analyze_transpiled_module.ModuleInfo = if (generate_module_info)
-        analyze_transpiled_module.ModuleInfo.create(bun.default_allocator, is_typescript) catch null
-    else
-        null;

    {
        const print_options = js_printer.Options{
            .bundling = true,
@@ -48,7 +39,6 @@ pub fn postProcessJSChunk(ctx: GenerateChunkCtx, worker: *ThreadPool.Worker, chu
            .target = c.options.target,
            .print_dce_annotations = c.options.emit_dce_annotations,
            .mangled_props = &c.mangled_props,
-            .module_info = module_info,
            // .const_values = c.graph.const_values,
        };

@@ -94,124 +84,7 @@ pub fn postProcessJSChunk(ctx: GenerateChunkCtx, worker: *ThreadPool.Worker, chu
        );
    }

-    // Populate ModuleInfo with declarations collected during parallel printing,
-    // external import records from the original AST, and wrapper refs.
-    if (module_info) |mi| {
-        // 1. Add declarations collected by DeclCollector during parallel part printing.
-        // These come from the CONVERTED statements (after convertStmtsForChunk transforms
-        // export default → var, strips exports, etc.), so they match what's actually printed.
-        for (chunk.compile_results_for_chunk) |cr| {
-            const decls = switch (cr) {
-                .javascript => |js| js.decls,
-                else => continue,
-            };
-            for (decls) |decl| {
-                const var_kind: analyze_transpiled_module.ModuleInfo.VarKind = switch (decl.kind) {
-                    .declared => .declared,
-                    .lexical => .lexical,
-                };
-                const string_id = mi.str(decl.name) catch continue;
-                mi.addVar(string_id, var_kind) catch continue;
-            }
-        }

-        // 1b. Check if any source in this chunk uses import.meta. The per-part
-        // parallel printer does not have module_info, so the printer cannot set
-        // this flag during per-part printing. We derive it from the AST instead.
-        // Note: the runtime source (index 0) also uses import.meta (e.g.
-        // `import.meta.require`), so we must not skip it.
-        {
-            const all_ast_flags = c.graph.ast.items(.flags);
-            for (chunk.content.javascript.parts_in_chunk_in_order) |part_range| {
-                if (all_ast_flags[part_range.source_index.get()].has_import_meta) {
-                    mi.flags.contains_import_meta = true;
-                    break;
-                }
-            }
-        }

-        // 2. Collect truly-external imports from the original AST. Bundled imports
-        // (where source_index is valid) are removed by convertStmtsForChunk and
-        // re-created as cross-chunk imports — those are already captured by the
-        // printer when it prints cross_chunk_prefix_stmts above. Only truly-external
-        // imports (node built-ins, etc.) survive as s_import in per-file parts and
-        // need recording here.
-        const all_parts = c.graph.ast.items(.parts);
-        const all_flags = c.graph.meta.items(.flags);
-        const all_import_records = c.graph.ast.items(.import_records);
-        for (chunk.content.javascript.parts_in_chunk_in_order) |part_range| {
-            if (all_flags[part_range.source_index.get()].wrap == .cjs) continue;
-            const source_parts = all_parts[part_range.source_index.get()].slice();
-            const source_import_records = all_import_records[part_range.source_index.get()].slice();
-            var part_i = part_range.part_index_begin;
-            while (part_i < part_range.part_index_end) : (part_i += 1) {
-                for (source_parts[part_i].stmts) |stmt| {
-                    switch (stmt.data) {
-                        .s_import => |s| {
-                            const record = &source_import_records[s.import_record_index];
-                            if (record.path.is_disabled) continue;
-                            if (record.tag == .bun) continue;
-                            // Skip bundled imports — these are converted to cross-chunk
-                            // imports by the linker. The printer already recorded them
-                            // when printing cross_chunk_prefix_stmts.
-                            if (record.source_index.isValid()) continue;

-                            const import_path = record.path.text;
-                            const irp_id = mi.str(import_path) catch continue;
-                            mi.requestModule(irp_id, .none) catch continue;

-                            if (s.default_name) |name| {
-                                if (name.ref) |name_ref| {
-                                    const local_name = chunk.renamer.nameForSymbol(name_ref);
-                                    const local_name_id = mi.str(local_name) catch continue;
-                                    mi.addVar(local_name_id, .lexical) catch continue;
-                                    mi.addImportInfoSingle(irp_id, mi.str("default") catch continue, local_name_id, false) catch continue;
-                                }
-                            }

-                            for (s.items) |item| {
-                                if (item.name.ref) |name_ref| {
-                                    const local_name = chunk.renamer.nameForSymbol(name_ref);
-                                    const local_name_id = mi.str(local_name) catch continue;
-                                    mi.addVar(local_name_id, .lexical) catch continue;
-                                    mi.addImportInfoSingle(irp_id, mi.str(item.alias) catch continue, local_name_id, false) catch continue;
-                                }
-                            }

-                            if (record.flags.contains_import_star) {
-                                const local_name = chunk.renamer.nameForSymbol(s.namespace_ref);
-                                const local_name_id = mi.str(local_name) catch continue;
-                                mi.addVar(local_name_id, .lexical) catch continue;
-                                mi.addImportInfoNamespace(irp_id, local_name_id) catch continue;
-                            }
-                        },
-                        else => {},
-                    }
-                }
-            }
-        }

-        // 3. Add wrapper-generated declarations (init_xxx, require_xxx) that are
-        // not in any part statement.
-        const all_wrapper_refs = c.graph.ast.items(.wrapper_ref);
-        for (chunk.content.javascript.parts_in_chunk_in_order) |part_range| {
-            const source_index = part_range.source_index.get();
-            if (all_flags[source_index].wrap != .none) {
-                const wrapper_ref = all_wrapper_refs[source_index];
-                if (!wrapper_ref.isEmpty()) {
-                    const name = chunk.renamer.nameForSymbol(wrapper_ref);
-                    if (name.len > 0) {
-                        const string_id = mi.str(name) catch continue;
-                        mi.addVar(string_id, .declared) catch continue;
-                    }
-                }
-            }
-        }
-    }

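// Aside, a worked example for step 2 above: for a surviving external import
// `import def, { readFileSync as rfs } from "node:fs";` the removed code
// records requestModule("node:fs"), then addVar(..., .lexical) for each local
// plus import-info entries ("node:fs", "default", "def") and
// ("node:fs", "readFileSync", "rfs"); an `import * as ns` binding would go
// through addImportInfoNamespace instead.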
-    // Generate the exports for the entry point, if there are any.
-    // This must happen before module_info serialization so the printer
-    // can populate export entries in module_info.
+    // Generate the exports for the entry point, if there are any
    const entry_point_tail = brk: {
        if (chunk.isEntryPoint()) {
            break :brk generateEntryPointTailJS(
@@ -222,21 +95,12 @@ pub fn postProcessJSChunk(ctx: GenerateChunkCtx, worker: *ThreadPool.Worker, chu
                worker.allocator,
                arena.allocator(),
                chunk.renamer,
-                module_info,
            );
        }

        break :brk CompileResult.empty;
    };

-    // Store unserialized ModuleInfo on the chunk. Serialization is deferred to
-    // generateChunksInParallel after final chunk paths are computed, so that
-    // cross-chunk import specifiers (which use unique_key placeholders during
-    // printing) can be resolved to actual paths.
-    if (module_info) |mi| {
-        chunk.content.javascript.module_info = mi;
-    }

    var j = StringJoiner{
        .allocator = worker.allocator,
        .watcher = .{
@@ -571,37 +435,6 @@ pub fn postProcessJSChunk(ctx: GenerateChunkCtx, worker: *ThreadPool.Worker, chu
    }
}

-/// Recursively walk a binding and add all declared names to `ModuleInfo`.
-/// Handles `b_identifier`, `b_array`, `b_object`, and `b_missing`.
-fn addBindingVarsToModuleInfo(
-    mi: *analyze_transpiled_module.ModuleInfo,
-    binding: Binding,
-    var_kind: analyze_transpiled_module.ModuleInfo.VarKind,
-    r: renamer.Renamer,
-    symbols: *const js_ast.Symbol.Map,
-) void {
-    switch (binding.data) {
-        .b_identifier => |b| {
-            const name = r.nameForSymbol(symbols.follow(b.ref));
-            if (name.len > 0) {
-                const str_id = mi.str(name) catch return;
-                mi.addVar(str_id, var_kind) catch {};
-            }
-        },
-        .b_array => |b| {
-            for (b.items) |item| {
-                addBindingVarsToModuleInfo(mi, item.binding, var_kind, r, symbols);
-            }
-        },
-        .b_object => |b| {
-            for (b.properties) |prop| {
-                addBindingVarsToModuleInfo(mi, prop.value, var_kind, r, symbols);
-            }
-        },
-        .b_missing => {},
-    }
-}

pub fn generateEntryPointTailJS(
    c: *LinkerContext,
    toCommonJSRef: Ref,
@@ -610,7 +443,6 @@ pub fn generateEntryPointTailJS(
    allocator: std.mem.Allocator,
    temp_allocator: std.mem.Allocator,
    r: renamer.Renamer,
-    module_info: ?*analyze_transpiled_module.ModuleInfo,
) CompileResult {
    const flags: JSMeta.Flags = c.graph.meta.items(.flags)[source_index];
    var stmts = std.array_list.Managed(Stmt).init(temp_allocator);
@@ -993,22 +825,6 @@ pub fn generateEntryPointTailJS(
        },
    }

-    // Add generated local declarations from the entry point tail to module_info.
-    // This captures vars like `var export_foo = cjs.foo` for CJS export copies.
-    if (module_info) |mi| {
-        for (stmts.items) |stmt| {
-            switch (stmt.data) {
-                .s_local => |s| {
-                    const var_kind: analyze_transpiled_module.ModuleInfo.VarKind = if (s.kind == .k_var) .declared else .lexical;
-                    for (s.decls.slice()) |decl| {
-                        addBindingVarsToModuleInfo(mi, decl.binding, var_kind, r, &c.graph.symbols);
-                    }
-                },
-                else => {},
-            }
-        }
-    }

    if (stmts.items.len == 0) {
        return .{
            .javascript = .{
@@ -1034,7 +850,6 @@ pub fn generateEntryPointTailJS(
            .print_dce_annotations = c.options.emit_dce_annotations,
            .minify_syntax = c.options.minify_syntax,
            .mangled_props = &c.mangled_props,
-            .module_info = module_info,
            // .const_values = c.graph.const_values,
        };

@@ -1060,7 +875,6 @@ pub fn generateEntryPointTailJS(
    };
}

-const analyze_transpiled_module = @import("../../analyze_transpiled_module.zig");
const std = @import("std");

const bun = @import("bun");

src/cli.zig (18 changed lines)
@@ -425,9 +425,6 @@ pub const Command = struct {
        filters: []const []const u8 = &.{},
        workspaces: bool = false,
        if_present: bool = false,
-        parallel: bool = false,
-        sequential: bool = false,
-        no_exit_on_error: bool = false,

        preloads: []const string = &.{},
        has_loaded_global_config: bool = false,
@@ -891,13 +888,6 @@ pub const Command = struct {
            const ctx = try Command.init(allocator, log, .RunCommand);
            ctx.args.target = .bun;

-            if (ctx.parallel or ctx.sequential) {
-                MultiRun.run(ctx) catch |err| {
-                    Output.prettyErrorln("<r><red>error<r>: {s}", .{@errorName(err)});
-                    Global.exit(1);
-                };
-            }

            if (ctx.filters.len > 0 or ctx.workspaces) {
                FilterRun.runScriptsWithFilter(ctx) catch |err| {
                    Output.prettyErrorln("<r><red>error<r>: {s}", .{@errorName(err)});
@@ -937,13 +927,6 @@ pub const Command = struct {
            };
            ctx.args.target = .bun;

-            if (ctx.parallel or ctx.sequential) {
-                MultiRun.run(ctx) catch |err| {
-                    Output.prettyErrorln("<r><red>error<r>: {s}", .{@errorName(err)});
-                    Global.exit(1);
-                };
-            }

            if (ctx.filters.len > 0 or ctx.workspaces) {
                FilterRun.runScriptsWithFilter(ctx) catch |err| {
                    Output.prettyErrorln("<r><red>error<r>: {s}", .{@errorName(err)});
@@ -1779,7 +1762,6 @@ const string = []const u8;

const AddCompletions = @import("./cli/add_completions.zig");
const FilterRun = @import("./cli/filter_run.zig");
-const MultiRun = @import("./cli/multi_run.zig");
const PmViewCommand = @import("./cli/pm_view_command.zig");
const fs = @import("./fs.zig");
const options = @import("./options.zig");

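// Aside: the removed branch routed invocations such as
// `bun run --parallel build test` (or --sequential, optionally with
// --no-exit-on-error) to MultiRun before the --filter/--workspaces path;
// with this diff those flags and the MultiRun module are gone.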
@@ -130,9 +130,6 @@ pub const auto_or_run_params = [_]ParamType{
    clap.parseParam("-b, --bun                  Force a script or package to use Bun's runtime instead of Node.js (via symlinking node)") catch unreachable,
    clap.parseParam("--shell <STR>              Control the shell used for package.json scripts. Supports either 'bun' or 'system'") catch unreachable,
    clap.parseParam("--workspaces               Run a script in all workspace packages (from the \"workspaces\" field in package.json)") catch unreachable,
-    clap.parseParam("--parallel                 Run multiple scripts concurrently with Foreman-style output") catch unreachable,
-    clap.parseParam("--sequential               Run multiple scripts sequentially with Foreman-style output") catch unreachable,
-    clap.parseParam("--no-exit-on-error         Continue running other scripts when one fails (with --parallel/--sequential)") catch unreachable,
};

pub const auto_only_params = [_]ParamType{
@@ -456,9 +453,6 @@ pub fn parse(allocator: std.mem.Allocator, ctx: Command.Context, comptime cmd: C
    ctx.filters = args.options("--filter");
    ctx.workspaces = args.flag("--workspaces");
    ctx.if_present = args.flag("--if-present");
-    ctx.parallel = args.flag("--parallel");
-    ctx.sequential = args.flag("--sequential");
-    ctx.no_exit_on_error = args.flag("--no-exit-on-error");

    if (args.option("--elide-lines")) |elide_lines| {
        if (elide_lines.len > 0) {
@@ -974,6 +968,7 @@ pub fn parse(allocator: std.mem.Allocator, ctx: Command.Context, comptime cmd: C
        args.flag("--debug-no-minify");
    }

+    // TODO: support --format=esm
    if (ctx.bundler_options.bytecode) {
        ctx.bundler_options.output_format = .cjs;
        ctx.args.target = .bun;
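// Aside: net effect of the hunk above is that --bytecode keeps forcing
// CommonJS output (output_format = .cjs with target = .bun), and the restored
// TODO marks --format=esm bytecode as unsupported.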
@@ -1186,10 +1181,6 @@ pub fn parse(allocator: std.mem.Allocator, ctx: Command.Context, comptime cmd: C
        Output.errGeneric("Using --windows-hide-console is only available when compiling on Windows", .{});
        Global.crash();
    }
-    if (ctx.bundler_options.compile_target.os != .windows) {
-        Output.errGeneric("--windows-hide-console requires a Windows compile target", .{});
-        Global.crash();
-    }
    if (!ctx.bundler_options.compile) {
        Output.errGeneric("--windows-hide-console requires --compile", .{});
        Global.crash();
@@ -1201,10 +1192,6 @@ pub fn parse(allocator: std.mem.Allocator, ctx: Command.Context, comptime cmd: C
        Output.errGeneric("Using --windows-icon is only available when compiling on Windows", .{});
        Global.crash();
    }
-    if (ctx.bundler_options.compile_target.os != .windows) {
-        Output.errGeneric("--windows-icon requires a Windows compile target", .{});
-        Global.crash();
-    }
    if (!ctx.bundler_options.compile) {
        Output.errGeneric("--windows-icon requires --compile", .{});
        Global.crash();
@@ -1216,10 +1203,6 @@ pub fn parse(allocator: std.mem.Allocator, ctx: Command.Context, comptime cmd: C
        Output.errGeneric("Using --windows-title is only available when compiling on Windows", .{});
        Global.crash();
    }
-    if (ctx.bundler_options.compile_target.os != .windows) {
-        Output.errGeneric("--windows-title requires a Windows compile target", .{});
-        Global.crash();
-    }
    if (!ctx.bundler_options.compile) {
        Output.errGeneric("--windows-title requires --compile", .{});
        Global.crash();
@@ -1231,10 +1214,6 @@ pub fn parse(allocator: std.mem.Allocator, ctx: Command.Context, comptime cmd: C
        Output.errGeneric("Using --windows-publisher is only available when compiling on Windows", .{});
        Global.crash();
    }
-    if (ctx.bundler_options.compile_target.os != .windows) {
-        Output.errGeneric("--windows-publisher requires a Windows compile target", .{});
-        Global.crash();
-    }
    if (!ctx.bundler_options.compile) {
        Output.errGeneric("--windows-publisher requires --compile", .{});
        Global.crash();
@@ -1246,10 +1225,6 @@ pub fn parse(allocator: std.mem.Allocator, ctx: Command.Context, comptime cmd: C
        Output.errGeneric("Using --windows-version is only available when compiling on Windows", .{});
        Global.crash();
    }
-    if (ctx.bundler_options.compile_target.os != .windows) {
-        Output.errGeneric("--windows-version requires a Windows compile target", .{});
-        Global.crash();
-    }
    if (!ctx.bundler_options.compile) {
        Output.errGeneric("--windows-version requires --compile", .{});
        Global.crash();
@@ -1261,10 +1236,6 @@ pub fn parse(allocator: std.mem.Allocator, ctx: Command.Context, comptime cmd: C
        Output.errGeneric("Using --windows-description is only available when compiling on Windows", .{});
        Global.crash();
    }
-    if (ctx.bundler_options.compile_target.os != .windows) {
-        Output.errGeneric("--windows-description requires a Windows compile target", .{});
-        Global.crash();
-    }
    if (!ctx.bundler_options.compile) {
        Output.errGeneric("--windows-description requires --compile", .{});
        Global.crash();
@@ -1276,10 +1247,6 @@ pub fn parse(allocator: std.mem.Allocator, ctx: Command.Context, comptime cmd: C
        Output.errGeneric("Using --windows-copyright is only available when compiling on Windows", .{});
        Global.crash();
    }
-    if (ctx.bundler_options.compile_target.os != .windows) {
-        Output.errGeneric("--windows-copyright requires a Windows compile target", .{});
-        Global.crash();
-    }
    if (!ctx.bundler_options.compile) {
        Output.errGeneric("--windows-copyright requires --compile", .{});
        Global.crash();
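// Aside: the seven hunks above each drop the same four-line "requires a
// Windows compile target" check. A hypothetical helper (names invented, not
// part of this diff) showing the shape of the validation that remains:
fn validateWindowsOnlyFlag(
    comptime flag: []const u8,
    building_on_windows: bool,
    compiling: bool,
) ?[]const u8 {
    // Returns an error message when the flag cannot be used, null otherwise.
    if (!building_on_windows) return "Using " ++ flag ++ " is only available when compiling on Windows";
    if (!compiling) return flag ++ " requires --compile";
    return null;
}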
@@ -1339,9 +1306,8 @@ pub fn parse(allocator: std.mem.Allocator, ctx: Command.Context, comptime cmd: C
|
||||
}
|
||||
|
||||
ctx.bundler_options.output_format = format;
|
||||
// ESM bytecode is supported for --compile builds (module_info is embedded in binary)
|
||||
if (format != .cjs and format != .esm and ctx.bundler_options.bytecode) {
|
||||
Output.errGeneric("format must be 'cjs' or 'esm' when bytecode is true.", .{});
|
||||
if (format != .cjs and ctx.bundler_options.bytecode) {
|
||||
Output.errGeneric("format must be 'cjs' when bytecode is true. Eventually we'll add esm support as well.", .{});
|
||||
Global.exit(1);
|
||||
}
|
||||
}
|
||||
|
@@ -583,7 +583,6 @@ pub const BuildCommand = struct {
    .asset => Output.prettyFmt("<magenta>", true),
    .sourcemap => Output.prettyFmt("<d>", true),
    .bytecode => Output.prettyFmt("<d>", true),
-    .module_info => Output.prettyFmt("<d>", true),
    .@"metafile-json", .@"metafile-markdown" => Output.prettyFmt("<green>", true),
});

@@ -615,7 +614,6 @@ pub const BuildCommand = struct {
    .asset => "asset",
    .sourcemap => "source map",
    .bytecode => "bytecode",
-    .module_info => "module info",
    .@"metafile-json" => "metafile json",
    .@"metafile-markdown" => "metafile markdown",
}});

@@ -1,841 +0,0 @@
const ScriptConfig = struct {
    label: []const u8,
    command: [:0]const u8,
    cwd: []const u8,
    PATH: []const u8,
};

/// Wraps a BufferedReader and tracks whether it represents stdout or stderr,
/// so output can be routed to the correct parent stream.
const PipeReader = struct {
    const This = @This();

    reader: bun.io.BufferedReader = bun.io.BufferedReader.init(This),
    handle: *ProcessHandle = undefined, // set in ProcessHandle.start()
    is_stderr: bool,
    line_buffer: std.array_list.Managed(u8) = std.array_list.Managed(u8).init(bun.default_allocator),

    pub fn onReadChunk(this: *This, chunk: []const u8, hasMore: bun.io.ReadState) bool {
        _ = hasMore;
        this.handle.state.readChunk(this, chunk) catch {};
        return true;
    }

    pub fn onReaderDone(this: *This) void {
        _ = this;
    }

    pub fn onReaderError(this: *This, err: bun.sys.Error) void {
        _ = this;
        _ = err;
    }

    pub fn eventLoop(this: *This) *bun.jsc.MiniEventLoop {
        return this.handle.state.event_loop;
    }

    pub fn loop(this: *This) *bun.Async.Loop {
        if (comptime bun.Environment.isWindows) {
            return this.handle.state.event_loop.loop.uv_loop;
        } else {
            return this.handle.state.event_loop.loop;
        }
    }
};

pub const ProcessHandle = struct {
    const This = @This();

    config: *ScriptConfig,
    state: *State,
    color_idx: usize,

    stdout_reader: PipeReader = .{ .is_stderr = false },
    stderr_reader: PipeReader = .{ .is_stderr = true },

    process: ?struct {
        ptr: *bun.spawn.Process,
        status: bun.spawn.Status = .running,
    } = null,
    options: bun.spawn.SpawnOptions,

    start_time: ?std.time.Instant = null,
    end_time: ?std.time.Instant = null,

    remaining_dependencies: usize = 0,
    /// Dependents within the same script group (pre->main->post chain).
    /// These are NOT started if this handle fails, even with --no-exit-on-error.
    group_dependents: std.array_list.Managed(*This) = std.array_list.Managed(*This).init(bun.default_allocator),
    /// Dependents across sequential groups (group N -> group N+1).
    /// These ARE started even if this handle fails when --no-exit-on-error is set.
    next_dependents: std.array_list.Managed(*This) = std.array_list.Managed(*This).init(bun.default_allocator),

    fn start(this: *This) !void {
        this.state.remaining_scripts += 1;

        var argv = [_:null]?[*:0]const u8{
            this.state.shell_bin,
            if (Environment.isPosix) "-c" else "exec",
            this.config.command,
            null,
        };

        this.start_time = std.time.Instant.now() catch null;
        var spawned: bun.spawn.process.SpawnProcessResult = brk: {
            var arena = std.heap.ArenaAllocator.init(bun.default_allocator);
            defer arena.deinit();
            const original_path = this.state.env.map.get("PATH") orelse "";
            bun.handleOom(this.state.env.map.put("PATH", this.config.PATH));
            defer bun.handleOom(this.state.env.map.put("PATH", original_path));
            const envp = try this.state.env.map.createNullDelimitedEnvMap(arena.allocator());
            break :brk try (try bun.spawn.spawnProcess(&this.options, argv[0..], envp)).unwrap();
        };
        var process = spawned.toProcess(this.state.event_loop, false);

        this.stdout_reader.handle = this;
        this.stderr_reader.handle = this;
        this.stdout_reader.reader.setParent(&this.stdout_reader);
        this.stderr_reader.reader.setParent(&this.stderr_reader);

        if (Environment.isWindows) {
            this.stdout_reader.reader.source = .{ .pipe = this.options.stdout.buffer };
            this.stderr_reader.reader.source = .{ .pipe = this.options.stderr.buffer };
        }

        if (Environment.isPosix) {
            if (spawned.stdout) |stdout_fd| {
                _ = bun.sys.setNonblocking(stdout_fd);
                try this.stdout_reader.reader.start(stdout_fd, true).unwrap();
            }
            if (spawned.stderr) |stderr_fd| {
                _ = bun.sys.setNonblocking(stderr_fd);
                try this.stderr_reader.reader.start(stderr_fd, true).unwrap();
            }
        } else {
            try this.stdout_reader.reader.startWithCurrentPipe().unwrap();
            try this.stderr_reader.reader.startWithCurrentPipe().unwrap();
        }

        this.process = .{ .ptr = process };
        process.setExitHandler(this);

        switch (process.watchOrReap()) {
            .result => {},
            .err => |err| {
                if (!process.hasExited())
                    process.onExit(.{ .err = err }, &std.mem.zeroes(bun.spawn.Rusage));
            },
        }
    }
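    // Aside on the PATH handling in start() above: the shared env map is
    // mutated to the script's per-package PATH just long enough to build envp
    // inside the arena, and the deferred put restores the original when the
    // block exits, so handles started afterwards still see the unmodified PATH.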

    pub fn onProcessExit(this: *This, proc: *bun.spawn.Process, status: bun.spawn.Status, _: *const bun.spawn.Rusage) void {
        this.process.?.status = status;
        this.end_time = std.time.Instant.now() catch null;
        _ = proc;
        this.state.processExit(this) catch {};
    }

    pub fn eventLoop(this: *This) *bun.jsc.MiniEventLoop {
        return this.state.event_loop;
    }

    pub fn loop(this: *This) *bun.Async.Loop {
        if (comptime bun.Environment.isWindows) {
            return this.state.event_loop.loop.uv_loop;
        } else {
            return this.state.event_loop.loop;
        }
    }
};

const colors = [_][]const u8{
    "\x1b[36m", // cyan
    "\x1b[33m", // yellow
    "\x1b[35m", // magenta
    "\x1b[32m", // green
    "\x1b[34m", // blue
    "\x1b[31m", // red
};
const reset = "\x1b[0m";

const State = struct {
    const This = @This();

    handles: []ProcessHandle,
    event_loop: *bun.jsc.MiniEventLoop,
    remaining_scripts: usize = 0,
    max_label_len: usize,
    shell_bin: [:0]const u8,
    aborted: bool = false,
    no_exit_on_error: bool,
    env: *bun.DotEnv.Loader,
    use_colors: bool,

    pub fn isDone(this: *This) bool {
        return this.remaining_scripts == 0;
    }

    fn readChunk(this: *This, pipe: *PipeReader, chunk: []const u8) (std.Io.Writer.Error || bun.OOM)!void {
        try pipe.line_buffer.appendSlice(chunk);

        // Route to correct parent stream: child stdout -> parent stdout, child stderr -> parent stderr
        const writer = if (pipe.is_stderr) Output.errorWriter() else Output.writer();

        // Process complete lines
        while (std.mem.indexOfScalar(u8, pipe.line_buffer.items, '\n')) |newline_pos| {
            const line = pipe.line_buffer.items[0 .. newline_pos + 1];
            try this.writeLineWithPrefix(pipe.handle, line, writer);
            // Remove processed line from buffer
            const remaining = pipe.line_buffer.items[newline_pos + 1 ..];
            std.mem.copyForwards(u8, pipe.line_buffer.items[0..remaining.len], remaining);
            pipe.line_buffer.items.len = remaining.len;
        }
    }
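    // Aside: a standalone sketch of the chunk-to-line buffering above; bytes
    // accumulate until '\n', each complete line is emitted behind a label
    // prefix, and the remainder is compacted to the front with copyForwards
    // (which, unlike a plain memcpy, is safe for overlapping forward moves).
    //
    //     const std = @import("std");
    //
    //     test "split buffered chunks into prefixed lines" {
    //         var line_buffer = std.array_list.Managed(u8).init(std.testing.allocator);
    //         defer line_buffer.deinit();
    //         var out = std.array_list.Managed(u8).init(std.testing.allocator);
    //         defer out.deinit();
    //
    //         for ([_][]const u8{ "hel", "lo\nwor", "ld\n" }) |chunk| {
    //             try line_buffer.appendSlice(chunk);
    //             while (std.mem.indexOfScalar(u8, line_buffer.items, '\n')) |nl| {
    //                 try out.appendSlice("app | ");
    //                 try out.appendSlice(line_buffer.items[0 .. nl + 1]);
    //                 const remaining = line_buffer.items[nl + 1 ..];
    //                 std.mem.copyForwards(u8, line_buffer.items[0..remaining.len], remaining);
    //                 line_buffer.items.len = remaining.len;
    //             }
    //         }
    //         try std.testing.expectEqualStrings("app | hello\napp | world\n", out.items);
    //     }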

    fn writeLineWithPrefix(this: *This, handle: *ProcessHandle, line: []const u8, writer: *std.Io.Writer) std.Io.Writer.Error!void {
        try this.writePrefix(handle, writer);
        try writer.writeAll(line);
    }

    fn writePrefix(this: *This, handle: *ProcessHandle, writer: *std.Io.Writer) std.Io.Writer.Error!void {
        if (this.use_colors) {
            try writer.writeAll(colors[handle.color_idx % colors.len]);
        }

        try writer.writeAll(handle.config.label);
        const padding = this.max_label_len -| handle.config.label.len;
        for (0..padding) |_| {
            try writer.writeByte(' ');
        }

        if (this.use_colors) {
            try writer.writeAll(reset);
        }

        try writer.writeAll(" | ");
    }

    fn flushPipeBuffer(this: *This, handle: *ProcessHandle, pipe: *PipeReader) std.Io.Writer.Error!void {
        if (pipe.line_buffer.items.len > 0) {
            const line = pipe.line_buffer.items;
            const needs_newline = line.len > 0 and line[line.len - 1] != '\n';
            const writer = if (pipe.is_stderr) Output.errorWriter() else Output.writer();
            try this.writeLineWithPrefix(handle, line, writer);
            if (needs_newline) {
                writer.writeAll("\n") catch {};
            }
            pipe.line_buffer.clearRetainingCapacity();
        }
    }

    fn processExit(this: *This, handle: *ProcessHandle) std.Io.Writer.Error!void {
        this.remaining_scripts -= 1;

        // Flush remaining buffers (stdout first, then stderr)
        try this.flushPipeBuffer(handle, &handle.stdout_reader);
        try this.flushPipeBuffer(handle, &handle.stderr_reader);

        // Print exit status to stderr (status messages always go to stderr)
        const writer = Output.errorWriter();
        try this.writePrefix(handle, writer);

        switch (handle.process.?.status) {
            .exited => |exited| {
                if (exited.code != 0) {
                    try writer.print("Exited with code {d}\n", .{exited.code});
                } else {
                    if (handle.start_time != null and handle.end_time != null) {
                        const duration = handle.end_time.?.since(handle.start_time.?);
                        const ms = @as(f64, @floatFromInt(duration)) / 1_000_000.0;
                        if (ms > 1000.0) {
                            try writer.print("Done in {d:.2}s\n", .{ms / 1000.0});
                        } else {
                            try writer.print("Done in {d:.0}ms\n", .{ms});
                        }
                    } else {
                        try writer.writeAll("Done\n");
                    }
                }
            },
            .signaled => |signal| {
                try writer.print("Signaled: {s}\n", .{@tagName(signal)});
            },
            else => {
                try writer.writeAll("Error\n");
            },
        }

        // Check if we should abort on error
        const failed = switch (handle.process.?.status) {
            .exited => |exited| exited.code != 0,
            .signaled => true,
            else => true,
        };

        if (failed and !this.no_exit_on_error) {
            this.abort();
            return;
        }

        if (failed) {
            // Pre->main->post chain is broken -- skip group dependents.
            this.skipDependents(handle.group_dependents.items);
            // But cascade to next-group dependents (sequential --no-exit-on-error).
            if (!this.aborted) {
                this.startDependents(handle.next_dependents.items);
            }
            return;
        }

        // Success: cascade to all dependents
        if (!this.aborted) {
            this.startDependents(handle.group_dependents.items);
            this.startDependents(handle.next_dependents.items);
        }
    }
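    // Aside, a worked example of the cascade rules above (for
    // `--sequential build test` where a "postbuild" script exists):
    //   build -> postbuild   via group_dependents (within group 0)
    //   postbuild -> test    via next_dependents (group 0 to group 1)
    // If build fails under --no-exit-on-error, skipDependents marks postbuild
    // as never-starting but still cascades its next_dependents, so test runs
    // anyway; without --no-exit-on-error, the first failure aborts every
    // running handle.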

    fn startDependents(_: *This, dependents: []*ProcessHandle) void {
        for (dependents) |dependent| {
            dependent.remaining_dependencies -= 1;
            if (dependent.remaining_dependencies == 0) {
                dependent.start() catch {
                    Output.prettyErrorln("<r><red>error<r>: Failed to start process", .{});
                    Global.exit(1);
                };
            }
        }
    }

    /// Skip group dependents that will never start because their predecessor
    /// failed. Recursively skip their group dependents too.
    fn skipDependents(this: *This, dependents: []*ProcessHandle) void {
        for (dependents) |dependent| {
            dependent.remaining_dependencies -= 1;
            if (dependent.remaining_dependencies == 0) {
                this.skipDependents(dependent.group_dependents.items);
                // Still cascade next_dependents so sequential chains continue
                if (!this.aborted) {
                    this.startDependents(dependent.next_dependents.items);
                }
            }
        }
    }

    pub fn abort(this: *This) void {
        this.aborted = true;
        for (this.handles) |*handle| {
            if (handle.process) |*proc| {
                if (proc.status == .running) {
                    _ = proc.ptr.kill(std.posix.SIG.INT);
                }
            }
        }
    }

    pub fn finalize(this: *This) u8 {
        for (this.handles) |handle| {
            if (handle.process) |proc| {
                switch (proc.status) {
                    .exited => |exited| {
                        if (exited.code != 0) return exited.code;
                    },
                    .signaled => |signal| return signal.toExitCode() orelse 1,
                    else => return 1,
                }
            }
        }
        return 0;
    }
};

const AbortHandler = struct {
    var should_abort = false;

    fn posixSignalHandler(sig: i32, info: *const std.posix.siginfo_t, _: ?*const anyopaque) callconv(.c) void {
        _ = sig;
        _ = info;
        should_abort = true;
    }

    fn windowsCtrlHandler(dwCtrlType: std.os.windows.DWORD) callconv(.winapi) std.os.windows.BOOL {
        if (dwCtrlType == std.os.windows.CTRL_C_EVENT) {
            should_abort = true;
            return std.os.windows.TRUE;
        }
        return std.os.windows.FALSE;
    }

    pub fn install() void {
        if (Environment.isPosix) {
            const action = std.posix.Sigaction{
                .handler = .{ .sigaction = AbortHandler.posixSignalHandler },
                .mask = std.posix.sigemptyset(),
                .flags = std.posix.SA.SIGINFO | std.posix.SA.RESTART | std.posix.SA.RESETHAND,
            };
            std.posix.sigaction(std.posix.SIG.INT, &action, null);
        } else {
            const res = bun.c.SetConsoleCtrlHandler(windowsCtrlHandler, std.os.windows.TRUE);
            if (res == 0) {
                if (Environment.isDebug) {
                    Output.warn("Failed to set abort handler\n", .{});
                }
            }
        }
    }

    pub fn uninstall() void {
        if (Environment.isWindows) {
            _ = bun.c.SetConsoleCtrlHandler(null, std.os.windows.FALSE);
        }
    }
};

/// Simple glob matching: `*` matches any sequence of characters.
fn matchesGlob(pattern: []const u8, name: []const u8) bool {
    var pi: usize = 0;
    var ni: usize = 0;
    var star_pi: usize = 0;
    var star_ni: usize = 0;
    var have_star = false;

    while (ni < name.len or pi < pattern.len) {
        if (pi < pattern.len and pattern[pi] == '*') {
            have_star = true;
            star_pi = pi;
            star_ni = ni;
            pi += 1;
        } else if (pi < pattern.len and ni < name.len and pattern[pi] == name[ni]) {
            pi += 1;
            ni += 1;
        } else if (have_star) {
            pi = star_pi + 1;
            star_ni += 1;
            ni = star_ni;
            if (ni > name.len) return false;
        } else {
            return false;
        }
    }
    return true;
}
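
// Aside: a few concrete cases for the backtracking matcher above (standalone
// sketch; assumes this file's std import):
test matchesGlob {
    try std.testing.expect(matchesGlob("build:*", "build:css"));
    try std.testing.expect(matchesGlob("*", "anything"));
    try std.testing.expect(matchesGlob("a*c", "abc"));
    try std.testing.expect(!matchesGlob("a*c", "ab"));
    try std.testing.expect(!matchesGlob("build:*", "test:css"));
}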

/// Add configs for a single script name (with pre/post handling).
/// When `label_prefix` is non-null, labels become "{prefix}:{name}" (for workspace runs).
fn addScriptConfigs(
    configs: *std.array_list.Managed(ScriptConfig),
    group_infos: *std.array_list.Managed(GroupInfo),
    raw_name: []const u8,
    scripts_map: ?*const bun.StringArrayHashMap([]const u8),
    allocator: std.mem.Allocator,
    cwd: []const u8,
    PATH: []const u8,
    label_prefix: ?[]const u8,
) !void {
    const group_start = configs.items.len;

    const label = if (label_prefix) |prefix|
        try std.fmt.allocPrint(allocator, "{s}:{s}", .{ prefix, raw_name })
    else
        raw_name;

    const script_content = if (scripts_map) |sm| sm.get(raw_name) else null;

    if (script_content) |content| {
        // It's a package.json script - check for pre/post
        const pre_name = try std.fmt.allocPrint(allocator, "pre{s}", .{raw_name});
        const post_name = try std.fmt.allocPrint(allocator, "post{s}", .{raw_name});

        const pre_content = if (scripts_map) |sm| sm.get(pre_name) else null;
        const post_content = if (scripts_map) |sm| sm.get(post_name) else null;

        if (pre_content) |pc| {
            var cmd_buf = try std.array_list.Managed(u8).initCapacity(allocator, pc.len + 1);
            try RunCommand.replacePackageManagerRun(&cmd_buf, pc);
            try cmd_buf.append(0);
            try configs.append(.{
                .label = label,
                .command = cmd_buf.items[0 .. cmd_buf.items.len - 1 :0],
                .cwd = cwd,
                .PATH = PATH,
            });
        }

        // Main script
        {
            var cmd_buf = try std.array_list.Managed(u8).initCapacity(allocator, content.len + 1);
            try RunCommand.replacePackageManagerRun(&cmd_buf, content);
            try cmd_buf.append(0);
            try configs.append(.{
                .label = label,
                .command = cmd_buf.items[0 .. cmd_buf.items.len - 1 :0],
                .cwd = cwd,
                .PATH = PATH,
            });
        }

        if (post_content) |pc| {
            var cmd_buf = try std.array_list.Managed(u8).initCapacity(allocator, pc.len + 1);
            try RunCommand.replacePackageManagerRun(&cmd_buf, pc);
            try cmd_buf.append(0);
            try configs.append(.{
                .label = label,
                .command = cmd_buf.items[0 .. cmd_buf.items.len - 1 :0],
                .cwd = cwd,
                .PATH = PATH,
            });
        }
    } else {
        // Not a package.json script - run as a raw command.
        // If it looks like a file path, prefix with the bun executable.
        const is_file = raw_name.len > 0 and (raw_name[0] == '.' or raw_name[0] == '/' or
            (Environment.isWindows and raw_name[0] == '\\') or hasRunnableExtension(raw_name));
        const command_z = if (is_file) brk: {
            const bun_path = bun.selfExePath() catch "bun";
            // Quote the bun path so that backslashes on Windows are not
            // interpreted as escape characters by `bun exec` (Bun's shell).
            const cmd_str = try std.fmt.allocPrint(allocator, "\"{s}\" {s}" ++ "\x00", .{ bun_path, raw_name });
            break :brk cmd_str[0 .. cmd_str.len - 1 :0];
        } else try allocator.dupeZ(u8, raw_name);
        try configs.append(.{
            .label = label,
            .command = command_z,
            .cwd = cwd,
            .PATH = PATH,
        });
    }

    try group_infos.append(.{
        .start = group_start,
        .count = configs.items.len - group_start,
    });
}
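
// Aside: so a script "deploy" with "predeploy"/"postdeploy" entries expands
// to a three-config group (pre, main, post) sharing one label, while a bare
// command or a hook-less script is a single-config group; the GroupInfo
// records appended here drive the pre->main->post chaining at the bottom of
// run().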

const GroupInfo = struct { start: usize, count: usize };

pub fn run(ctx: Command.Context) !noreturn {
    // Validate flags
    if (ctx.parallel and ctx.sequential) {
        Output.prettyErrorln("<r><red>error<r>: --parallel and --sequential cannot be used together", .{});
        Global.exit(1);
    }

    // Collect script names from positionals + passthrough.
    // For RunCommand: positionals[0] is "run", skip it. For AutoCommand: no "run" prefix.
    var script_names = std.array_list.Managed([]const u8).init(ctx.allocator);

    var positionals = ctx.positionals;
    if (positionals.len > 0 and (strings.eqlComptime(positionals[0], "run") or strings.eqlComptime(positionals[0], "r"))) {
        positionals = positionals[1..];
    }
    for (positionals) |pos| {
        if (pos.len > 0) {
            try script_names.append(pos);
        }
    }
    for (ctx.passthrough) |pt| {
        if (pt.len > 0) {
            try script_names.append(pt);
        }
    }

    if (script_names.items.len == 0) {
        Output.prettyErrorln("<r><red>error<r>: --parallel/--sequential requires at least one script name", .{});
        Global.exit(1);
    }

    // Set up the transpiler/environment
    const fsinstance = try bun.fs.FileSystem.init(null);
    var this_transpiler: transpiler.Transpiler = undefined;
    _ = try RunCommand.configureEnvForRun(ctx, &this_transpiler, null, true, false);
    const cwd = fsinstance.top_level_dir;

    const event_loop = bun.jsc.MiniEventLoop.initGlobal(this_transpiler.env, null);
    const shell_bin: [:0]const u8 = if (Environment.isPosix)
        RunCommand.findShell(this_transpiler.env.get("PATH") orelse "", cwd) orelse return error.MissingShell
    else
        bun.selfExePath() catch return error.MissingShell;

    // Build ScriptConfigs and ProcessHandles.
    // Each script name can produce up to 3 handles (pre, main, post).
    var configs = std.array_list.Managed(ScriptConfig).init(ctx.allocator);
    var group_infos = std.array_list.Managed(GroupInfo).init(ctx.allocator);

    if (ctx.filters.len > 0 or ctx.workspaces) {
        // Workspace-aware mode: iterate over matching workspace packages
        var filters_to_use = ctx.filters;
        if (ctx.workspaces) {
            filters_to_use = &.{"*"};
        }

        var filter_instance = try FilterArg.FilterSet.init(ctx.allocator, filters_to_use, cwd);
        var patterns = std.array_list.Managed([]u8).init(ctx.allocator);

        var root_buf: bun.PathBuffer = undefined;
        const resolve_root = try FilterArg.getCandidatePackagePatterns(ctx.allocator, ctx.log, &patterns, cwd, &root_buf);

        var package_json_iter = try FilterArg.PackageFilterIterator.init(ctx.allocator, patterns.items, resolve_root);
        defer package_json_iter.deinit();

        // Phase 1: Collect matching packages (filesystem order is nondeterministic)
        const MatchedPackage = struct {
            name: []const u8,
            dirpath: []const u8,
            scripts: *const bun.StringArrayHashMap([]const u8),
            PATH: []const u8,
        };
        var matched_packages = std.array_list.Managed(MatchedPackage).init(ctx.allocator);

        while (try package_json_iter.next()) |package_json_path| {
            const dirpath = try ctx.allocator.dupe(u8, std.fs.path.dirname(package_json_path) orelse Global.crash());
            const path = bun.strings.withoutTrailingSlash(dirpath);

            // When using --workspaces, skip the root package to prevent recursion
            if (ctx.workspaces and strings.eql(path, resolve_root)) {
                continue;
            }

            const pkgjson = bun.PackageJSON.parse(&this_transpiler.resolver, dirpath, .invalid, null, .include_scripts, .main) orelse {
                continue;
            };

            if (!filter_instance.matches(path, pkgjson.name))
                continue;

            const pkg_scripts = pkgjson.scripts orelse continue;
            const pkg_PATH = try RunCommand.configurePathForRunWithPackageJsonDir(ctx, dirpath, &this_transpiler, null, dirpath, ctx.debug.run_in_bun);
            const pkg_name = if (pkgjson.name.len > 0)
                pkgjson.name
            else
                // Fallback: use relative path from workspace root
                try ctx.allocator.dupe(u8, bun.path.relativePlatform(resolve_root, path, .posix, false));

            try matched_packages.append(.{
                .name = pkg_name,
                .dirpath = dirpath,
                .scripts = pkg_scripts,
                .PATH = pkg_PATH,
            });
        }

        // Phase 2: Sort by package name, then by path as tiebreaker for deterministic ordering
        std.mem.sort(MatchedPackage, matched_packages.items, {}, struct {
            fn lessThan(_: void, a: MatchedPackage, b: MatchedPackage) bool {
                const name_order = std.mem.order(u8, a.name, b.name);
                if (name_order != .eq) return name_order == .lt;
                return std.mem.order(u8, a.dirpath, b.dirpath) == .lt;
            }
        }.lessThan);

        // Phase 3: Build configs from sorted packages
        for (matched_packages.items) |pkg| {
            for (script_names.items) |raw_name| {
                if (std.mem.indexOfScalar(u8, raw_name, '*') != null) {
                    // Glob: expand against this package's scripts
                    var matches = std.array_list.Managed([]const u8).init(ctx.allocator);
                    for (pkg.scripts.keys()) |key| {
                        if (matchesGlob(raw_name, key)) {
                            try matches.append(key);
                        }
                    }
                    std.mem.sort([]const u8, matches.items, {}, struct {
                        fn lessThan(_: void, a: []const u8, b: []const u8) bool {
                            return std.mem.order(u8, a, b) == .lt;
                        }
                    }.lessThan);
                    for (matches.items) |matched_name| {
                        try addScriptConfigs(&configs, &group_infos, matched_name, pkg.scripts, ctx.allocator, pkg.dirpath, pkg.PATH, pkg.name);
                    }
                } else {
                    if (pkg.scripts.get(raw_name) != null) {
                        try addScriptConfigs(&configs, &group_infos, raw_name, pkg.scripts, ctx.allocator, pkg.dirpath, pkg.PATH, pkg.name);
                    } else if (ctx.workspaces and !ctx.if_present) {
                        Output.prettyErrorln("<r><red>error<r>: Missing \"{s}\" script in package \"{s}\"", .{ raw_name, pkg.name });
                        Global.exit(1);
                    }
                }
            }
        }
||||
|
||||
if (configs.items.len == 0) {
|
||||
if (ctx.if_present) {
|
||||
Global.exit(0);
|
||||
}
|
||||
if (ctx.workspaces) {
|
||||
Output.prettyErrorln("<r><red>error<r>: No workspace packages have matching scripts", .{});
|
||||
} else {
|
||||
Output.prettyErrorln("<r><red>error<r>: No packages matched the filter", .{});
|
||||
}
|
||||
Global.exit(1);
|
||||
}
|
||||
} else {
|
||||
// Single-package mode: use the root package.json
|
||||
const PATH = try RunCommand.configurePathForRunWithPackageJsonDir(ctx, "", &this_transpiler, null, cwd, ctx.debug.run_in_bun);
|
||||
|
||||
// Load package.json scripts
|
||||
const root_dir_info = this_transpiler.resolver.readDirInfo(cwd) catch {
|
||||
Output.prettyErrorln("<r><red>error<r>: Failed to read directory", .{});
|
||||
Global.exit(1);
|
||||
} orelse {
|
||||
Output.prettyErrorln("<r><red>error<r>: Failed to read directory", .{});
|
||||
Global.exit(1);
|
||||
};
|
||||
|
||||
const package_json = root_dir_info.enclosing_package_json;
|
||||
const scripts_map: ?*const bun.StringArrayHashMap([]const u8) = if (package_json) |pkg| pkg.scripts else null;
|
||||
|
||||
for (script_names.items) |raw_name| {
|
||||
// Check if this is a glob pattern
|
||||
if (std.mem.indexOfScalar(u8, raw_name, '*') != null) {
|
||||
if (scripts_map) |sm| {
|
||||
// Collect matching script names
|
||||
var matches = std.array_list.Managed([]const u8).init(ctx.allocator);
|
||||
for (sm.keys()) |key| {
|
||||
if (matchesGlob(raw_name, key)) {
|
||||
try matches.append(key);
|
||||
}
|
||||
}
|
||||
|
||||
// Sort alphabetically
|
||||
std.mem.sort([]const u8, matches.items, {}, struct {
|
||||
fn lessThan(_: void, a: []const u8, b: []const u8) bool {
|
||||
return std.mem.order(u8, a, b) == .lt;
|
||||
}
|
||||
}.lessThan);
|
||||
|
||||
if (matches.items.len == 0) {
|
||||
Output.prettyErrorln("<r><red>error<r>: No scripts match pattern \"{s}\"", .{raw_name});
|
||||
Global.exit(1);
|
||||
}
|
||||
|
||||
for (matches.items) |matched_name| {
|
||||
try addScriptConfigs(&configs, &group_infos, matched_name, scripts_map, ctx.allocator, cwd, PATH, null);
|
||||
}
|
||||
} else {
|
||||
Output.prettyErrorln("<r><red>error<r>: Cannot use glob pattern \"{s}\" without package.json scripts", .{raw_name});
|
||||
Global.exit(1);
|
||||
}
|
||||
} else {
|
||||
try addScriptConfigs(&configs, &group_infos, raw_name, scripts_map, ctx.allocator, cwd, PATH, null);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (configs.items.len == 0) {
|
||||
Output.prettyErrorln("<r><red>error<r>: No scripts to run", .{});
|
||||
Global.exit(1);
|
||||
}
|
||||
|
||||
// Compute max label width
|
||||
var max_label_len: usize = 0;
|
||||
for (configs.items) |*config| {
|
||||
if (config.label.len > max_label_len) {
|
||||
max_label_len = config.label.len;
|
||||
}
|
||||
}
|
||||
|
||||
const use_colors = Output.enable_ansi_colors_stderr;
|
||||
|
||||
var state = State{
|
||||
.handles = try ctx.allocator.alloc(ProcessHandle, configs.items.len),
|
||||
.event_loop = event_loop,
|
||||
.max_label_len = max_label_len,
|
||||
.shell_bin = shell_bin,
|
||||
.no_exit_on_error = ctx.no_exit_on_error,
|
||||
.env = this_transpiler.env,
|
||||
.use_colors = use_colors,
|
||||
};
|
||||
|
||||
// Initialize handles
|
||||
for (configs.items, 0..) |*config, i| {
|
||||
// Find which group this belongs to, for color assignment
|
||||
var color_idx: usize = 0;
|
||||
for (group_infos.items, 0..) |group, gi| {
|
||||
if (i >= group.start and i < group.start + group.count) {
|
||||
color_idx = gi;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
state.handles[i] = ProcessHandle{
|
||||
.state = &state,
|
||||
.config = config,
|
||||
.color_idx = color_idx,
|
||||
.options = .{
|
||||
.stdin = .ignore,
|
||||
.stdout = if (Environment.isPosix) .buffer else .{ .buffer = try bun.default_allocator.create(bun.windows.libuv.Pipe) },
|
||||
.stderr = if (Environment.isPosix) .buffer else .{ .buffer = try bun.default_allocator.create(bun.windows.libuv.Pipe) },
|
||||
.cwd = config.cwd,
|
||||
.windows = if (Environment.isWindows) .{ .loop = bun.jsc.EventLoopHandle.init(event_loop) },
|
||||
.stream = true,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
// Set up pre->main->post chaining within each group
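    // Each handle in a group waits on its predecessor via remaining_dependencies;
    // a handle is launched only once that count drops to zero (see the start loop below).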
    for (group_infos.items) |group| {
        if (group.count > 1) {
            var j: usize = group.start;
            while (j < group.start + group.count - 1) : (j += 1) {
                try state.handles[j].group_dependents.append(&state.handles[j + 1]);
                state.handles[j + 1].remaining_dependencies += 1;
            }
        }
    }

    // For sequential mode, chain the groups together
    if (ctx.sequential) {
        var gi: usize = 0;
        while (gi < group_infos.items.len - 1) : (gi += 1) {
            const current_group = group_infos.items[gi];
            const next_group = group_infos.items[gi + 1];
            // Last handle of the current group -> first handle of the next group
            const last_in_current = current_group.start + current_group.count - 1;
            const first_in_next = next_group.start;
            try state.handles[last_in_current].next_dependents.append(&state.handles[first_in_next]);
            state.handles[first_in_next].remaining_dependencies += 1;
        }
    }

    // Start handles with no dependencies
    for (state.handles) |*handle| {
        if (handle.remaining_dependencies == 0) {
            handle.start() catch {
                Output.prettyErrorln("<r><red>error<r>: Failed to start process", .{});
                Global.exit(1);
            };
        }
    }

    AbortHandler.install();
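
    // Drive the event loop until every child has exited. The first Ctrl-C sets
    // should_abort, which is translated into a single abort() of the remaining
    // children.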
    while (!state.isDone()) {
        if (AbortHandler.should_abort and !state.aborted) {
            AbortHandler.uninstall();
            state.abort();
        }
        event_loop.tickOnce(&state);
    }

    const status = state.finalize();
    Global.exit(status);
}
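
/// Returns true when `name` has a file extension whose default loader Bun can
/// run directly (e.g. JavaScript/TypeScript sources).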
fn hasRunnableExtension(name: []const u8) bool {
    const ext = std.fs.path.extension(name);
    const loader = bun.options.defaultLoaders.get(ext) orelse return false;
    return loader.canBeRunByBun();
}

const FilterArg = @import("./filter_arg.zig");
const std = @import("std");
const RunCommand = @import("./run_command.zig").RunCommand;

const bun = @import("bun");
const Environment = bun.Environment;
const Global = bun.Global;
const Output = bun.Output;
const strings = bun.strings;
const transpiler = bun.transpiler;

const CLI = bun.cli;
const Command = CLI.Command;
@@ -1,6 +1,5 @@
// import type { Readable, Writable } from "node:stream";
// import type { WorkerOptions } from "node:worker_threads";
declare const self: typeof globalThis;
type WebWorker = InstanceType<typeof globalThis.Worker>;

const EventEmitter = require("node:events");

@@ -25,11 +24,13 @@ const {
  1: _threadId,
  2: _receiveMessageOnPort,
  3: environmentData,
  4: _parentPort,
} = $cpp("Worker.cpp", "createNodeWorkerThreadsBinding") as [
  unknown,
  number,
  (port: unknown) => unknown,
  Map<unknown, unknown>,
  MessagePort | null,
];

type NodeWorkerOptions = import("node:worker_threads").WorkerOptions;

@@ -127,81 +128,15 @@ function receiveMessageOnPort(port: MessagePort) {
  };
}

// TODO: parent port emulation is not complete
function fakeParentPort() {
  const fake = Object.create(MessagePort.prototype);
  Object.defineProperty(fake, "onmessage", {
    get() {
      return self.onmessage;
    },
    set(value) {
      self.onmessage = value;
    },
  });
// For Node workers, parentPort is a real MessagePort (separate from self.onmessage).
// Messages sent via worker.postMessage() only trigger parentPort listeners,
// not self.onmessage (which is Web Worker behavior).
let parentPort: MessagePort | null = isMainThread ? null : _parentPort;

  Object.defineProperty(fake, "onmessageerror", {
    get() {
      return self.onmessageerror;
    },
    set(value) {
      self.onmessageerror = value;
    },
  });

  const postMessage = $newCppFunction("ZigGlobalObject.cpp", "jsFunctionPostMessage", 1);
  Object.defineProperty(fake, "postMessage", {
    value(...args: [any, any]) {
      return postMessage.$apply(null, args);
    },
  });

  Object.defineProperty(fake, "close", {
    value() {},
  });

  Object.defineProperty(fake, "start", {
    value() {},
  });

  Object.defineProperty(fake, "unref", {
    value() {},
  });

  Object.defineProperty(fake, "ref", {
    value() {},
  });

  Object.defineProperty(fake, "hasRef", {
    value() {
      return false;
    },
  });

  Object.defineProperty(fake, "setEncoding", {
    value() {},
  });

  Object.defineProperty(fake, "addEventListener", {
    value: self.addEventListener.bind(self),
  });

  Object.defineProperty(fake, "removeEventListener", {
    value: self.removeEventListener.bind(self),
  });

  Object.defineProperty(fake, "removeListener", {
    value: self.removeEventListener.bind(self),
    enumerable: false,
  });

  Object.defineProperty(fake, "addListener", {
    value: self.addEventListener.bind(self),
    enumerable: false,
  });

  return fake;
// Add setEncoding which is a Node.js-specific no-op for stream compatibility.
if (parentPort && !("setEncoding" in parentPort)) {
  Object.defineProperty(parentPort, "setEncoding", { value() {}, enumerable: false });
}
let parentPort: MessagePort | null = isMainThread ? null : fakeParentPort();

function getEnvironmentData(key: unknown): unknown {
  return environmentData.get(key);

@@ -395,7 +395,6 @@ pub const Options = struct {
    target: options.Target = .browser,

    runtime_transpiler_cache: ?*bun.jsc.RuntimeTranspilerCache = null,
    module_info: ?*analyze_transpiled_module.ModuleInfo = null,
    input_files_for_dev_server: ?[]logger.Source = null,

    commonjs_named_exports: js_ast.Ast.CommonJSNamedExports = .{},

@@ -633,44 +632,9 @@ fn NewPrinter(
    binary_expression_stack: std.array_list.Managed(BinaryExpressionVisitor) = undefined,

    was_lazy_export: bool = false,
    module_info: if (!may_have_module_info) void else ?*analyze_transpiled_module.ModuleInfo = if (!may_have_module_info) {} else null,

    const Printer = @This();

    const may_have_module_info = is_bun_platform and !rewrite_esm_to_cjs;
    const TopLevelAndIsExport = if (!may_have_module_info) struct {} else struct {
        is_export: bool = false,
        is_top_level: ?analyze_transpiled_module.ModuleInfo.VarKind = null,
    };
    const TopLevel = if (!may_have_module_info) struct {
        pub inline fn init(_: IsTopLevel) @This() {
            return .{};
        }
        pub inline fn subVar(_: @This()) @This() {
            return .{};
        }
        pub inline fn isTopLevel(_: @This()) bool {
            return false;
        }
    } else struct {
        is_top_level: IsTopLevel = .no,
        pub inline fn init(is_top_level: IsTopLevel) @This() {
            return .{ .is_top_level = is_top_level };
        }
        pub fn subVar(self: @This()) @This() {
            if (self.is_top_level == .no) return @This().init(.no);
            return @This().init(.var_only);
        }
        pub inline fn isTopLevel(self: @This()) bool {
            return self.is_top_level != .no;
        }
    };
    const IsTopLevel = enum { yes, var_only, no };
    inline fn moduleInfo(self: *const Printer) ?*analyze_transpiled_module.ModuleInfo {
        if (!may_have_module_info) return null;
        return self.module_info;
    }

    /// When Printer is used as an io.Writer, this represents its error type, aka nothing.
    pub const Error = error{};

@@ -1067,25 +1031,6 @@ fn NewPrinter(

        p.printSemicolonAfterStatement();
    }

    // Record var declarations for module_info. printGlobalBunImportStatement
    // bypasses printDeclStmt/printBinding, so we must record vars explicitly.
    if (may_have_module_info) {
        if (p.moduleInfo()) |mi| {
            if (import.star_name_loc != null) {
                const name = p.renamer.nameForSymbol(import.namespace_ref);
                bun.handleOom(mi.addVar(bun.handleOom(mi.str(name)), .declared));
            }
            if (import.default_name) |default| {
                const name = p.renamer.nameForSymbol(default.ref.?);
                bun.handleOom(mi.addVar(bun.handleOom(mi.str(name)), .declared));
            }
            for (import.items) |item| {
                const name = p.renamer.nameForSymbol(item.name.ref.?);
                bun.handleOom(mi.addVar(bun.handleOom(mi.str(name)), .declared));
            }
        }
    }
}

pub inline fn printSpaceBeforeIdentifier(

@@ -1128,30 +1073,30 @@ fn NewPrinter(
    }
}

pub fn printBody(p: *Printer, stmt: Stmt, tlmtlo: TopLevel) void {
pub fn printBody(p: *Printer, stmt: Stmt) void {
    switch (stmt.data) {
        .s_block => |block| {
            p.printSpace();
            p.printBlock(stmt.loc, block.stmts, block.close_brace_loc, tlmtlo);
            p.printBlock(stmt.loc, block.stmts, block.close_brace_loc);
            p.printNewline();
        },
        else => {
            p.printNewline();
            p.indent();
            p.printStmt(stmt, tlmtlo) catch unreachable;
            p.printStmt(stmt) catch unreachable;
            p.unindent();
        },
    }
}

pub fn printBlockBody(p: *Printer, stmts: []const Stmt, tlmtlo: TopLevel) void {
pub fn printBlockBody(p: *Printer, stmts: []const Stmt) void {
    for (stmts) |stmt| {
        p.printSemicolonIfNeeded();
        p.printStmt(stmt, tlmtlo) catch unreachable;
        p.printStmt(stmt) catch unreachable;
    }
}

pub fn printBlock(p: *Printer, loc: logger.Loc, stmts: []const Stmt, close_brace_loc: ?logger.Loc, tlmtlo: TopLevel) void {
pub fn printBlock(p: *Printer, loc: logger.Loc, stmts: []const Stmt, close_brace_loc: ?logger.Loc) void {
    p.addSourceMapping(loc);
    p.print("{");
    if (stmts.len > 0) {

@@ -1159,7 +1104,7 @@ fn NewPrinter(
        p.printNewline();

        p.indent();
        p.printBlockBody(stmts, tlmtlo);
        p.printBlockBody(stmts);
        p.unindent();

        p.printIndent();

@@ -1178,8 +1123,8 @@ fn NewPrinter(
    p.printNewline();

    p.indent();
    p.printBlockBody(prepend, TopLevel.init(.no));
    p.printBlockBody(stmts, TopLevel.init(.no));
    p.printBlockBody(prepend);
    p.printBlockBody(stmts);
    p.unindent();
    p.needs_semicolon = false;

@@ -1187,7 +1132,7 @@ fn NewPrinter(
    p.print("}");
}

pub fn printDecls(p: *Printer, comptime keyword: string, decls_: []G.Decl, flags: ExprFlag.Set, tlm: TopLevelAndIsExport) void {
pub fn printDecls(p: *Printer, comptime keyword: string, decls_: []G.Decl, flags: ExprFlag.Set) void {
    p.print(keyword);
    p.printSpace();
    var decls = decls_;

@@ -1295,7 +1240,7 @@ fn NewPrinter(
        .is_single_line = true,
    };
    const binding = Binding.init(&b_object, target_e_dot.target.loc);
    p.printBinding(binding, tlm);
    p.printBinding(binding);
}

p.printWhitespacer(ws(" = "));

@@ -1311,7 +1256,7 @@ fn NewPrinter(
}

{
    p.printBinding(decls[0].binding, tlm);
    p.printBinding(decls[0].binding);

    if (decls[0].value) |value| {
        p.printWhitespacer(ws(" = "));

@@ -1323,7 +1268,7 @@ fn NewPrinter(
    p.print(",");
    p.printSpace();

    p.printBinding(decl.binding, tlm);
    p.printBinding(decl.binding);

    if (decl.value) |value| {
        p.printWhitespacer(ws(" = "));

@@ -1397,7 +1342,7 @@ fn NewPrinter(
    p.print("...");
}

p.printBinding(arg.binding, .{});
p.printBinding(arg.binding);

if (arg.default) |default| {
    p.printWhitespacer(ws(" = "));

@@ -1413,7 +1358,7 @@ fn NewPrinter(
pub fn printFunc(p: *Printer, func: G.Fn) void {
    p.printFnArgs(func.open_parens_loc, func.args, func.flags.contains(.has_rest_arg), false);
    p.printSpace();
    p.printBlock(func.body.loc, func.body.stmts, null, TopLevel.init(.no));
    p.printBlock(func.body.loc, func.body.stmts, null);
}

pub fn printClass(p: *Printer, class: G.Class) void {

@@ -1437,7 +1382,7 @@ fn NewPrinter(
if (item.kind == .class_static_block) {
    p.print("static");
    p.printSpace();
    p.printBlock(item.class_static_block.?.loc, item.class_static_block.?.stmts.slice(), null, TopLevel.init(.no));
    p.printBlock(item.class_static_block.?.loc, item.class_static_block.?.stmts.slice(), null);
    p.printNewline();
    continue;
}

@@ -2064,7 +2009,6 @@ fn NewPrinter(
    p.print(".importMeta");
} else if (!p.options.import_meta_ref.isValid()) {
    // Most of the time, leave it in there
    if (p.moduleInfo()) |mi| mi.flags.contains_import_meta = true;
    p.print("import.meta");
} else {
    // Note: The bundler will not hit this code path. The bundler will replace

@@ -2090,7 +2034,6 @@ fn NewPrinter(
        p.printSpaceBeforeIdentifier();
        p.addSourceMapping(expr.loc);
    }
    if (p.moduleInfo()) |mi| mi.flags.contains_import_meta = true;
    p.print("import.meta.main");
} else {
    bun.debugAssert(p.options.module_type != .internal_bake_dev);

@@ -2596,7 +2539,7 @@ fn NewPrinter(
}

if (!wasPrinted) {
    p.printBlock(e.body.loc, e.body.stmts, null, TopLevel.init(.no));
    p.printBlock(e.body.loc, e.body.stmts, null);
}

if (wrap) {

@@ -3582,21 +3525,13 @@ fn NewPrinter(
    p.printExpr(initial, .comma, ExprFlag.None());
}

pub fn printBinding(p: *Printer, binding: Binding, tlm: TopLevelAndIsExport) void {
pub fn printBinding(p: *Printer, binding: Binding) void {
    switch (binding.data) {
        .b_missing => {},
        .b_identifier => |b| {
            p.printSpaceBeforeIdentifier();
            p.addSourceMapping(binding.loc);
            p.printSymbol(b.ref);
            if (may_have_module_info) {
                if (p.moduleInfo()) |mi| {
                    const local_name = p.renamer.nameForSymbol(b.ref);
                    const name_id = bun.handleOom(mi.str(local_name));
                    if (tlm.is_top_level) |vk| bun.handleOom(mi.addVar(name_id, vk));
                    if (tlm.is_export) bun.handleOom(mi.addExportInfoLocal(name_id, name_id));
                }
            }
        },
        .b_array => |b| {
            p.print("[");

@@ -3623,7 +3558,7 @@ fn NewPrinter(
    p.print("...");
}

p.printBinding(item.binding, tlm);
p.printBinding(item.binding);

p.maybePrintDefaultBindingValue(item);

@@ -3670,7 +3605,7 @@ fn NewPrinter(
p.print("]:");
p.printSpace();

p.printBinding(property.value, tlm);
p.printBinding(property.value);
p.maybePrintDefaultBindingValue(property);
continue;
}

@@ -3695,13 +3630,6 @@ fn NewPrinter(
switch (property.value.data) {
    .b_identifier => |id| {
        if (str.eql(string, p.renamer.nameForSymbol(id.ref))) {
            if (may_have_module_info) {
                if (p.moduleInfo()) |mi| {
                    const name_id = bun.handleOom(mi.str(str.data));
                    if (tlm.is_top_level) |vk| bun.handleOom(mi.addVar(name_id, vk));
                    if (tlm.is_export) bun.handleOom(mi.addExportInfoLocal(name_id, name_id));
                }
            }
            p.maybePrintDefaultBindingValue(property);
            continue;
        }

@@ -3719,14 +3647,6 @@ fn NewPrinter(
switch (property.value.data) {
    .b_identifier => |id| {
        if (strings.utf16EqlString(str.slice16(), p.renamer.nameForSymbol(id.ref))) {
            if (may_have_module_info) {
                if (p.moduleInfo()) |mi| {
                    const str8 = str.slice(p.options.allocator);
                    const name_id = bun.handleOom(mi.str(str8));
                    if (tlm.is_top_level) |vk| bun.handleOom(mi.addVar(name_id, vk));
                    if (tlm.is_export) bun.handleOom(mi.addExportInfoLocal(name_id, name_id));
                }
            }
            p.maybePrintDefaultBindingValue(property);
            continue;
        }

@@ -3746,7 +3666,7 @@ fn NewPrinter(
    p.printSpace();
}

p.printBinding(property.value, tlm);
p.printBinding(property.value);
p.maybePrintDefaultBindingValue(property);
}

@@ -3772,7 +3692,7 @@ fn NewPrinter(
    }
}

pub fn printStmt(p: *Printer, stmt: Stmt, tlmtlo: TopLevel) !void {
pub fn printStmt(p: *Printer, stmt: Stmt) !void {
    const prev_stmt_tag = p.prev_stmt_tag;

    defer {

@@ -3809,25 +3729,23 @@ fn NewPrinter(
}

p.addSourceMapping(name.loc);
const local_name = p.renamer.nameForSymbol(nameRef);
p.printIdentifier(local_name);
p.printSymbol(nameRef);
p.printFunc(s.func);

if (may_have_module_info) {
    if (p.moduleInfo()) |mi| {
        const name_id = bun.handleOom(mi.str(local_name));
        // function declarations are lexical (block-scoped in modules);
        // only record at true top-level, not inside blocks.
        if (tlmtlo.is_top_level == .yes) bun.handleOom(mi.addVar(name_id, .lexical));
        if (s.func.flags.contains(.is_export)) bun.handleOom(mi.addExportInfoLocal(name_id, name_id));
    }
}

// if (rewrite_esm_to_cjs and s.func.flags.contains(.is_export)) {
//     p.printSemicolonAfterStatement();
//     p.print("var ");
//     p.printSymbol(nameRef);
//     p.@"print = "();
//     p.printSymbol(nameRef);
//     p.printSemicolonAfterStatement();
// } else {
p.printNewline();
// }

if (rewrite_esm_to_cjs and s.func.flags.contains(.is_export)) {
    p.printIndent();
    p.printBundledExport(local_name, local_name);
    p.printBundledExport(p.renamer.nameForSymbol(nameRef), p.renamer.nameForSymbol(nameRef));
    p.printSemicolonAfterStatement();
}
},

@@ -3849,20 +3767,9 @@ fn NewPrinter(

p.print("class ");
p.addSourceMapping(s.class.class_name.?.loc);
const nameStr = p.renamer.nameForSymbol(nameRef);
p.printIdentifier(nameStr);
p.printSymbol(nameRef);
p.printClass(s.class);

if (may_have_module_info) {
    if (p.moduleInfo()) |mi| {
        const name_id = bun.handleOom(mi.str(nameStr));
        // class declarations are lexical (block-scoped in modules);
        // only record at true top-level, not inside blocks.
        if (tlmtlo.is_top_level == .yes) bun.handleOom(mi.addVar(name_id, .lexical));
        if (s.is_export) bun.handleOom(mi.addExportInfoLocal(name_id, name_id));
    }
}

if (rewrite_esm_to_cjs and s.is_export) {
    p.printSemicolonAfterStatement();
} else {

@@ -3898,13 +3805,6 @@ fn NewPrinter(
p.export_default_start = p.writer.written;
p.printExpr(expr, .comma, ExprFlag.None());
p.printSemicolonAfterStatement();

if (may_have_module_info) {
    if (p.moduleInfo()) |mi| {
        bun.handleOom(mi.addExportInfoLocal(bun.handleOom(mi.str("default")), .star_default));
        bun.handleOom(mi.addVar(.star_default, .lexical));
    }
}
return;
},

@@ -3925,44 +3825,26 @@ fn NewPrinter(
    p.maybePrintSpace();
}

const func_name: ?[]const u8 = if (func.func.name) |name| p.renamer.nameForSymbol(name.ref.?) else null;
if (func_name) |fn_name| {
    p.printIdentifier(fn_name);
if (func.func.name) |name| {
    p.printSymbol(name.ref.?);
}

p.printFunc(func.func);

if (may_have_module_info) {
    if (p.moduleInfo()) |mi| {
        const local_name: analyze_transpiled_module.StringID = if (func_name) |f| bun.handleOom(mi.str(f)) else .star_default;
        bun.handleOom(mi.addExportInfoLocal(bun.handleOom(mi.str("default")), local_name));
        bun.handleOom(mi.addVar(local_name, .lexical));
    }
}

p.printNewline();
},
.s_class => |class| {
    p.printSpaceBeforeIdentifier();

    const class_name: ?[]const u8 = if (class.class.class_name) |name| p.renamer.nameForSymbol(name.ref orelse Output.panic("Internal error: Expected class to have a name ref", .{})) else null;
    if (class.class.class_name) |name| {
        p.print("class ");
        p.printIdentifier(p.renamer.nameForSymbol(name.ref.?));
        p.printSymbol(name.ref orelse Output.panic("Internal error: Expected class to have a name ref", .{}));
    } else {
        p.print("class");
    }

    p.printClass(class.class);

    if (may_have_module_info) {
        if (p.moduleInfo()) |mi| {
            const local_name: analyze_transpiled_module.StringID = if (class_name) |f| bun.handleOom(mi.str(f)) else .star_default;
            bun.handleOom(mi.addExportInfoLocal(bun.handleOom(mi.str("default")), local_name));
            bun.handleOom(mi.addVar(local_name, .lexical));
        }
    }

    p.printNewline();
},
else => {

@@ -3993,21 +3875,8 @@ fn NewPrinter(
    p.printWhitespacer(ws("from "));
}

const irp = p.importRecord(s.import_record_index).path.text;
p.printImportRecordPath(p.importRecord(s.import_record_index));
p.printSemicolonAfterStatement();

if (may_have_module_info) {
    if (p.moduleInfo()) |mi| {
        const irp_id = bun.handleOom(mi.str(irp));
        bun.handleOom(mi.requestModule(irp_id, .none));
        if (s.alias) |alias| {
            bun.handleOom(mi.addExportInfoNamespace(bun.handleOom(mi.str(alias.original_name)), irp_id));
        } else {
            bun.handleOom(mi.addExportInfoStar(irp_id));
        }
    }
}
},
.s_export_clause => |s| {
    if (rewrite_esm_to_cjs) {

@@ -4157,14 +4026,7 @@ fn NewPrinter(
    p.printIndent();
}

const name = p.renamer.nameForSymbol(item.name.ref.?);
p.printExportClauseItem(item);

if (may_have_module_info) {
    if (p.moduleInfo()) |mi| {
        bun.handleOom(mi.addExportInfoLocal(bun.handleOom(mi.str(item.alias)), bun.handleOom(mi.str(name))));
    }
}
}

if (!s.is_single_line) {

@@ -4217,20 +4079,8 @@ fn NewPrinter(
}

p.printWhitespacer(ws("} from "));
const irp = import_record.path.text;
p.printImportRecordPath(import_record);
p.printSemicolonAfterStatement();

if (may_have_module_info) {
    if (p.moduleInfo()) |mi| {
        const irp_id = bun.handleOom(mi.str(irp));
        bun.handleOom(mi.requestModule(irp_id, .none));
        for (s.items) |item| {
            const name = p.renamer.nameForSymbol(item.name.ref.?);
            bun.handleOom(mi.addExportInfoIndirect(bun.handleOom(mi.str(item.alias)), bun.handleOom(mi.str(name)), irp_id));
        }
    }
}
},
.s_local => |s| {
    p.printIndent();

@@ -4238,42 +4088,41 @@ fn NewPrinter(
    p.addSourceMapping(stmt.loc);
    switch (s.kind) {
        .k_const => {
            p.printDeclStmt(s.is_export, "const", s.decls.slice(), tlmtlo);
            p.printDeclStmt(s.is_export, "const", s.decls.slice());
        },
        .k_let => {
            p.printDeclStmt(s.is_export, "let", s.decls.slice(), tlmtlo);
            p.printDeclStmt(s.is_export, "let", s.decls.slice());
        },
        .k_var => {
            p.printDeclStmt(s.is_export, "var", s.decls.slice(), tlmtlo);
            p.printDeclStmt(s.is_export, "var", s.decls.slice());
        },
        .k_using => {
            p.printDeclStmt(s.is_export, "using", s.decls.slice(), tlmtlo);
            p.printDeclStmt(s.is_export, "using", s.decls.slice());
        },
        .k_await_using => {
            p.printDeclStmt(s.is_export, "await using", s.decls.slice(), tlmtlo);
            p.printDeclStmt(s.is_export, "await using", s.decls.slice());
        },
    }
},
.s_if => |s| {
    p.printIndent();
    p.printIf(s, stmt.loc, tlmtlo.subVar());
    p.printIf(s, stmt.loc);
},
.s_do_while => |s| {
    p.printIndent();
    p.printSpaceBeforeIdentifier();
    p.addSourceMapping(stmt.loc);
    p.print("do");
    const sub_var = tlmtlo.subVar();
    switch (s.body.data) {
        .s_block => {
            p.printSpace();
            p.printBlock(s.body.loc, s.body.data.s_block.stmts, s.body.data.s_block.close_brace_loc, sub_var);
            p.printBlock(s.body.loc, s.body.data.s_block.stmts, s.body.data.s_block.close_brace_loc);
            p.printSpace();
        },
        else => {
            p.printNewline();
            p.indent();
            p.printStmt(s.body, sub_var) catch unreachable;
            p.printStmt(s.body) catch unreachable;
            p.printSemicolonIfNeeded();
            p.unindent();
            p.printIndent();

@@ -4301,7 +4150,7 @@ fn NewPrinter(
p.printSpace();
p.printExpr(s.value, .lowest, ExprFlag.None());
p.print(")");
p.printBody(s.body, tlmtlo.subVar());
p.printBody(s.body);
},
.s_for_of => |s| {
    p.printIndent();

@@ -4321,7 +4170,7 @@ fn NewPrinter(
p.printSpace();
p.printExpr(s.value, .comma, ExprFlag.None());
p.print(")");
p.printBody(s.body, tlmtlo.subVar());
p.printBody(s.body);
},
.s_while => |s| {
    p.printIndent();

@@ -4332,7 +4181,7 @@ fn NewPrinter(
p.print("(");
p.printExpr(s.test_, .lowest, ExprFlag.None());
p.print(")");
p.printBody(s.body, tlmtlo.subVar());
p.printBody(s.body);
},
.s_with => |s| {
    p.printIndent();

@@ -4343,7 +4192,7 @@ fn NewPrinter(
p.print("(");
p.printExpr(s.value, .lowest, ExprFlag.None());
p.print(")");
p.printBody(s.body, tlmtlo.subVar());
p.printBody(s.body);
},
.s_label => |s| {
    if (!p.options.minify_whitespace and p.options.indent.count > 0) {

@@ -4353,7 +4202,7 @@ fn NewPrinter(
p.addSourceMapping(stmt.loc);
p.printSymbol(s.name.ref orelse Output.panic("Internal error: expected label to have a name", .{}));
p.print(":");
p.printBody(s.stmt, tlmtlo.subVar());
p.printBody(s.stmt);
},
.s_try => |s| {
    p.printIndent();

@@ -4361,8 +4210,7 @@ fn NewPrinter(
p.addSourceMapping(stmt.loc);
p.print("try");
p.printSpace();
const sub_var_try = tlmtlo.subVar();
p.printBlock(s.body_loc, s.body, null, sub_var_try);
p.printBlock(s.body_loc, s.body, null);

if (s.catch_) |catch_| {
    p.printSpace();

@@ -4371,18 +4219,18 @@ fn NewPrinter(
if (catch_.binding) |binding| {
    p.printSpace();
    p.print("(");
    p.printBinding(binding, .{});
    p.printBinding(binding);
    p.print(")");
}
p.printSpace();
p.printBlock(catch_.body_loc, catch_.body, null, sub_var_try);
p.printBlock(catch_.body_loc, catch_.body, null);
}

if (s.finally) |finally| {
    p.printSpace();
    p.print("finally");
    p.printSpace();
    p.printBlock(finally.loc, finally.stmts, null, sub_var_try);
    p.printBlock(finally.loc, finally.stmts, null);
}

p.printNewline();

@@ -4413,7 +4261,7 @@ fn NewPrinter(
}

p.print(")");
p.printBody(s.body, tlmtlo.subVar());
p.printBody(s.body);
},
.s_switch => |s| {
    p.printIndent();

@@ -4445,12 +4293,11 @@ fn NewPrinter(

p.print(":");

const sub_var_case = tlmtlo.subVar();
if (c.body.len == 1) {
    switch (c.body[0].data) {
        .s_block => {
            p.printSpace();
            p.printBlock(c.body[0].loc, c.body[0].data.s_block.stmts, c.body[0].data.s_block.close_brace_loc, sub_var_case);
            p.printBlock(c.body[0].loc, c.body[0].data.s_block.stmts, c.body[0].data.s_block.close_brace_loc);
            p.printNewline();
            continue;
        },

@@ -4462,7 +4309,7 @@ fn NewPrinter(
p.indent();
for (c.body) |st| {
    p.printSemicolonIfNeeded();
    p.printStmt(st, sub_var_case) catch unreachable;
    p.printStmt(st) catch unreachable;
}
p.unindent();
}

@@ -4647,68 +4494,16 @@ fn NewPrinter(
.dataurl => p.printWhitespacer(ws(" with { type: \"dataurl\" }")),
.text => p.printWhitespacer(ws(" with { type: \"text\" }")),
.bunsh => p.printWhitespacer(ws(" with { type: \"sh\" }")),
// sqlite_embedded is only relevant when bundling
.sqlite, .sqlite_embedded => p.printWhitespacer(ws(" with { type: \"sqlite\" }")),
.html => p.printWhitespacer(ws(" with { type: \"html\" }")),
.md => p.printWhitespacer(ws(" with { type: \"md\" }")),
};
p.printSemicolonAfterStatement();

if (may_have_module_info) {
    if (p.moduleInfo()) |mi| {
        const import_record_path = record.path.text;
        const irp_id = bun.handleOom(mi.str(import_record_path));
        const fetch_parameters: analyze_transpiled_module.ModuleInfo.FetchParameters = if (comptime is_bun_platform) (if (record.loader) |loader| switch (loader) {
            .json => .json,
            .jsx => analyze_transpiled_module.ModuleInfo.FetchParameters.hostDefined(bun.handleOom(mi.str("jsx"))),
            .js => analyze_transpiled_module.ModuleInfo.FetchParameters.hostDefined(bun.handleOom(mi.str("js"))),
            .ts => analyze_transpiled_module.ModuleInfo.FetchParameters.hostDefined(bun.handleOom(mi.str("ts"))),
            .tsx => analyze_transpiled_module.ModuleInfo.FetchParameters.hostDefined(bun.handleOom(mi.str("tsx"))),
            .css => analyze_transpiled_module.ModuleInfo.FetchParameters.hostDefined(bun.handleOom(mi.str("css"))),
            .file => analyze_transpiled_module.ModuleInfo.FetchParameters.hostDefined(bun.handleOom(mi.str("file"))),
            .jsonc => analyze_transpiled_module.ModuleInfo.FetchParameters.hostDefined(bun.handleOom(mi.str("jsonc"))),
            .toml => analyze_transpiled_module.ModuleInfo.FetchParameters.hostDefined(bun.handleOom(mi.str("toml"))),
            .yaml => analyze_transpiled_module.ModuleInfo.FetchParameters.hostDefined(bun.handleOom(mi.str("yaml"))),
            .wasm => analyze_transpiled_module.ModuleInfo.FetchParameters.hostDefined(bun.handleOom(mi.str("wasm"))),
            .napi => analyze_transpiled_module.ModuleInfo.FetchParameters.hostDefined(bun.handleOom(mi.str("napi"))),
            .base64 => analyze_transpiled_module.ModuleInfo.FetchParameters.hostDefined(bun.handleOom(mi.str("base64"))),
            .dataurl => analyze_transpiled_module.ModuleInfo.FetchParameters.hostDefined(bun.handleOom(mi.str("dataurl"))),
            .text => analyze_transpiled_module.ModuleInfo.FetchParameters.hostDefined(bun.handleOom(mi.str("text"))),
            .bunsh => analyze_transpiled_module.ModuleInfo.FetchParameters.hostDefined(bun.handleOom(mi.str("sh"))),
            .sqlite, .sqlite_embedded => analyze_transpiled_module.ModuleInfo.FetchParameters.hostDefined(bun.handleOom(mi.str("sqlite"))),
            .html => analyze_transpiled_module.ModuleInfo.FetchParameters.hostDefined(bun.handleOom(mi.str("html"))),
            .json5 => analyze_transpiled_module.ModuleInfo.FetchParameters.hostDefined(bun.handleOom(mi.str("json5"))),
            .md => analyze_transpiled_module.ModuleInfo.FetchParameters.hostDefined(bun.handleOom(mi.str("md"))),
        } else .none) else .none;
        bun.handleOom(mi.requestModule(irp_id, fetch_parameters));

        if (s.default_name) |name| {
            const local_name = p.renamer.nameForSymbol(name.ref.?);
            const local_name_id = bun.handleOom(mi.str(local_name));
            bun.handleOom(mi.addVar(local_name_id, .lexical));
            bun.handleOom(mi.addImportInfoSingle(irp_id, bun.handleOom(mi.str("default")), local_name_id, false));
        }

        for (s.items) |item| {
            const local_name = p.renamer.nameForSymbol(item.name.ref.?);
            const local_name_id = bun.handleOom(mi.str(local_name));
            bun.handleOom(mi.addVar(local_name_id, .lexical));
            // In bundled output, all surviving imports are value imports
            // (tree-shaking already removed type-only ones). The finalize()
            // step handles re-export type-script conversion separately.
            bun.handleOom(mi.addImportInfoSingle(irp_id, bun.handleOom(mi.str(item.alias)), local_name_id, false));
        }

        if (record.flags.contains_import_star) {
            const local_name = p.renamer.nameForSymbol(s.namespace_ref);
            bun.handleOom(mi.addVar(bun.handleOom(mi.str(local_name)), .lexical));
            bun.handleOom(mi.addImportInfoNamespace(irp_id, bun.handleOom(mi.str(local_name))));
        }
    }
}
},
.s_block => |s| {
    p.printIndent();
    p.printBlock(stmt.loc, s.stmts, s.close_brace_loc, tlmtlo.subVar());
    p.printBlock(stmt.loc, s.stmts, s.close_brace_loc);
    p.printNewline();
},
.s_debugger => {

@@ -4984,19 +4779,19 @@ fn NewPrinter(
.s_local => |s| {
    switch (s.kind) {
        .k_var => {
            p.printDecls("var", s.decls.slice(), ExprFlag.Set.init(.{ .forbid_in = true }), .{});
            p.printDecls("var", s.decls.slice(), ExprFlag.Set.init(.{ .forbid_in = true }));
        },
        .k_let => {
            p.printDecls("let", s.decls.slice(), ExprFlag.Set.init(.{ .forbid_in = true }), .{});
            p.printDecls("let", s.decls.slice(), ExprFlag.Set.init(.{ .forbid_in = true }));
        },
        .k_const => {
            p.printDecls("const", s.decls.slice(), ExprFlag.Set.init(.{ .forbid_in = true }), .{});
            p.printDecls("const", s.decls.slice(), ExprFlag.Set.init(.{ .forbid_in = true }));
        },
        .k_using => {
            p.printDecls("using", s.decls.slice(), ExprFlag.Set.init(.{ .forbid_in = true }), .{});
            p.printDecls("using", s.decls.slice(), ExprFlag.Set.init(.{ .forbid_in = true }));
        },
        .k_await_using => {
            p.printDecls("await using", s.decls.slice(), ExprFlag.Set.init(.{ .forbid_in = true }), .{});
            p.printDecls("await using", s.decls.slice(), ExprFlag.Set.init(.{ .forbid_in = true }));
        },
    }
},

@@ -5007,7 +4802,7 @@ fn NewPrinter(
    },
}
}
pub fn printIf(p: *Printer, s: *const S.If, loc: logger.Loc, tlmtlo: TopLevel) void {
pub fn printIf(p: *Printer, s: *const S.If, loc: logger.Loc) void {
    p.printSpaceBeforeIdentifier();
    p.addSourceMapping(loc);
    p.print("if");

@@ -5019,7 +4814,7 @@ fn NewPrinter(
switch (s.yes.data) {
    .s_block => |block| {
        p.printSpace();
        p.printBlock(s.yes.loc, block.stmts, block.close_brace_loc, tlmtlo);
        p.printBlock(s.yes.loc, block.stmts, block.close_brace_loc);

        if (s.no != null) {
            p.printSpace();

@@ -5034,7 +4829,7 @@ fn NewPrinter(
p.printNewline();

p.indent();
p.printStmt(s.yes, tlmtlo) catch unreachable;
p.printStmt(s.yes) catch unreachable;
p.unindent();
p.needs_semicolon = false;

@@ -5049,7 +4844,7 @@ fn NewPrinter(
} else {
    p.printNewline();
    p.indent();
    p.printStmt(s.yes, tlmtlo) catch unreachable;
    p.printStmt(s.yes) catch unreachable;
    p.unindent();

    if (s.no != null) {

@@ -5068,16 +4863,16 @@ fn NewPrinter(
switch (no_block.data) {
    .s_block => {
        p.printSpace();
        p.printBlock(no_block.loc, no_block.data.s_block.stmts, null, tlmtlo);
        p.printBlock(no_block.loc, no_block.data.s_block.stmts, null);
        p.printNewline();
    },
    .s_if => {
        p.printIf(no_block.data.s_if, no_block.loc, tlmtlo);
        p.printIf(no_block.data.s_if, no_block.loc);
    },
    else => {
        p.printNewline();
        p.indent();
        p.printStmt(no_block, tlmtlo) catch unreachable;
        p.printStmt(no_block) catch unreachable;
        p.unindent();
    },
}

@@ -5158,20 +4953,11 @@ fn NewPrinter(
    }
}

pub fn printDeclStmt(p: *Printer, is_export: bool, comptime keyword: string, decls: []G.Decl, tlmtlo: TopLevel) void {
pub fn printDeclStmt(p: *Printer, is_export: bool, comptime keyword: string, decls: []G.Decl) void {
    if (!rewrite_esm_to_cjs and is_export) {
        p.print("export ");
    }
    const tlm: TopLevelAndIsExport = if (may_have_module_info) .{
        .is_export = is_export,
        .is_top_level = if (comptime strings.eqlComptime(keyword, "var"))
            (if (tlmtlo.isTopLevel()) .declared else null)
        else
            // let/const are block-scoped: only record at true top-level,
            // not inside blocks where subVar() downgrades to .var_only.
            (if (tlmtlo.is_top_level == .yes) .lexical else null),
    } else .{};
    p.printDecls(keyword, decls, ExprFlag.None(), tlm);
    p.printDecls(keyword, decls, ExprFlag.None());
    p.printSemicolonAfterStatement();
    if (rewrite_esm_to_cjs and is_export and decls.len > 0) {
        for (decls) |decl| {

@@ -5216,7 +5002,7 @@ fn NewPrinter(
    p.print("}");
},
else => {
    p.printBinding(decl.binding, .{});
    p.printBinding(decl.binding);
},
}
p.print(")");

@@ -5549,7 +5335,7 @@ fn NewPrinter(
p.printFnArgs(func.open_parens_loc, func.args, func.flags.contains(.has_rest_arg), false);
p.print(" => {\n");
p.indent();
p.printBlockBody(func.body.stmts, TopLevel.init(.no));
p.printBlockBody(func.body.stmts);
p.unindent();
p.printIndent();
p.print("}, ");

@@ -6006,9 +5792,6 @@ pub fn printAst(
    }
}
printer.was_lazy_export = tree.has_lazy_export;
if (PrinterType.may_have_module_info) {
    printer.module_info = opts.module_info;
}
var bin_stack_heap = std.heap.stackFallback(1024, bun.default_allocator);
printer.binary_expression_stack = std.array_list.Managed(PrinterType.BinaryExpressionVisitor).init(bin_stack_heap.get());
defer printer.binary_expression_stack.clearAndFree();

@@ -6030,18 +5813,11 @@ pub fn printAst(
// This is never a symbol collision because `uses_require_ref` means
// `require` must be an unbound variable.
printer.print("var {require}=import.meta;");

if (PrinterType.may_have_module_info) {
    if (printer.moduleInfo()) |mi| {
        mi.flags.contains_import_meta = true;
        bun.handleOom(mi.addVar(bun.handleOom(mi.str("require")), .declared));
    }
}
}

for (tree.parts.slice()) |part| {
    for (part.stmts) |stmt| {
        try printer.printStmt(stmt, PrinterType.TopLevel.init(.yes));
        try printer.printStmt(stmt);
        if (printer.writer.getError()) {} else |err| {
            return err;
        }

@@ -6049,30 +5825,26 @@ pub fn printAst(
    }
}

const have_module_info = PrinterType.may_have_module_info and opts.module_info != null;
if (have_module_info) {
    try opts.module_info.?.finalize();
}

var source_maps_chunk: ?SourceMap.Chunk = if (comptime generate_source_map)
    if (opts.source_map_handler != null)
        printer.source_map_builder.generateChunk(printer.writer.ctx.getWritten())
    else
        null
else
    null;
defer if (source_maps_chunk) |*chunk| chunk.deinit();

if (opts.runtime_transpiler_cache) |cache| {
    var srlz_res = std.array_list.Managed(u8).init(bun.default_allocator);
    defer srlz_res.deinit();
    if (have_module_info) try opts.module_info.?.asDeserialized().serialize(srlz_res.writer());
    cache.put(printer.writer.ctx.getWritten(), if (source_maps_chunk) |chunk| chunk.buffer.list.items else "", srlz_res.items);
}

if (comptime generate_source_map) {
if (comptime FeatureFlags.runtime_transpiler_cache and generate_source_map) {
    if (opts.source_map_handler) |handler| {
        try handler.onSourceMapChunk(source_maps_chunk.?, source);
        var source_maps_chunk = printer.source_map_builder.generateChunk(printer.writer.ctx.getWritten());
        if (opts.runtime_transpiler_cache) |cache| {
            cache.put(printer.writer.ctx.getWritten(), source_maps_chunk.buffer.list.items);
        }

        defer source_maps_chunk.deinit();

        try handler.onSourceMapChunk(source_maps_chunk, source);
    } else {
        if (opts.runtime_transpiler_cache) |cache| {
            cache.put(printer.writer.ctx.getWritten(), "");
        }
    }
} else if (comptime generate_source_map) {
    if (opts.source_map_handler) |handler| {
        var chunk = printer.source_map_builder.generateChunk(printer.writer.ctx.getWritten());
        defer chunk.deinit();
        try handler.onSourceMapChunk(chunk, source);
    }
}

@@ -6214,9 +5986,6 @@ pub fn printWithWriterAndPlatform(
    getSourceMapBuilder(if (generate_source_maps) .eager else .disable, is_bun_platform, opts, source, &ast),
);
printer.was_lazy_export = ast.has_lazy_export;
if (PrinterType.may_have_module_info) {
    printer.module_info = opts.module_info;
}
var bin_stack_heap = std.heap.stackFallback(1024, bun.default_allocator);
printer.binary_expression_stack = std.array_list.Managed(PrinterType.BinaryExpressionVisitor).init(bin_stack_heap.get());
defer printer.binary_expression_stack.clearAndFree();

@@ -6235,7 +6004,7 @@ pub fn printWithWriterAndPlatform(

for (parts) |part| {
    for (part.stmts) |stmt| {
        printer.printStmt(stmt, PrinterType.TopLevel.init(.yes)) catch |err| {
        printer.printStmt(stmt) catch |err| {
            return .{ .err = err };
        };
        if (printer.writer.getError()) {} else |err| {

@@ -6305,7 +6074,7 @@ pub fn printCommonJS(

for (tree.parts.slice()) |part| {
    for (part.stmts) |stmt| {
        try printer.printStmt(stmt, PrinterType.TopLevel.init(.yes));
        try printer.printStmt(stmt);
        if (printer.writer.getError()) {} else |err| {
            return err;
        }

@@ -6329,24 +6098,9 @@ pub fn printCommonJS(
    return @as(usize, @intCast(@max(printer.writer.written, 0)));
}

/// Serializes ModuleInfo to an owned byte slice. Returns null on failure.
/// The caller is responsible for freeing the returned slice with bun.default_allocator.
pub fn serializeModuleInfo(module_info: ?*analyze_transpiled_module.ModuleInfo) ?[]const u8 {
    const mi = module_info orelse return null;
    if (!mi.finalized) {
        mi.finalize() catch return null;
    }
    const deserialized = mi.asDeserialized();
    var buf: std.ArrayList(u8) = .empty;
    defer buf.deinit(bun.default_allocator);
    deserialized.serialize(buf.writer(bun.default_allocator)) catch return null;
    return buf.toOwnedSlice(bun.default_allocator) catch null;
}

const string = []const u8;

const SourceMap = @import("./sourcemap/sourcemap.zig");
const analyze_transpiled_module = @import("./analyze_transpiled_module.zig");
const fs = @import("./fs.zig");
const importRecord = @import("./import_record.zig");
const options = @import("./options.zig");

@@ -783,7 +783,6 @@ pub const Transpiler = struct {
    comptime enable_source_map: bool,
    source_map_context: ?js_printer.SourceMapHandler,
    runtime_transpiler_cache: ?*bun.jsc.RuntimeTranspilerCache,
    module_info: ?*analyze_transpiled_module.ModuleInfo,
) !usize {
    const tracer = if (enable_source_map)
        bun.perf.trace("JSPrinter.printWithSourceMap")

@@ -873,7 +872,6 @@ pub const Transpiler = struct {
    .inline_require_and_import_errors = false,
    .import_meta_ref = ast.import_meta_ref,
    .runtime_transpiler_cache = runtime_transpiler_cache,
    .module_info = module_info,
    .target = transpiler.options.target,
    .print_dce_annotations = transpiler.options.emit_dce_annotations,
    .hmr_ref = ast.wrapper_ref,

@@ -902,7 +900,6 @@ pub const Transpiler = struct {
    false,
    null,
    null,
    null,
);
}

@@ -913,7 +910,6 @@ pub const Transpiler = struct {
    writer: Writer,
    comptime format: js_printer.Format,
    handler: js_printer.SourceMapHandler,
    module_info: ?*analyze_transpiled_module.ModuleInfo,
) !usize {
    if (bun.feature_flag.BUN_FEATURE_FLAG_DISABLE_SOURCE_MAPS.get()) {
        return transpiler.printWithSourceMapMaybe(

@@ -925,7 +921,6 @@ pub const Transpiler = struct {
    false,
    handler,
    result.runtime_transpiler_cache,
    module_info,
);
}
return transpiler.printWithSourceMapMaybe(

@@ -937,7 +932,6 @@ pub const Transpiler = struct {
    true,
    handler,
    result.runtime_transpiler_cache,
    module_info,
);
}

@@ -1627,7 +1621,6 @@ const Fs = @import("./fs.zig");
const MimeType = @import("./http/MimeType.zig");
const NodeFallbackModules = @import("./node_fallbacks.zig");
const Router = @import("./router.zig");
const analyze_transpiled_module = @import("./analyze_transpiled_module.zig");
const runtime = @import("./runtime.zig");
const std = @import("std");
const DataURL = @import("./resolver/data_url.zig").DataURL;

@@ -1,8 +1,7 @@
import { Database } from "bun:sqlite";
import { describe, expect, test } from "bun:test";
import { rmSync } from "fs";
import { bunEnv, bunExe, isWindows, tempDir, tempDirWithFiles } from "harness";
import { join } from "path";
import { bunEnv, bunExe, isWindows, tempDirWithFiles } from "harness";
import { itBundled } from "./expectBundled";

describe("bundler", () => {

@@ -90,135 +89,6 @@ describe("bundler", () => {
      },
    },
  });
  // ESM bytecode test matrix: each scenario × {default, minified} = 2 tests per scenario.
  // With --compile, static imports are inlined into one chunk, but dynamic imports
  // create separate modules in the standalone graph — each with its own bytecode + ModuleInfo.
  const esmBytecodeScenarios: Array<{
    name: string;
    files: Record<string, string>;
    stdout: string;
  }> = [
    {
      name: "HelloWorld",
      files: {
        "/entry.ts": `console.log("Hello, world!");`,
      },
      stdout: "Hello, world!",
    },
    {
      // top-level await is ESM-only; if ModuleInfo or bytecode generation
      // mishandles async modules, this breaks.
      name: "TopLevelAwait",
      files: {
        "/entry.ts": `
          const result = await Promise.resolve("tla works");
          console.log(result);
        `,
      },
      stdout: "tla works",
    },
    {
      // import.meta is ESM-only.
      name: "ImportMeta",
      files: {
        "/entry.ts": `
          console.log(typeof import.meta.url === "string" ? "ok" : "fail");
          console.log(typeof import.meta.dir === "string" ? "ok" : "fail");
        `,
      },
      stdout: "ok\nok",
    },
    {
      // Dynamic import creates a separate module in the standalone graph,
      // exercising per-module bytecode + ModuleInfo.
      name: "DynamicImport",
      files: {
        "/entry.ts": `
          const { value } = await import("./lazy.ts");
          console.log("lazy:", value);
        `,
        "/lazy.ts": `export const value = 42;`,
      },
      stdout: "lazy: 42",
    },
    {
      // Dynamic import of a module that itself uses top-level await.
      // The dynamically imported module is a separate chunk with async
      // evaluation — stresses both ModuleInfo and async bytecode loading.
      name: "DynamicImportTLA",
      files: {
        "/entry.ts": `
          const mod = await import("./async-mod.ts");
          console.log("value:", mod.value);
        `,
        "/async-mod.ts": `export const value = await Promise.resolve(99);`,
      },
      stdout: "value: 99",
    },
    {
      // Multiple dynamic imports: several separate modules in the graph,
      // each with its own bytecode + ModuleInfo.
      name: "MultipleDynamicImports",
      files: {
        "/entry.ts": `
          const [a, b] = await Promise.all([
            import("./mod-a.ts"),
            import("./mod-b.ts"),
          ]);
          console.log(a.value, b.value);
        `,
        "/mod-a.ts": `export const value = "a";`,
        "/mod-b.ts": `export const value = "b";`,
      },
      stdout: "a b",
    },
  ];

  for (const scenario of esmBytecodeScenarios) {
    for (const minify of [false, true]) {
      itBundled(`compile/ESMBytecode+${scenario.name}${minify ? "+minify" : ""}`, {
        compile: true,
        bytecode: true,
        format: "esm",
        ...(minify && {
          minifySyntax: true,
          minifyIdentifiers: true,
          minifyWhitespace: true,
        }),
        files: scenario.files,
        run: { stdout: scenario.stdout },
      });
    }
  }

  // Multi-entry ESM bytecode with Worker (can't be in the matrix — needs
  // entryPointsRaw, outfile, setCwd). Each entry becomes a separate module
  // in the standalone graph with its own bytecode + ModuleInfo.
  itBundled("compile/WorkerBytecodeESM", {
    backend: "cli",
    compile: true,
    bytecode: true,
    format: "esm",
    files: {
      "/entry.ts": /* js */ `
        import {rmSync} from 'fs';
        // Verify we're not just importing from the filesystem
        rmSync("./worker.ts", {force: true});
        console.log("Hello, world!");
        new Worker("./worker.ts");
      `,
      "/worker.ts": /* js */ `
        console.log("Worker loaded!");
      `.trim(),
    },
    entryPointsRaw: ["./entry.ts", "./worker.ts"],
    outfile: "dist/out",
    run: {
      stdout: "Hello, world!\nWorker loaded!\n",
      file: "dist/out",
      setCwd: true,
    },
  });
  // https://github.com/oven-sh/bun/issues/8697
  itBundled("compile/EmbeddedFileOutfile", {
    compile: true,

@@ -441,8 +311,6 @@ describe("bundler", () => {
    format: "cjs" | "esm";
  }> = [
    { bytecode: true, minify: true, format: "cjs" },
    { bytecode: true, format: "esm" },
    { bytecode: true, minify: true, format: "esm" },
    { format: "cjs" },
    { format: "cjs", minify: true },
    { format: "esm" },

@@ -868,54 +736,6 @@ const server = serve({
    .throws(true);
});

// Verify ESM bytecode is actually loaded from the cache at runtime, not just generated.
// Uses regex matching on stderr (not itBundled) since we don't know the exact
// number of cache hit/miss lines for ESM standalone.
test("ESM bytecode cache is used at runtime", async () => {
  const ext = isWindows ? ".exe" : "";
  using dir = tempDir("esm-bytecode-cache", {
    "entry.js": `console.log("esm bytecode loaded");`,
  });

  const outfile = join(String(dir), `app${ext}`);

  // Build with ESM + bytecode
  await using build = Bun.spawn({
    cmd: [
      bunExe(),
      "build",
      "--compile",
      "--bytecode",
      "--format=esm",
      join(String(dir), "entry.js"),
      "--outfile",
      outfile,
    ],
    env: bunEnv,
    stdout: "pipe",
    stderr: "pipe",
  });

  const [, buildStderr, buildExitCode] = await Promise.all([build.stdout.text(), build.stderr.text(), build.exited]);

  expect(buildStderr).toBe("");
  expect(buildExitCode).toBe(0);

  // Run with verbose disk cache to verify bytecode is loaded
  await using exe = Bun.spawn({
    cmd: [outfile],
    env: { ...bunEnv, BUN_JSC_verboseDiskCache: "1" },
    stdout: "pipe",
    stderr: "pipe",
  });

  const [exeStdout, exeStderr, exeExitCode] = await Promise.all([exe.stdout.text(), exe.stderr.text(), exe.exited]);

  expect(exeStdout).toContain("esm bytecode loaded");
  expect(exeStderr).toMatch(/\[Disk Cache\].*Cache hit/i);
  expect(exeExitCode).toBe(0);
});

// When compiling with 8+ entry points, the main entry point should still run correctly.
test("compile with 8+ entry points runs main entry correctly", async () => {
  const dir = tempDirWithFiles("compile-many-entries", {

@@ -36,30 +36,5 @@ describe("bundler", () => {
      stdout: "app entry\nheader rendering\nmenu showing\nitems: home,about,contact",
    },
  });

  for (const minify of [false, true]) {
    itBundled(`compile/splitting/ImportMetaInSplitChunk${minify ? "+minify" : ""}`, {
      compile: true,
      splitting: true,
      bytecode: true,
      format: "esm",
      ...(minify ? { minifySyntax: true, minifyIdentifiers: true, minifyWhitespace: true } : {}),
      files: {
        "/entry.ts": /* js */ `
          const mod = await import("./worker.ts");
          mod.run();
        `,
        "/worker.ts": /* js */ `
          export function run() {
            console.log(typeof import.meta.url === "string" ? "ok" : "fail");
            console.log(typeof import.meta.dir === "string" ? "ok" : "fail");
          }
        `,
      },
      run: {
        stdout: "ok\nok",
      },
    });
  }
});
});

@@ -163,8 +163,8 @@ describe.skipIf(!isWindows).concurrent("Windows compile metadata", () => {
  const [stderr, exitCode] = await Promise.all([proc.stderr.text(), proc.exited]);

  expect(exitCode).not.toBe(0);
  // Windows flags require a Windows compile target
  expect(stderr.toLowerCase()).toContain("windows compile target");
  // When cross-compiling to non-Windows, it tries to download the target but fails
  expect(stderr.toLowerCase()).toContain("target platform");
});
});

File diff suppressed because it is too large
@@ -1,4 +1,4 @@
import { expect, test } from "bun:test";
import { expect, test } from "bun:test" with { todo: "true" };
import "reflect-metadata";
function Abc() {
  return (target: any, field: string) => {};

@@ -1,112 +1,7 @@
import type { Socket } from "bun";
import { setSocketOptions } from "bun:internal-for-testing";
import { describe, expect, test } from "bun:test";
import { bunEnv, bunExe, isPosix } from "harness";

describe.if(isPosix)("HTTP server handles chunked transfer encoding", () => {
  test("handles fragmented chunk terminators", async () => {
    const script = `
      const server = Bun.serve({
        port: 0,
        async fetch(req) {
          const body = await req.text();
          return new Response("Got: " + body);
        },
      });
      const { promise, resolve } = Promise.withResolvers();
      const socket = await Bun.connect({
        hostname: "localhost",
        port: server.port,
        socket: {
          data(socket, data) {
            console.log(data.toString());
            socket.end();
          },
          open(socket) {
            socket.write("POST / HTTP/1.1\\r\\nHost: localhost\\r\\nTransfer-Encoding: chunked\\r\\n\\r\\n4\\r\\nWiki\\r");
            socket.flush();
            setTimeout(() => {
              socket.write("\\n0\\r\\n\\r\\n");
              socket.flush();
            }, 50);
          },
          error() {},
          close() { resolve(); },
        },
      });
      await promise;
      server.stop();
    `;

    await using proc = Bun.spawn({
      cmd: [bunExe(), "-e", script],
      env: bunEnv,
      stdout: "pipe",
      stderr: "pipe",
    });

    const [stdout, stderr, exitCode] = await Promise.all([
      new Response(proc.stdout).text(),
      new Response(proc.stderr).text(),
      proc.exited,
    ]);

    expect(stdout).toContain("200 OK");
    expect(stdout).toContain("Got: Wiki");
    expect(exitCode).toBe(0);
  });
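  // Illustrative sketch (not part of the original test file): what the two writes above
  // put on the wire. Chunked encoding frames each chunk as "<hex length>\r\n<data>\r\n"
  // and terminates the body with "0\r\n\r\n".
  const firstSegment = "4\r\nWiki\r"; // length 4, data "Wiki", terminator split after "\r"
  const secondSegment = "\n0\r\n\r\n"; // rest of the terminator, then the final zero-length chunk
  // A correct parser buffers the lone "\r" and completes "\r\n" on the next read, so
  // firstSegment + secondSegment decodes to the body "Wiki". The next test replaces the
  // "\r" with "X" ("4\r\nTestX"), which can never complete a valid terminator, so the
  // server must answer 400.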

  test("rejects invalid terminator in fragmented reads", async () => {
    const script = `
      const server = Bun.serve({
        port: 0,
        async fetch(req) {
          const body = await req.text();
          return new Response("Got: " + body);
        },
      });
      const { promise, resolve } = Promise.withResolvers();
      const socket = await Bun.connect({
        hostname: "localhost",
        port: server.port,
        socket: {
          data(socket, data) {
            console.log(data.toString());
            socket.end();
          },
          open(socket) {
            socket.write("POST / HTTP/1.1\\r\\nHost: localhost\\r\\nTransfer-Encoding: chunked\\r\\n\\r\\n4\\r\\nTestX");
            socket.flush();
            setTimeout(() => {
              socket.write("\\n0\\r\\n\\r\\n");
              socket.flush();
            }, 50);
          },
          error() {},
          close() { resolve(); },
        },
      });
      await promise;
      server.stop();
    `;

    await using proc = Bun.spawn({
      cmd: [bunExe(), "-e", script],
      env: bunEnv,
      stdout: "pipe",
      stderr: "pipe",
    });

    const [stdout, stderr, exitCode] = await Promise.all([
      new Response(proc.stdout).text(),
      new Response(proc.stderr).text(),
      proc.exited,
    ]);

    expect(stdout).toContain("400");
    expect(exitCode).toBe(0);
  });
});
import { describe, test } from "bun:test";
import { isPosix } from "harness";

describe.if(isPosix)("HTTP server handles fragmented requests", () => {
  test("handles requests with tiny send buffer (regression test)", async () => {

@@ -1,499 +0,0 @@
import { describe, expect, test } from "bun:test";
import { bunEnv, bunExe, isWindows, tempDirWithFiles } from "harness";

const ext = isWindows ? ".exe" : "";

function compileAndRun(dir: string, entrypoint: string) {
  const outfile = dir + `/compiled${ext}`;
  const buildResult = Bun.spawnSync({
    cmd: [bunExe(), "build", "--compile", "--bytecode", "--format=esm", entrypoint, "--outfile", outfile],
    env: bunEnv,
    cwd: dir,
    stdio: ["inherit", "pipe", "pipe"],
  });
  expect(buildResult.stderr.toString()).toBe("");
  expect(buildResult.exitCode).toBe(0);

  return Bun.spawnSync({
    cmd: [outfile],
    env: bunEnv,
    cwd: dir,
    stdio: ["inherit", "pipe", "pipe"],
  });
}

const a_file = `
export type my_string = "1";

export type my_value = "2";
export const my_value = "2";

export const my_only = "3";
`;

const a_no_value = `
export type my_string = "1";
export type my_value = "2";
export const my_only = "3";
`;

const a_with_value = `
export type my_string = "1";
export const my_value = "2";
`;

const b_files = [
  {
    name: "export from",
    value: `export { my_string, my_value, my_only } from "./a.ts";`,
  },
  {
    name: "import then export",
    value: `
      import { my_string, my_value, my_only } from "./a.ts";
      export { my_string, my_value, my_only };
    `,
  },
  {
    name: "export star",
    value: `export * from "./a.ts";`,
  },
  {
    name: "export merge",
    value: `export * from "./a_no_value.ts"; export * from "./a_with_value.ts"`,
  },
];

const c_files = [
  { name: "require", value: `console.log(JSON.stringify(require("./b")));` },
  { name: "import star", value: `import * as b from "./b"; console.log(JSON.stringify(b));` },
  { name: "await import", value: `console.log(JSON.stringify(await import("./b")));` },
  {
    name: "import individual",
    value: `
      import { my_string, my_value, my_only } from "./b";
      console.log(JSON.stringify({ my_only, my_value }));
    `,
  },
];

for (const b_file of b_files) {
  describe(`re-export with ${b_file.name}`, () => {
    for (const c_file of c_files) {
      describe(`import with ${c_file.name}`, () => {
        const dir = tempDirWithFiles("type-export", {
          "a.ts": a_file,
          "b.ts": b_file.value,
          "c.ts": c_file.value,
          "a_no_value.ts": a_no_value,
          "a_with_value.ts": a_with_value,
        });

        describe.each(["run", "compile", "build"])("%s", mode => {
          // TODO: "run" is skipped until ESM module_info is enabled in the runtime transpiler.
          // Currently module_info is only generated for standalone ESM bytecode (--compile).
          // Once enabled, flip this to include "run".
          test.skipIf(mode === "run")("works", async () => {
            let result: Bun.SyncSubprocess<"pipe", "inherit"> | Bun.SyncSubprocess<"pipe", "pipe">;
            if (mode === "compile") {
              result = compileAndRun(dir, dir + "/c.ts");
            } else if (mode === "build") {
              const build_result = await Bun.build({
                entrypoints: [dir + "/c.ts"],
                outdir: dir + "/dist",
              });
              expect(build_result.success).toBe(true);
              result = Bun.spawnSync({
                cmd: [bunExe(), "run", dir + "/dist/c.js"],
                cwd: dir,
                env: bunEnv,
                stdio: ["inherit", "pipe", "inherit"],
              });
            } else {
              result = Bun.spawnSync({
                cmd: [bunExe(), "run", "c.ts"],
                cwd: dir,
                env: bunEnv,
                stdio: ["inherit", "pipe", "inherit"],
              });
            }

            const parsedOutput = JSON.parse(result.stdout.toString().trim());
            expect(parsedOutput).toEqual({ my_value: "2", my_only: "3" });
            expect(result.exitCode).toBe(0);
          });
        });
      });
    }
  });
}
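
// Illustrative sketch (not part of the original file): why the matrix above expects
// { my_value: "2", my_only: "3" }. TypeScript erases type-only exports, so of a.ts's
// four exports only the value bindings exist at runtime:
//   export type my_string = "1";   // type only — erased
//   export type my_value = "2";    // type half of a merged name — erased
//   export const my_value = "2";   // value half — survives
//   export const my_only = "3";    // survives
// Every b.ts re-export strategy therefore forwards exactly those two names:
const erasedShape = { my_value: "2", my_only: "3" }; // what any c.ts consumer observes
console.log(Object.keys(erasedShape).sort()); // ["my_only", "my_value"] — no my_string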

describe("import not found", () => {
  for (const [ccase, target_value, name] of [
    [``, /SyntaxError: Export named 'not_found' not found in module '[^']+?'\./, "none"],
    [
      `export default function not_found() {};`,
      /SyntaxError: Export named 'not_found' not found in module '[^']+?'\. Did you mean to import default\?/,
      "default with same name",
    ],
    [
      `export type not_found = "not_found";`,
      /SyntaxError: Export named 'not_found' not found in module '[^']+?'\./,
      "type",
    ],
  ] as const)
    test(`${name}`, () => {
      const dir = tempDirWithFiles("type-export", {
        "a.ts": ccase,
        "b.ts": /*js*/ `
          import { not_found } from "./a";
          console.log(not_found);
        `,
        "nf.ts": "",
      });

      const result = Bun.spawnSync({
        cmd: [bunExe(), "run", "b.ts"],
        cwd: dir,
        env: bunEnv,
        stdio: ["inherit", "pipe", "pipe"],
      });

      expect(result.stderr?.toString().trim()).toMatch(target_value);
      expect({
        exitCode: result.exitCode,
        stdout: result.stdout?.toString().trim(),
      }).toEqual({
        exitCode: 1,
        stdout: "",
      });
    });
});

test("js file type export", () => {
  const dir = tempDirWithFiles("type-export", {
    "a.js": "export {not_found};",
  });

  const result = Bun.spawnSync({
    cmd: [bunExe(), "a.js"],
    cwd: dir,
    env: bunEnv,
    stdio: ["inherit", "pipe", "pipe"],
  });

  expect(result.stderr?.toString().trim()).toInclude('error: "not_found" is not declared in this file');
  expect(result.exitCode).toBe(1);
});

test("js file type import", () => {
  const dir = tempDirWithFiles("type-import", {
    "b.js": "import {type_only} from './ts.ts';",
    "ts.ts": "export type type_only = 'type_only';",
  });

  const result = Bun.spawnSync({
    cmd: [bunExe(), "b.js"],
    cwd: dir,
    env: bunEnv,
    stdio: ["inherit", "pipe", "pipe"],
  });

  expect(result.stderr?.toString().trim()).toInclude("Export named 'type_only' not found in module '");
  expect(result.stderr?.toString().trim()).not.toInclude("Did you mean to import default?");
  expect(result.exitCode).toBe(1);
});

test("js file type import with default export", () => {
  const dir = tempDirWithFiles("type-import", {
    "b.js": "import {type_only} from './ts.ts';",
    "ts.ts": "export type type_only = 'type_only'; export default function type_only() {};",
  });

  const result = Bun.spawnSync({
    cmd: [bunExe(), "b.js"],
    cwd: dir,
    env: bunEnv,
    stdio: ["inherit", "pipe", "pipe"],
  });

  expect(result.stderr?.toString().trim()).toInclude("Export named 'type_only' not found in module '");
  expect(result.stderr?.toString().trim()).toInclude("Did you mean to import default?");
  expect(result.exitCode).toBe(1);
});

test("js file with through export", () => {
  const dir = tempDirWithFiles("type-import", {
    "b.js": "export {type_only} from './ts.ts';",
    "ts.ts": "export type type_only = 'type_only'; export default function type_only() {};",
  });

  const result = Bun.spawnSync({
    cmd: [bunExe(), "b.js"],
    cwd: dir,
    env: bunEnv,
    stdio: ["inherit", "pipe", "pipe"],
  });

  expect(result.stderr?.toString().trim()).toInclude("SyntaxError: export 'type_only' not found in './ts.ts'");
  expect(result.exitCode).toBe(1);
});

test("js file with through export 2", () => {
  const dir = tempDirWithFiles("type-import", {
    "b.js": "import {type_only} from './ts.ts'; export {type_only};",
    "ts.ts": "export type type_only = 'type_only'; export default function type_only() {};",
  });

  const result = Bun.spawnSync({
    cmd: [bunExe(), "b.js"],
    cwd: dir,
    env: bunEnv,
    stdio: ["inherit", "pipe", "pipe"],
  });

  expect(result.stderr?.toString().trim()).toInclude("SyntaxError: export 'type_only' not found in './ts.ts'");
  expect(result.exitCode).toBe(1);
});

describe("through export merge", () => {
  // this isn't allowed, even in typescript (tsc emits "Duplicate identifier 'value'.")
  for (const fmt of ["js", "ts"]) {
    describe(fmt, () => {
      for (const [name, mode] of [
        ["through", "export {value} from './b'; export {value} from './c';"],
        ["direct", "export {value} from './b'; export const value = 'abc';"],
        ["direct2", "export const value = 'abc'; export {value};"],
        ["ns", "export * as value from './c'; export * as value from './c';"],
      ]) {
        describe(name, () => {
          const dir = tempDirWithFiles("type-import", {
            ["main." + fmt]: "import {value} from './a'; console.log(value);",
            ["a." + fmt]: mode,
            ["b." + fmt]: fmt === "ts" ? "export type value = 'b';" : "",
            ["c." + fmt]: "export const value = 'c';",
          });

          for (const file of ["main." + fmt, "a." + fmt]) {
            test(file, () => {
              const result = Bun.spawnSync({
                cmd: [bunExe(), file],
                cwd: dir,
                env: bunEnv,
                stdio: ["inherit", "pipe", "pipe"],
              });

              expect(result.stderr?.toString().trim()).toInclude(
                file === "a." + fmt
                  ? 'error: Multiple exports with the same name "value"\n' // bun's syntax error
                  : "SyntaxError: Cannot export a duplicate name 'value'.\n", // jsc's syntax error
              );

              expect(result.exitCode).toBe(1);
            });
          }
        });
      }
    });
  }
});

describe("check ownkeys from a star import", () => {
  const dir = tempDirWithFiles("ownkeys-star-import", {
    ["main.ts"]: `
      import * as ns from './a';
      console.log(JSON.stringify({
        keys: Object.keys(ns).sort(),
        ns,
        has_sometype: Object.hasOwn(ns, 'sometype'),
      }));
    `,
    ["a.ts"]: "export * from './b'; export {sometype} from './b';",
    ["b.ts"]: "export const value = 'b'; export const anotherValue = 'another'; export type sometype = 'sometype';",
  });

  const expected = {
    keys: ["anotherValue", "value"],
    ns: {
      anotherValue: "another",
      value: "b",
    },
    has_sometype: false,
  };

  describe.each(["run", "compile"] as const)("%s", mode => {
    const testFn = mode === "run" ? test.skip : test;

    testFn("works", () => {
      const result =
        mode === "compile"
          ? compileAndRun(dir, dir + "/main.ts")
          : Bun.spawnSync({
              cmd: [bunExe(), "main.ts"],
              cwd: dir,
              env: bunEnv,
              stdio: ["inherit", "pipe", "pipe"],
            });

      expect(result.stderr?.toString().trim()).toBe("");
      expect(JSON.parse(result.stdout?.toString().trim())).toEqual(expected);
      expect(result.exitCode).toBe(0);
    });
  });
});

test("check commonjs", () => {
  const dir = tempDirWithFiles("commonjs", {
    ["main.ts"]: "const {my_value, my_type} = require('./a'); console.log(my_value, my_type);",
    ["a.ts"]: "module.exports = require('./b');",
    ["b.ts"]: "export const my_value = 'my_value'; export type my_type = 'my_type';",
  });
  const result = Bun.spawnSync({
    cmd: [bunExe(), "main.ts"],
    cwd: dir,
    env: bunEnv,
    stdio: ["inherit", "pipe", "pipe"],
  });
  expect(result.stderr?.toString().trim()).toBe("");
  expect(result.stdout?.toString().trim()).toBe("my_value undefined");
  expect(result.exitCode).toBe(0);
});

test("check merge", () => {
  const dir = tempDirWithFiles("merge", {
    ["main.ts"]: "import {value} from './a'; console.log(value);",
    ["a.ts"]: "export * from './b'; export * from './c';",
    ["b.ts"]: "export const value = 'b';",
    ["c.ts"]: "export const value = 'c';",
  });
  const result = Bun.spawnSync({
    cmd: [bunExe(), "main.ts"],
    cwd: dir,
    env: bunEnv,
    stdio: ["inherit", "pipe", "pipe"],
  });
  expect(result.stderr?.toString().trim()).toInclude(
    "SyntaxError: Export named 'value' cannot be resolved due to ambiguous multiple bindings in module",
  );
  expect(result.exitCode).toBe(1);
});

describe("export * from './module'", () => {
  for (const fmt of ["js", "ts"]) {
    describe(fmt, () => {
      const dir = tempDirWithFiles("export-star", {
        ["main." + fmt]: "import {value} from './a'; console.log(value);",
        ["a." + fmt]: "export * from './b';",
        ["b." + fmt]: "export const value = 'b';",
      });
      for (const file of ["main." + fmt, "a." + fmt]) {
        test(file, () => {
          const result = Bun.spawnSync({
            cmd: [bunExe(), file],
            cwd: dir,
            env: bunEnv,
            stdio: ["inherit", "pipe", "pipe"],
          });
          expect(result.stderr?.toString().trim()).toBe("");
          expect(result.exitCode).toBe(0);
        });
      }
    });
  }
});

describe("export * as ns from './module'", () => {
  for (const fmt of ["js", "ts"]) {
    describe(fmt, () => {
      const dir = tempDirWithFiles("export-star-as", {
        ["main." + fmt]: "import {ns} from './a'; console.log(ns.value);",
        ["a." + fmt]: "export * as ns from './b';",
        ["b." + fmt]: "export const value = 'b';",
      });
      for (const file of ["main." + fmt, "a." + fmt]) {
        test(file, () => {
          const result = Bun.spawnSync({
            cmd: [bunExe(), file],
            cwd: dir,
            env: bunEnv,
            stdio: ["inherit", "pipe", "pipe"],
          });
          expect(result.stderr?.toString().trim()).toBe("");
          expect(result.exitCode).toBe(0);
        });
      }
    });
  }
});

describe("export type {Type} from './module'", () => {
  for (const fmt of ["ts"]) {
    describe(fmt, () => {
      const dir = tempDirWithFiles("export-type", {
        ["main." + fmt]: "import {Type} from './a'; const x: Type = 'test'; console.log(x);",
        ["a." + fmt]: "export type {Type} from './b';",
        ["b." + fmt]: "export type Type = string;",
      });
      for (const file of ["main." + fmt, "a." + fmt]) {
        test(file, () => {
          const result = Bun.spawnSync({
            cmd: [bunExe(), file],
            cwd: dir,
            env: bunEnv,
            stdio: ["inherit", "pipe", "pipe"],
          });
          expect(result.stderr?.toString().trim()).toBe("");
          expect(result.exitCode).toBe(0);
        });
      }
    });
  }
});

describe("import only used in decorator (#8439)", () => {
  const dir = tempDirWithFiles("import-only-used-in-decorator", {
    ["index.ts"]: /*js*/ `
      import { TestInterface } from "./interface.ts";

      function Decorator(): PropertyDecorator {
        return () => {};
      }

      class TestClass {
        @Decorator()
        test?: TestInterface;
      }
      class OtherClass {
        other?: TestInterface;
      }

      export {TestInterface};
    `,
    ["interface.ts"]: "export interface TestInterface {};",
    "tsconfig.json": JSON.stringify({
      compilerOptions: {
        experimentalDecorators: true,
        emitDecoratorMetadata: true,
      },
    }),
  });

  describe.each(["run", "compile"] as const)("%s", mode => {
    const testFn = mode === "run" ? test.skip : test;

    testFn("works", () => {
      const result =
        mode === "compile"
          ? compileAndRun(dir, dir + "/index.ts")
          : Bun.spawnSync({
              cmd: [bunExe(), "index.ts"],
              cwd: dir,
              env: bunEnv,
              stdio: ["inherit", "pipe", "pipe"],
            });

      expect(result.stderr?.toString().trim()).toBe("");
      expect(result.exitCode).toBe(0);
    });
  });
});
@@ -32,6 +32,34 @@ test("support eval in worker", async () => {
  await worker.terminate();
});

// In Node.js worker_threads, messages go to parentPort only, not self.onmessage.
// Libraries like fflate set both handlers, expecting only parentPort to fire.
test("worker_threads messages should not trigger self.onmessage", async () => {
  const workerCode = `
    const { parentPort } = require('worker_threads');
    let selfOnMessageCount = 0;
    let parentPortOnMessageCount = 0;

    self.onmessage = () => { selfOnMessageCount++; };
    parentPort.on('message', () => {
      parentPortOnMessageCount++;
      parentPort.postMessage({ selfOnMessageCount, parentPortOnMessageCount });
    });
  `;
  const worker = new Worker(workerCode, { eval: true });
  const result = await new Promise<{ selfOnMessageCount: number; parentPortOnMessageCount: number }>(
    (resolve, reject) => {
      worker.on("message", resolve);
      worker.on("error", reject);
      worker.postMessage({ test: 1 });
    },
  );
  await worker.terminate();

  expect(result.parentPortOnMessageCount).toBe(1);
  expect(result.selfOnMessageCount).toBe(0);
});
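
// Illustrative sketch (not from the original suite) of the two delivery channels the
// test above distinguishes. Inside a worker, both handlers can be registered:
//
//   const { parentPort } = require("node:worker_threads");
//   parentPort.on("message", () => { /* Node channel — fires for worker.postMessage */ });
//   self.onmessage = () => { /* web channel — must stay silent for worker_threads messages */ };
//
// Node.js routes a parent's worker.postMessage(...) only to parentPort; matching that
// keeps libraries that register both handlers from processing each message twice.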

test("all worker_threads module properties are present", () => {
  expect(wt).toHaveProperty("getEnvironmentData");
  expect(wt).toHaveProperty("isMainThread");

test/js/web/workers/worker-fixture-argv.js (generated, 27 lines changed)
@@ -1,7 +1,22 @@
(globalThis.addEventListener || require("node:worker_threads").parentPort.on)("message", () => {
  const postMessage = globalThis.postMessage || require("node:worker_threads").parentPort.postMessage;
  postMessage({
    argv: process.argv,
    execArgv: process.execArgv,
  });
});
// For Node workers, parentPort receives messages (not globalThis/self).
// For Web Workers, globalThis.addEventListener receives messages.
const wt = require("node:worker_threads");
const parentPort = wt.parentPort;

if (parentPort) {
  // Node worker_threads
  parentPort.on("message", () => {
    parentPort.postMessage({
      argv: process.argv,
      execArgv: process.execArgv,
    });
  });
} else {
  // Web Worker
  globalThis.addEventListener("message", () => {
    globalThis.postMessage({
      argv: process.argv,
      execArgv: process.execArgv,
    });
  });
}