mirror of
https://github.com/oven-sh/bun
synced 2026-02-03 15:38:46 +00:00
Compare commits
2 Commits
dylan/test
...
dylan/byte
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
7ad6973fe4 | ||
|
|
caef7fceea |
19
.aikido
19
.aikido
@@ -1,19 +0,0 @@
|
||||
exclude:
|
||||
paths:
|
||||
- test
|
||||
- scripts
|
||||
- bench
|
||||
- packages/bun-lambda
|
||||
- packages/bun-release
|
||||
- packages/bun-wasm
|
||||
- packages/bun-vscode
|
||||
- packages/bun-plugin-yaml
|
||||
- packages/bun-plugin-svelte
|
||||
- packages/bun-native-plugin-rs
|
||||
- packages/bun-native-bundler-plugin-api
|
||||
- packages/bun-inspector-protocol
|
||||
- packages/bun-inspector-frontend
|
||||
- packages/bun-error
|
||||
- packages/bun-debug-adapter-protocol
|
||||
- packages/bun-build-mdx-rs
|
||||
- packages/@types/bun
|
||||
@@ -26,7 +26,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
wget curl git python3 python3-pip ninja-build \
|
||||
software-properties-common apt-transport-https \
|
||||
ca-certificates gnupg lsb-release unzip \
|
||||
libxml2-dev ruby ruby-dev bison gawk perl make golang ccache \
|
||||
libxml2-dev ruby ruby-dev bison gawk perl make golang \
|
||||
&& add-apt-repository ppa:ubuntu-toolchain-r/test \
|
||||
&& apt-get update \
|
||||
&& apt-get install -y gcc-13 g++-13 libgcc-13-dev libstdc++-13-dev \
|
||||
@@ -35,8 +35,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
&& wget https://apt.llvm.org/llvm.sh \
|
||||
&& chmod +x llvm.sh \
|
||||
&& ./llvm.sh ${LLVM_VERSION} all \
|
||||
&& rm llvm.sh \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
&& rm llvm.sh
|
||||
|
||||
|
||||
RUN --mount=type=tmpfs,target=/tmp \
|
||||
@@ -111,14 +110,14 @@ ARG BUILDKITE_AGENT_TAGS
|
||||
|
||||
|
||||
# Install Rust nightly
|
||||
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y \
|
||||
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y \
|
||||
&& export PATH=$HOME/.cargo/bin:$PATH \
|
||||
&& rustup install nightly \
|
||||
&& rustup default nightly
|
||||
|
||||
|
||||
RUN ARCH=$(if [ "$TARGETARCH" = "arm64" ]; then echo "arm64"; else echo "amd64"; fi) && \
|
||||
echo "Downloading buildkite" && \
|
||||
echo "Downloading buildkite" && \
|
||||
curl -fsSL "https://github.com/buildkite/agent/releases/download/v3.87.0/buildkite-agent-linux-${ARCH}-3.87.0.tar.gz" -o /tmp/buildkite-agent.tar.gz && \
|
||||
mkdir -p /tmp/buildkite-agent && \
|
||||
tar -xzf /tmp/buildkite-agent.tar.gz -C /tmp/buildkite-agent && \
|
||||
@@ -126,18 +125,6 @@ RUN ARCH=$(if [ "$TARGETARCH" = "arm64" ]; then echo "arm64"; else echo "amd64";
|
||||
|
||||
RUN mkdir -p /var/cache/buildkite-agent /var/log/buildkite-agent /var/run/buildkite-agent /etc/buildkite-agent /var/lib/buildkite-agent/cache/bun
|
||||
|
||||
# The following is necessary to configure buildkite to use a stable
|
||||
# checkout directory for ccache to be effective.
|
||||
RUN mkdir -p -m 755 /var/lib/buildkite-agent/hooks && \
|
||||
cat <<'EOF' > /var/lib/buildkite-agent/hooks/environment
|
||||
#!/bin/sh
|
||||
set -efu
|
||||
|
||||
export BUILDKITE_BUILD_CHECKOUT_PATH=/var/lib/buildkite-agent/build
|
||||
EOF
|
||||
|
||||
RUN chmod 744 /var/lib/buildkite-agent/hooks/environment
|
||||
|
||||
COPY ../*/agent.mjs /var/bun/scripts/
|
||||
|
||||
ENV BUN_INSTALL_CACHE=/var/lib/buildkite-agent/cache/bun
|
||||
@@ -160,7 +147,7 @@ COPY . /workspace/bun
|
||||
|
||||
|
||||
# Install Rust nightly
|
||||
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y \
|
||||
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y \
|
||||
&& export PATH=$HOME/.cargo/bin:$PATH \
|
||||
&& rustup install nightly \
|
||||
&& rustup default nightly
|
||||
@@ -174,4 +161,4 @@ RUN --mount=type=tmpfs,target=/workspace/bun/build \
|
||||
ls -la \
|
||||
&& bun run build:release \
|
||||
&& mkdir -p /target \
|
||||
&& cp -r /workspace/bun/build/release/bun /target/bun
|
||||
&& cp -r /workspace/bun/build/release/bun /target/bun
|
||||
@@ -16,7 +16,6 @@ import {
|
||||
getEmoji,
|
||||
getEnv,
|
||||
getLastSuccessfulBuild,
|
||||
getSecret,
|
||||
isBuildkite,
|
||||
isBuildManual,
|
||||
isFork,
|
||||
@@ -109,9 +108,9 @@ const buildPlatforms = [
|
||||
{ os: "linux", arch: "x64", distro: "amazonlinux", release: "2023", features: ["docker"] },
|
||||
{ os: "linux", arch: "x64", baseline: true, distro: "amazonlinux", release: "2023", features: ["docker"] },
|
||||
{ os: "linux", arch: "x64", profile: "asan", distro: "amazonlinux", release: "2023", features: ["docker"] },
|
||||
{ os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.22" },
|
||||
{ os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.22" },
|
||||
{ os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.22" },
|
||||
{ os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.21" },
|
||||
{ os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.21" },
|
||||
{ os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.21" },
|
||||
{ os: "windows", arch: "x64", release: "2019" },
|
||||
{ os: "windows", arch: "x64", baseline: true, release: "2019" },
|
||||
];
|
||||
@@ -124,16 +123,19 @@ const testPlatforms = [
|
||||
{ os: "darwin", arch: "aarch64", release: "13", tier: "previous" },
|
||||
{ os: "darwin", arch: "x64", release: "14", tier: "latest" },
|
||||
{ os: "darwin", arch: "x64", release: "13", tier: "previous" },
|
||||
{ os: "linux", arch: "aarch64", distro: "debian", release: "13", tier: "latest" },
|
||||
{ os: "linux", arch: "x64", distro: "debian", release: "13", tier: "latest" },
|
||||
{ os: "linux", arch: "x64", baseline: true, distro: "debian", release: "13", tier: "latest" },
|
||||
{ os: "linux", arch: "x64", profile: "asan", distro: "debian", release: "13", tier: "latest" },
|
||||
{ os: "linux", arch: "aarch64", distro: "debian", release: "12", tier: "latest" },
|
||||
{ os: "linux", arch: "x64", distro: "debian", release: "12", tier: "latest" },
|
||||
{ os: "linux", arch: "x64", baseline: true, distro: "debian", release: "12", tier: "latest" },
|
||||
{ os: "linux", arch: "x64", profile: "asan", distro: "debian", release: "12", tier: "latest" },
|
||||
{ os: "linux", arch: "aarch64", distro: "ubuntu", release: "25.04", tier: "latest" },
|
||||
{ os: "linux", arch: "aarch64", distro: "ubuntu", release: "24.04", tier: "latest" },
|
||||
{ os: "linux", arch: "x64", distro: "ubuntu", release: "25.04", tier: "latest" },
|
||||
{ os: "linux", arch: "x64", distro: "ubuntu", release: "24.04", tier: "latest" },
|
||||
{ os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "25.04", tier: "latest" },
|
||||
{ os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.22", tier: "latest" },
|
||||
{ os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.22", tier: "latest" },
|
||||
{ os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.22", tier: "latest" },
|
||||
{ os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "24.04", tier: "latest" },
|
||||
{ os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.21", tier: "latest" },
|
||||
{ os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.21", tier: "latest" },
|
||||
{ os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.21", tier: "latest" },
|
||||
{ os: "windows", arch: "x64", release: "2019", tier: "oldest" },
|
||||
{ os: "windows", arch: "x64", release: "2019", baseline: true, tier: "oldest" },
|
||||
];
|
||||
@@ -221,7 +223,7 @@ function getImageName(platform, options) {
|
||||
* @param {number} [limit]
|
||||
* @link https://buildkite.com/docs/pipelines/command-step#retry-attributes
|
||||
*/
|
||||
function getRetry() {
|
||||
function getRetry(limit = 0) {
|
||||
return {
|
||||
manual: {
|
||||
permit_on_passed: true,
|
||||
@@ -290,7 +292,7 @@ function getEc2Agent(platform, options, ec2Options) {
|
||||
* @returns {string}
|
||||
*/
|
||||
function getCppAgent(platform, options) {
|
||||
const { os, arch } = platform;
|
||||
const { os, arch, distro } = platform;
|
||||
|
||||
if (os === "darwin") {
|
||||
return {
|
||||
@@ -311,7 +313,7 @@ function getCppAgent(platform, options) {
|
||||
* @returns {string}
|
||||
*/
|
||||
function getLinkBunAgent(platform, options) {
|
||||
const { os, arch } = platform;
|
||||
const { os, arch, distro } = platform;
|
||||
|
||||
if (os === "darwin") {
|
||||
return {
|
||||
@@ -341,7 +343,7 @@ function getZigPlatform() {
|
||||
arch: "aarch64",
|
||||
abi: "musl",
|
||||
distro: "alpine",
|
||||
release: "3.22",
|
||||
release: "3.21",
|
||||
};
|
||||
}
|
||||
|
||||
@@ -350,7 +352,14 @@ function getZigPlatform() {
|
||||
* @param {PipelineOptions} options
|
||||
* @returns {Agent}
|
||||
*/
|
||||
function getZigAgent(_platform, options) {
|
||||
function getZigAgent(platform, options) {
|
||||
const { arch } = platform;
|
||||
|
||||
// Uncomment to restore to using macOS on-prem for Zig.
|
||||
// return {
|
||||
// queue: "build-zig",
|
||||
// };
|
||||
|
||||
return getEc2Agent(getZigPlatform(), options, {
|
||||
instanceType: "r8g.large",
|
||||
});
|
||||
@@ -362,7 +371,7 @@ function getZigAgent(_platform, options) {
|
||||
* @returns {Agent}
|
||||
*/
|
||||
function getTestAgent(platform, options) {
|
||||
const { os, arch, profile } = platform;
|
||||
const { os, arch } = platform;
|
||||
|
||||
if (os === "darwin") {
|
||||
return {
|
||||
@@ -382,13 +391,6 @@ function getTestAgent(platform, options) {
|
||||
}
|
||||
|
||||
if (arch === "aarch64") {
|
||||
if (profile === "asan") {
|
||||
return getEc2Agent(platform, options, {
|
||||
instanceType: "c8g.2xlarge",
|
||||
cpuCount: 2,
|
||||
threadsPerCore: 1,
|
||||
});
|
||||
}
|
||||
return getEc2Agent(platform, options, {
|
||||
instanceType: "c8g.xlarge",
|
||||
cpuCount: 2,
|
||||
@@ -396,13 +398,6 @@ function getTestAgent(platform, options) {
|
||||
});
|
||||
}
|
||||
|
||||
if (profile === "asan") {
|
||||
return getEc2Agent(platform, options, {
|
||||
instanceType: "c7i.2xlarge",
|
||||
cpuCount: 2,
|
||||
threadsPerCore: 1,
|
||||
});
|
||||
}
|
||||
return getEc2Agent(platform, options, {
|
||||
instanceType: "c7i.xlarge",
|
||||
cpuCount: 2,
|
||||
@@ -452,6 +447,23 @@ function getBuildCommand(target, options, label) {
|
||||
return `bun run build:${buildProfile}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Platform} platform
|
||||
* @param {PipelineOptions} options
|
||||
* @returns {Step}
|
||||
*/
|
||||
function getBuildVendorStep(platform, options) {
|
||||
return {
|
||||
key: `${getTargetKey(platform)}-build-vendor`,
|
||||
label: `${getTargetLabel(platform)} - build-vendor`,
|
||||
agents: getCppAgent(platform, options),
|
||||
retry: getRetry(),
|
||||
cancel_on_build_failing: isMergeQueue(),
|
||||
env: getBuildEnv(platform, options),
|
||||
command: `${getBuildCommand(platform, options)} --target dependencies`,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Platform} platform
|
||||
* @param {PipelineOptions} options
|
||||
@@ -501,9 +513,9 @@ function getBuildZigStep(platform, options) {
|
||||
const toolchain = getBuildToolchain(platform);
|
||||
return {
|
||||
key: `${getTargetKey(platform)}-build-zig`,
|
||||
retry: getRetry(),
|
||||
label: `${getTargetLabel(platform)} - build-zig`,
|
||||
agents: getZigAgent(platform, options),
|
||||
retry: getRetry(),
|
||||
cancel_on_build_failing: isMergeQueue(),
|
||||
env: getBuildEnv(platform, options),
|
||||
command: `${getBuildCommand(platform, options)} --target bun-zig --toolchain ${toolchain}`,
|
||||
@@ -526,7 +538,6 @@ function getLinkBunStep(platform, options) {
|
||||
cancel_on_build_failing: isMergeQueue(),
|
||||
env: {
|
||||
BUN_LINK_ONLY: "ON",
|
||||
ASAN_OPTIONS: "allow_user_segv_handler=1:disable_coredump=0:detect_leaks=0",
|
||||
...getBuildEnv(platform, options),
|
||||
},
|
||||
command: `${getBuildCommand(platform, options, "build-bun")} --target bun`,
|
||||
@@ -553,6 +564,7 @@ function getBuildBunStep(platform, options) {
|
||||
/**
|
||||
* @typedef {Object} TestOptions
|
||||
* @property {string} [buildId]
|
||||
* @property {boolean} [unifiedTests]
|
||||
* @property {string[]} [testFiles]
|
||||
* @property {boolean} [dryRun]
|
||||
*/
|
||||
@@ -565,13 +577,12 @@ function getBuildBunStep(platform, options) {
|
||||
*/
|
||||
function getTestBunStep(platform, options, testOptions = {}) {
|
||||
const { os, profile } = platform;
|
||||
const { buildId, testFiles } = testOptions;
|
||||
const { buildId, unifiedTests, testFiles } = testOptions;
|
||||
|
||||
const args = [`--step=${getTargetKey(platform)}-build-bun`];
|
||||
if (buildId) {
|
||||
args.push(`--build-id=${buildId}`);
|
||||
}
|
||||
|
||||
if (testFiles) {
|
||||
args.push(...testFiles.map(testFile => `--include=${testFile}`));
|
||||
}
|
||||
@@ -588,11 +599,8 @@ function getTestBunStep(platform, options, testOptions = {}) {
|
||||
agents: getTestAgent(platform, options),
|
||||
retry: getRetry(),
|
||||
cancel_on_build_failing: isMergeQueue(),
|
||||
parallelism: os === "darwin" ? 2 : 20,
|
||||
parallelism: unifiedTests ? undefined : os === "darwin" ? 2 : 10,
|
||||
timeout_in_minutes: profile === "asan" || os === "windows" ? 45 : 30,
|
||||
env: {
|
||||
ASAN_OPTIONS: "allow_user_segv_handler=1:disable_coredump=0:detect_leaks=0",
|
||||
},
|
||||
command:
|
||||
os === "windows"
|
||||
? `node .\\scripts\\runner.node.mjs ${args.join(" ")}`
|
||||
@@ -770,6 +778,8 @@ function getBenchmarkStep() {
|
||||
* @property {Platform[]} [buildPlatforms]
|
||||
* @property {Platform[]} [testPlatforms]
|
||||
* @property {string[]} [testFiles]
|
||||
* @property {boolean} [unifiedBuilds]
|
||||
* @property {boolean} [unifiedTests]
|
||||
*/
|
||||
|
||||
/**
|
||||
@@ -940,6 +950,22 @@ function getOptionsStep() {
|
||||
default: "false",
|
||||
options: booleanOptions,
|
||||
},
|
||||
{
|
||||
key: "unified-builds",
|
||||
select: "Do you want to build each platform in a single step?",
|
||||
hint: "If true, builds will not be split into separate steps (this will likely slow down the build)",
|
||||
required: false,
|
||||
default: "false",
|
||||
options: booleanOptions,
|
||||
},
|
||||
{
|
||||
key: "unified-tests",
|
||||
select: "Do you want to run tests in a single step?",
|
||||
hint: "If true, tests will not be split into separate steps (this will be very slow)",
|
||||
required: false,
|
||||
default: "false",
|
||||
options: booleanOptions,
|
||||
},
|
||||
],
|
||||
};
|
||||
}
|
||||
@@ -1005,6 +1031,8 @@ async function getPipelineOptions() {
|
||||
buildImages: parseBoolean(options["build-images"]),
|
||||
publishImages: parseBoolean(options["publish-images"]),
|
||||
testFiles: parseArray(options["test-files"]),
|
||||
unifiedBuilds: parseBoolean(options["unified-builds"]),
|
||||
unifiedTests: parseBoolean(options["unified-tests"]),
|
||||
buildPlatforms: buildPlatformKeys?.length
|
||||
? buildPlatformKeys.flatMap(key => buildProfiles.map(profile => ({ ...buildPlatformsMap.get(key), profile })))
|
||||
: Array.from(buildPlatformsMap.values()),
|
||||
@@ -1070,7 +1098,7 @@ async function getPipeline(options = {}) {
|
||||
const imagePlatforms = new Map(
|
||||
buildImages || publishImages
|
||||
? [...buildPlatforms, ...testPlatforms]
|
||||
.filter(({ os }) => os !== "darwin")
|
||||
.filter(({ os }) => os === "linux" || os === "windows")
|
||||
.map(platform => [getImageKey(platform), platform])
|
||||
: [],
|
||||
);
|
||||
@@ -1086,7 +1114,7 @@ async function getPipeline(options = {}) {
|
||||
});
|
||||
}
|
||||
|
||||
let { skipBuilds, forceBuilds, dryRun } = options;
|
||||
let { skipBuilds, forceBuilds, unifiedBuilds, dryRun } = options;
|
||||
dryRun = dryRun || !!buildImages;
|
||||
|
||||
/** @type {string | undefined} */
|
||||
@@ -1104,7 +1132,7 @@ async function getPipeline(options = {}) {
|
||||
const includeASAN = !isMainBranch();
|
||||
|
||||
if (!buildId) {
|
||||
let relevantBuildPlatforms = includeASAN
|
||||
const relevantBuildPlatforms = includeASAN
|
||||
? buildPlatforms
|
||||
: buildPlatforms.filter(({ profile }) => profile !== "asan");
|
||||
|
||||
@@ -1117,16 +1145,13 @@ async function getPipeline(options = {}) {
|
||||
dependsOn.push(`${imageKey}-build-image`);
|
||||
}
|
||||
|
||||
const steps = [];
|
||||
steps.push(getBuildCppStep(target, options));
|
||||
steps.push(getBuildZigStep(target, options));
|
||||
steps.push(getLinkBunStep(target, options));
|
||||
|
||||
return getStepWithDependsOn(
|
||||
{
|
||||
key: getTargetKey(target),
|
||||
group: getTargetLabel(target),
|
||||
steps,
|
||||
steps: unifiedBuilds
|
||||
? [getBuildBunStep(target, options)]
|
||||
: [getBuildCppStep(target, options), getBuildZigStep(target, options), getLinkBunStep(target, options)],
|
||||
},
|
||||
...dependsOn,
|
||||
);
|
||||
@@ -1135,13 +1160,13 @@ async function getPipeline(options = {}) {
|
||||
}
|
||||
|
||||
if (!isMainBranch()) {
|
||||
const { skipTests, forceTests, testFiles } = options;
|
||||
const { skipTests, forceTests, unifiedTests, testFiles } = options;
|
||||
if (!skipTests || forceTests) {
|
||||
steps.push(
|
||||
...testPlatforms.map(target => ({
|
||||
key: getTargetKey(target),
|
||||
group: getTargetLabel(target),
|
||||
steps: [getTestBunStep(target, options, { testFiles, buildId })],
|
||||
steps: [getTestBunStep(target, options, { unifiedTests, testFiles, buildId })],
|
||||
})),
|
||||
);
|
||||
}
|
||||
@@ -1184,43 +1209,6 @@ async function main() {
|
||||
console.log("Generated options:", options);
|
||||
}
|
||||
|
||||
startGroup("Querying GitHub for files...");
|
||||
if (options && isBuildkite && !isMainBranch()) {
|
||||
/** @type {string[]} */
|
||||
let allFiles = [];
|
||||
/** @type {string[]} */
|
||||
let newFiles = [];
|
||||
let prFileCount = 0;
|
||||
try {
|
||||
console.log("on buildkite: collecting new files from PR");
|
||||
const per_page = 50;
|
||||
const { BUILDKITE_PULL_REQUEST } = process.env;
|
||||
for (let i = 1; i <= 10; i++) {
|
||||
const res = await fetch(
|
||||
`https://api.github.com/repos/oven-sh/bun/pulls/${BUILDKITE_PULL_REQUEST}/files?per_page=${per_page}&page=${i}`,
|
||||
{ headers: { Authorization: `Bearer ${getSecret("GITHUB_TOKEN")}` } },
|
||||
);
|
||||
const doc = await res.json();
|
||||
console.log(`-> page ${i}, found ${doc.length} items`);
|
||||
if (doc.length === 0) break;
|
||||
for (const { filename, status } of doc) {
|
||||
prFileCount += 1;
|
||||
allFiles.push(filename);
|
||||
if (status !== "added") continue;
|
||||
newFiles.push(filename);
|
||||
}
|
||||
if (doc.length < per_page) break;
|
||||
}
|
||||
console.log(`- PR ${BUILDKITE_PULL_REQUEST}, ${prFileCount} files, ${newFiles.length} new files`);
|
||||
} catch (e) {
|
||||
console.error(e);
|
||||
}
|
||||
if (allFiles.every(filename => filename.startsWith("docs/"))) {
|
||||
console.log(`- PR is only docs, skipping tests!`);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
startGroup("Generating pipeline...");
|
||||
const pipeline = await getPipeline(options);
|
||||
if (!pipeline) {
|
||||
|
||||
@@ -1,43 +0,0 @@
|
||||
---
|
||||
allowed-tools: Bash(gh issue view:*), Bash(gh search:*), Bash(gh issue list:*), Bash(gh api:*), Bash(gh issue comment:*)
|
||||
description: Find duplicate GitHub issues
|
||||
---
|
||||
|
||||
# Issue deduplication command
|
||||
|
||||
Find up to 3 likely duplicate issues for a given GitHub issue.
|
||||
|
||||
To do this, follow these steps precisely:
|
||||
|
||||
1. Use an agent to check if the GitHub issue (a) is closed, (b) does not need to be deduped (eg. because it is broad product feedback without a specific solution, or positive feedback), or (c) already has a duplicate detection comment (check for the exact HTML marker `<!-- dedupe-bot:marker -->` in the issue comments - ignore other bot comments). If so, do not proceed.
|
||||
2. Use an agent to view a GitHub issue, and ask the agent to return a summary of the issue
|
||||
3. Then, launch 5 parallel agents to search GitHub for duplicates of this issue, using diverse keywords and search approaches, using the summary from Step 2. **IMPORTANT**: Always scope searches with `repo:owner/repo` to constrain results to the current repository only.
|
||||
4. Next, feed the results from Steps 2 and 3 into another agent, so that it can filter out false positives, that are likely not actually duplicates of the original issue. If there are no duplicates remaining, do not proceed.
|
||||
5. Finally, comment back on the issue with a list of up to three duplicate issues (or zero, if there are no likely duplicates)
|
||||
|
||||
Notes (be sure to tell this to your agents, too):
|
||||
|
||||
- Use `gh` to interact with GitHub, rather than web fetch
|
||||
- Do not use other tools, beyond `gh` (eg. don't use other MCP servers, file edit, etc.)
|
||||
- Make a todo list first
|
||||
- Always scope searches with `repo:owner/repo` to prevent cross-repo false positives
|
||||
- For your comment, follow the following format precisely (assuming for this example that you found 3 suspected duplicates):
|
||||
|
||||
---
|
||||
|
||||
Found 3 possible duplicate issues:
|
||||
|
||||
1. <link to issue>
|
||||
2. <link to issue>
|
||||
3. <link to issue>
|
||||
|
||||
This issue will be automatically closed as a duplicate in 3 days.
|
||||
|
||||
- If your issue is a duplicate, please close it and 👍 the existing issue instead
|
||||
- To prevent auto-closure, add a comment or 👎 this comment
|
||||
|
||||
🤖 Generated with [Claude Code](https://claude.ai/code)
|
||||
|
||||
<!-- dedupe-bot:marker -->
|
||||
|
||||
---
|
||||
@@ -1,88 +0,0 @@
|
||||
#!/usr/bin/env bun
|
||||
import { extname } from "path";
|
||||
import { spawnSync } from "child_process";
|
||||
|
||||
const input = await Bun.stdin.json();
|
||||
|
||||
const toolName = input.tool_name;
|
||||
const toolInput = input.tool_input || {};
|
||||
const filePath = toolInput.file_path;
|
||||
|
||||
// Only process Write, Edit, and MultiEdit tools
|
||||
if (!["Write", "Edit", "MultiEdit"].includes(toolName)) {
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
const ext = extname(filePath);
|
||||
|
||||
// Only format known files
|
||||
if (!filePath) {
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
function formatZigFile() {
|
||||
try {
|
||||
// Format the Zig file
|
||||
const result = spawnSync("vendor/zig/zig.exe", ["fmt", filePath], {
|
||||
cwd: process.env.CLAUDE_PROJECT_DIR || process.cwd(),
|
||||
encoding: "utf-8",
|
||||
});
|
||||
|
||||
if (result.error) {
|
||||
console.error(`Failed to format ${filePath}: ${result.error.message}`);
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
if (result.status !== 0) {
|
||||
console.error(`zig fmt failed for ${filePath}:`);
|
||||
if (result.stderr) {
|
||||
console.error(result.stderr);
|
||||
}
|
||||
process.exit(0);
|
||||
}
|
||||
} catch (error) {}
|
||||
}
|
||||
|
||||
function formatTypeScriptFile() {
|
||||
try {
|
||||
// Format the TypeScript file
|
||||
const result = spawnSync(
|
||||
"./node_modules/.bin/prettier",
|
||||
["--plugin=prettier-plugin-organize-imports", "--config", ".prettierrc", "--write", filePath],
|
||||
{
|
||||
cwd: process.env.CLAUDE_PROJECT_DIR || process.cwd(),
|
||||
encoding: "utf-8",
|
||||
},
|
||||
);
|
||||
} catch (error) {}
|
||||
}
|
||||
|
||||
if (ext === ".zig") {
|
||||
formatZigFile();
|
||||
} else if (
|
||||
[
|
||||
".cjs",
|
||||
".css",
|
||||
".html",
|
||||
".js",
|
||||
".json",
|
||||
".jsonc",
|
||||
".jsx",
|
||||
".less",
|
||||
".mjs",
|
||||
".pcss",
|
||||
".postcss",
|
||||
".sass",
|
||||
".scss",
|
||||
".styl",
|
||||
".stylus",
|
||||
".toml",
|
||||
".ts",
|
||||
".tsx",
|
||||
".yaml",
|
||||
].includes(ext)
|
||||
) {
|
||||
formatTypeScriptFile();
|
||||
}
|
||||
|
||||
process.exit(0);
|
||||
@@ -1,207 +0,0 @@
|
||||
#!/usr/bin/env bun
|
||||
import { basename, extname } from "path";
|
||||
|
||||
const input = await Bun.stdin.json();
|
||||
|
||||
const toolName = input.tool_name;
|
||||
const toolInput = input.tool_input || {};
|
||||
const command = toolInput.command || "";
|
||||
const timeout = toolInput.timeout;
|
||||
const cwd = input.cwd || "";
|
||||
|
||||
// Get environment variables from the hook context
|
||||
// Note: We check process.env directly as env vars are inherited
|
||||
let useSystemBun = process.env.USE_SYSTEM_BUN;
|
||||
|
||||
if (toolName !== "Bash" || !command) {
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
function denyWithReason(reason) {
|
||||
const output = {
|
||||
hookSpecificOutput: {
|
||||
hookEventName: "PreToolUse",
|
||||
permissionDecision: "deny",
|
||||
permissionDecisionReason: reason,
|
||||
},
|
||||
};
|
||||
console.log(JSON.stringify(output));
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
// Parse the command to extract argv0 and positional args
|
||||
let tokens;
|
||||
try {
|
||||
// Simple shell parsing - split on spaces but respect quotes (both single and double)
|
||||
tokens = command.match(/(?:[^\s"']+|"[^"]*"|'[^']*')+/g)?.map(t => t.replace(/^['"]|['"]$/g, "")) || [];
|
||||
} catch {
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
if (tokens.length === 0) {
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
// Strip inline environment variable assignments (e.g., FOO=1 bun test)
|
||||
const inlineEnv = new Map();
|
||||
let commandStart = 0;
|
||||
while (
|
||||
commandStart < tokens.length &&
|
||||
/^[A-Za-z_][A-Za-z0-9_]*=/.test(tokens[commandStart]) &&
|
||||
!tokens[commandStart].includes("/")
|
||||
) {
|
||||
const [name, value = ""] = tokens[commandStart].split("=", 2);
|
||||
inlineEnv.set(name, value);
|
||||
commandStart++;
|
||||
}
|
||||
if (commandStart >= tokens.length) {
|
||||
process.exit(0);
|
||||
}
|
||||
tokens = tokens.slice(commandStart);
|
||||
useSystemBun = inlineEnv.get("USE_SYSTEM_BUN") ?? useSystemBun;
|
||||
|
||||
// Get the executable name (argv0)
|
||||
const argv0 = basename(tokens[0], extname(tokens[0]));
|
||||
|
||||
// Check if it's zig or zig.exe
|
||||
if (argv0 === "zig") {
|
||||
// Filter out flags (starting with -) to get positional arguments
|
||||
const positionalArgs = tokens.slice(1).filter(arg => !arg.startsWith("-"));
|
||||
|
||||
// Check if the positional args contain "build" followed by "obj"
|
||||
if (positionalArgs.length >= 2 && positionalArgs[0] === "build" && positionalArgs[1] === "obj") {
|
||||
denyWithReason("error: Use `bun bd` to build Bun and wait patiently");
|
||||
}
|
||||
}
|
||||
|
||||
// Check if argv0 is timeout and the command is "bun bd"
|
||||
if (argv0 === "timeout") {
|
||||
// Find the actual command after timeout and its arguments
|
||||
const timeoutArgEndIndex = tokens.slice(1).findIndex(t => !t.startsWith("-") && !/^\d/.test(t));
|
||||
if (timeoutArgEndIndex === -1) {
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
const actualCommandIndex = timeoutArgEndIndex + 1;
|
||||
if (actualCommandIndex >= tokens.length) {
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
const actualCommand = basename(tokens[actualCommandIndex]);
|
||||
const restArgs = tokens.slice(actualCommandIndex + 1);
|
||||
|
||||
// Check if it's "bun bd" or "bun-debug bd" without other positional args
|
||||
if (actualCommand === "bun" || actualCommand.includes("bun-debug")) {
|
||||
// Claude is a sneaky fucker
|
||||
let positionalArgs = restArgs.filter(arg => !arg.startsWith("-"));
|
||||
const redirectStderrToStdoutIndex = positionalArgs.findIndex(arg => arg === "2>&1");
|
||||
if (redirectStderrToStdoutIndex !== -1) {
|
||||
positionalArgs.splice(redirectStderrToStdoutIndex, 1);
|
||||
}
|
||||
const redirectStdoutToStderrIndex = positionalArgs.findIndex(arg => arg === "1>&2");
|
||||
if (redirectStdoutToStderrIndex !== -1) {
|
||||
positionalArgs.splice(redirectStdoutToStderrIndex, 1);
|
||||
}
|
||||
|
||||
const redirectToFileIndex = positionalArgs.findIndex(arg => arg === ">");
|
||||
if (redirectToFileIndex !== -1) {
|
||||
positionalArgs.splice(redirectToFileIndex, 2);
|
||||
}
|
||||
|
||||
const redirectToFileAppendIndex = positionalArgs.findIndex(arg => arg === ">>");
|
||||
if (redirectToFileAppendIndex !== -1) {
|
||||
positionalArgs.splice(redirectToFileAppendIndex, 2);
|
||||
}
|
||||
|
||||
const redirectTOFileInlineIndex = positionalArgs.findIndex(arg => arg.startsWith(">"));
|
||||
if (redirectTOFileInlineIndex !== -1) {
|
||||
positionalArgs.splice(redirectTOFileInlineIndex, 1);
|
||||
}
|
||||
|
||||
const pipeIndex = positionalArgs.findIndex(arg => arg === "|");
|
||||
if (pipeIndex !== -1) {
|
||||
positionalArgs = positionalArgs.slice(0, pipeIndex);
|
||||
}
|
||||
|
||||
positionalArgs = positionalArgs.map(arg => arg.trim()).filter(Boolean);
|
||||
|
||||
if (positionalArgs.length === 1 && positionalArgs[0] === "bd") {
|
||||
denyWithReason("error: Run `bun bd` without a timeout");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check if command is "bun .* test" or "bun-debug test" with -u/--update-snapshots AND -t/--test-name-pattern
|
||||
if (argv0 === "bun" || argv0.includes("bun-debug")) {
|
||||
const allArgs = tokens.slice(1);
|
||||
|
||||
// Check if "test" is in positional args or "bd" followed by "test"
|
||||
const positionalArgs = allArgs.filter(arg => !arg.startsWith("-"));
|
||||
const hasTest = positionalArgs.includes("test") || (positionalArgs[0] === "bd" && positionalArgs[1] === "test");
|
||||
|
||||
if (hasTest) {
|
||||
const hasUpdateSnapshots = allArgs.some(arg => arg === "-u" || arg === "--update-snapshots");
|
||||
const hasTestNamePattern = allArgs.some(arg => arg === "-t" || arg === "--test-name-pattern");
|
||||
|
||||
if (hasUpdateSnapshots && hasTestNamePattern) {
|
||||
denyWithReason("error: Cannot use -u/--update-snapshots with -t/--test-name-pattern");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check if timeout option is set for "bun bd" command
|
||||
if (timeout !== undefined && (argv0 === "bun" || argv0.includes("bun-debug"))) {
|
||||
const positionalArgs = tokens.slice(1).filter(arg => !arg.startsWith("-"));
|
||||
if (positionalArgs.length === 1 && positionalArgs[0] === "bd") {
|
||||
denyWithReason("error: Run `bun bd` without a timeout");
|
||||
}
|
||||
}
|
||||
|
||||
// Check if running "bun test <file>" without USE_SYSTEM_BUN=1
|
||||
if ((argv0 === "bun" || argv0.includes("bun-debug")) && useSystemBun !== "1") {
|
||||
const allArgs = tokens.slice(1);
|
||||
const positionalArgs = allArgs.filter(arg => !arg.startsWith("-"));
|
||||
|
||||
// Check if it's "test" (not "bd test")
|
||||
if (positionalArgs.length >= 1 && positionalArgs[0] === "test" && positionalArgs[0] !== "bd") {
|
||||
denyWithReason(
|
||||
"error: In development, use `bun bd test <file>` to test your changes. If you meant to use a release version, set USE_SYSTEM_BUN=1",
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Check if running "bun bd test" from bun repo root or test folder without a file path
|
||||
if (argv0 === "bun" || argv0.includes("bun-debug")) {
|
||||
const allArgs = tokens.slice(1);
|
||||
const positionalArgs = allArgs.filter(arg => !arg.startsWith("-"));
|
||||
|
||||
// Check if it's "bd test"
|
||||
if (positionalArgs.length >= 2 && positionalArgs[0] === "bd" && positionalArgs[1] === "test") {
|
||||
// Check if cwd is the bun repo root or test folder
|
||||
const isBunRepoRoot = cwd === "/workspace/bun" || cwd.endsWith("/bun");
|
||||
const isTestFolder = cwd.endsWith("/bun/test");
|
||||
|
||||
if (isBunRepoRoot || isTestFolder) {
|
||||
// Check if there's a file path argument (looks like a path: contains / or has test extension)
|
||||
const hasFilePath = positionalArgs
|
||||
.slice(2)
|
||||
.some(
|
||||
arg =>
|
||||
arg.includes("/") ||
|
||||
arg.endsWith(".test.ts") ||
|
||||
arg.endsWith(".test.js") ||
|
||||
arg.endsWith(".test.tsx") ||
|
||||
arg.endsWith(".test.jsx"),
|
||||
);
|
||||
|
||||
if (!hasFilePath) {
|
||||
denyWithReason(
|
||||
"error: `bun bd test` from repo root or test folder will run all tests. Use `bun bd test <path>` with a specific test file.",
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Allow the command to proceed
|
||||
process.exit(0);
|
||||
@@ -1,26 +0,0 @@
|
||||
{
|
||||
"hooks": {
|
||||
"PreToolUse": [
|
||||
{
|
||||
"matcher": "Bash",
|
||||
"hooks": [
|
||||
{
|
||||
"type": "command",
|
||||
"command": "\"$CLAUDE_PROJECT_DIR\"/.claude/hooks/pre-bash-zig-build.js"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"PostToolUse": [
|
||||
{
|
||||
"matcher": "Write|Edit|MultiEdit",
|
||||
"hooks": [
|
||||
{
|
||||
"type": "command",
|
||||
"command": "\"$CLAUDE_PROJECT_DIR\"/.claude/hooks/post-edit-zig-format.js"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
@@ -1,184 +0,0 @@
|
||||
---
|
||||
name: implementing-jsc-classes-cpp
|
||||
description: Implements JavaScript classes in C++ using JavaScriptCore. Use when creating new JS classes with C++ bindings, prototypes, or constructors.
|
||||
---
|
||||
|
||||
# Implementing JavaScript Classes in C++
|
||||
|
||||
## Class Structure
|
||||
|
||||
For publicly accessible Constructor and Prototype, create 3 classes:
|
||||
|
||||
1. **`class Foo : public JSC::DestructibleObject`** - if C++ fields exist; otherwise use `JSC::constructEmptyObject` with `putDirectOffset`
|
||||
2. **`class FooPrototype : public JSC::JSNonFinalObject`**
|
||||
3. **`class FooConstructor : public JSC::InternalFunction`**
|
||||
|
||||
No public constructor? Only Prototype and class needed.
|
||||
|
||||
## Iso Subspaces
|
||||
|
||||
Classes with C++ fields need subspaces in:
|
||||
|
||||
- `src/bun.js/bindings/webcore/DOMClientIsoSubspaces.h`
|
||||
- `src/bun.js/bindings/webcore/DOMIsoSubspaces.h`
|
||||
|
||||
```cpp
|
||||
template<typename MyClassT, JSC::SubspaceAccess mode>
|
||||
static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm) {
|
||||
if constexpr (mode == JSC::SubspaceAccess::Concurrently)
|
||||
return nullptr;
|
||||
return WebCore::subspaceForImpl<MyClassT, WebCore::UseCustomHeapCellType::No>(
|
||||
vm,
|
||||
[](auto& spaces) { return spaces.m_clientSubspaceForMyClassT.get(); },
|
||||
[](auto& spaces, auto&& space) { spaces.m_clientSubspaceForMyClassT = std::forward<decltype(space)>(space); },
|
||||
[](auto& spaces) { return spaces.m_subspaceForMyClassT.get(); },
|
||||
[](auto& spaces, auto&& space) { spaces.m_subspaceForMyClassT = std::forward<decltype(space)>(space); });
|
||||
}
|
||||
```
|
||||
|
||||
## Property Definitions
|
||||
|
||||
```cpp
|
||||
static JSC_DECLARE_HOST_FUNCTION(jsFooProtoFuncMethod);
|
||||
static JSC_DECLARE_CUSTOM_GETTER(jsFooGetter_property);
|
||||
|
||||
static const HashTableValue JSFooPrototypeTableValues[] = {
|
||||
{ "property"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsFooGetter_property, 0 } },
|
||||
{ "method"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsFooProtoFuncMethod, 1 } },
|
||||
};
|
||||
```
|
||||
|
||||
## Prototype Class
|
||||
|
||||
```cpp
|
||||
class JSFooPrototype final : public JSC::JSNonFinalObject {
|
||||
public:
|
||||
using Base = JSC::JSNonFinalObject;
|
||||
static constexpr unsigned StructureFlags = Base::StructureFlags;
|
||||
|
||||
static JSFooPrototype* create(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::Structure* structure) {
|
||||
JSFooPrototype* prototype = new (NotNull, allocateCell<JSFooPrototype>(vm)) JSFooPrototype(vm, structure);
|
||||
prototype->finishCreation(vm);
|
||||
return prototype;
|
||||
}
|
||||
|
||||
template<typename, JSC::SubspaceAccess>
|
||||
static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm) { return &vm.plainObjectSpace(); }
|
||||
|
||||
DECLARE_INFO;
|
||||
|
||||
static JSC::Structure* createStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::JSValue prototype) {
|
||||
auto* structure = JSC::Structure::create(vm, globalObject, prototype, JSC::TypeInfo(JSC::ObjectType, StructureFlags), info());
|
||||
structure->setMayBePrototype(true);
|
||||
return structure;
|
||||
}
|
||||
|
||||
private:
|
||||
JSFooPrototype(JSC::VM& vm, JSC::Structure* structure) : Base(vm, structure) {}
|
||||
void finishCreation(JSC::VM& vm);
|
||||
};
|
||||
|
||||
void JSFooPrototype::finishCreation(VM& vm) {
|
||||
Base::finishCreation(vm);
|
||||
reifyStaticProperties(vm, JSFoo::info(), JSFooPrototypeTableValues, *this);
|
||||
JSC_TO_STRING_TAG_WITHOUT_TRANSITION();
|
||||
}
|
||||
```
|
||||
|
||||
## Getter/Setter/Function Definitions
|
||||
|
||||
```cpp
|
||||
// Getter
|
||||
JSC_DEFINE_CUSTOM_GETTER(jsFooGetter_prop, (JSGlobalObject* globalObject, EncodedJSValue thisValue, PropertyName)) {
|
||||
VM& vm = globalObject->vm();
|
||||
auto scope = DECLARE_THROW_SCOPE(vm);
|
||||
JSFoo* thisObject = jsDynamicCast<JSFoo*>(JSValue::decode(thisValue));
|
||||
if (UNLIKELY(!thisObject)) {
|
||||
Bun::throwThisTypeError(*globalObject, scope, "JSFoo"_s, "prop"_s);
|
||||
return {};
|
||||
}
|
||||
return JSValue::encode(jsBoolean(thisObject->value()));
|
||||
}
|
||||
|
||||
// Function
|
||||
JSC_DEFINE_HOST_FUNCTION(jsFooProtoFuncMethod, (JSGlobalObject* globalObject, CallFrame* callFrame)) {
|
||||
VM& vm = globalObject->vm();
|
||||
auto scope = DECLARE_THROW_SCOPE(vm);
|
||||
auto* thisObject = jsDynamicCast<JSFoo*>(callFrame->thisValue());
|
||||
if (UNLIKELY(!thisObject)) {
|
||||
Bun::throwThisTypeError(*globalObject, scope, "Foo"_s, "method"_s);
|
||||
return {};
|
||||
}
|
||||
return JSValue::encode(thisObject->doSomething(vm, globalObject));
|
||||
}
|
||||
```
|
||||
|
||||
## Constructor Class
|
||||
|
||||
```cpp
|
||||
class JSFooConstructor final : public JSC::InternalFunction {
|
||||
public:
|
||||
using Base = JSC::InternalFunction;
|
||||
static constexpr unsigned StructureFlags = Base::StructureFlags;
|
||||
|
||||
static JSFooConstructor* create(JSC::VM& vm, JSC::Structure* structure, JSC::JSObject* prototype) {
|
||||
JSFooConstructor* constructor = new (NotNull, JSC::allocateCell<JSFooConstructor>(vm)) JSFooConstructor(vm, structure);
|
||||
constructor->finishCreation(vm, prototype);
|
||||
return constructor;
|
||||
}
|
||||
|
||||
DECLARE_INFO;
|
||||
|
||||
template<typename CellType, JSC::SubspaceAccess>
|
||||
static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm) { return &vm.internalFunctionSpace(); }
|
||||
|
||||
static JSC::Structure* createStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::JSValue prototype) {
|
||||
return JSC::Structure::create(vm, globalObject, prototype, JSC::TypeInfo(JSC::InternalFunctionType, StructureFlags), info());
|
||||
}
|
||||
|
||||
private:
|
||||
JSFooConstructor(JSC::VM& vm, JSC::Structure* structure) : Base(vm, structure, callFoo, constructFoo) {}
|
||||
|
||||
void finishCreation(JSC::VM& vm, JSC::JSObject* prototype) {
|
||||
Base::finishCreation(vm, 0, "Foo"_s);
|
||||
putDirectWithoutTransition(vm, vm.propertyNames->prototype, prototype, JSC::PropertyAttribute::DontEnum | JSC::PropertyAttribute::DontDelete | JSC::PropertyAttribute::ReadOnly);
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
## Structure Caching
|
||||
|
||||
Add to `ZigGlobalObject.h`:
|
||||
|
||||
```cpp
|
||||
JSC::LazyClassStructure m_JSFooClassStructure;
|
||||
```
|
||||
|
||||
Initialize in `ZigGlobalObject.cpp`:
|
||||
|
||||
```cpp
|
||||
m_JSFooClassStructure.initLater([](LazyClassStructure::Initializer& init) {
|
||||
Bun::initJSFooClassStructure(init);
|
||||
});
|
||||
```
|
||||
|
||||
Visit in `visitChildrenImpl`:
|
||||
|
||||
```cpp
|
||||
m_JSFooClassStructure.visit(visitor);
|
||||
```
|
||||
|
||||
## Expose to Zig
|
||||
|
||||
```cpp
|
||||
extern "C" JSC::EncodedJSValue Bun__JSFooConstructor(Zig::GlobalObject* globalObject) {
|
||||
return JSValue::encode(globalObject->m_JSFooClassStructure.constructor(globalObject));
|
||||
}
|
||||
|
||||
extern "C" EncodedJSValue Bun__Foo__toJS(Zig::GlobalObject* globalObject, Foo* foo) {
|
||||
auto* structure = globalObject->m_JSFooClassStructure.get(globalObject);
|
||||
return JSValue::encode(JSFoo::create(globalObject->vm(), structure, globalObject, WTFMove(foo)));
|
||||
}
|
||||
```
|
||||
|
||||
Include `#include "root.h"` at the top of C++ files.
|
||||
@@ -1,206 +0,0 @@
|
||||
---
|
||||
name: implementing-jsc-classes-zig
|
||||
description: Creates JavaScript classes using Bun's Zig bindings generator (.classes.ts). Use when implementing new JS APIs in Zig with JSC integration.
|
||||
---
|
||||
|
||||
# Bun's JavaScriptCore Class Bindings Generator
|
||||
|
||||
Bridge JavaScript and Zig through `.classes.ts` definitions and Zig implementations.
|
||||
|
||||
## Architecture
|
||||
|
||||
1. **Zig Implementation** (.zig files)
|
||||
2. **JavaScript Interface Definition** (.classes.ts files)
|
||||
3. **Generated Code** (C++/Zig files connecting them)
|
||||
|
||||
## Class Definition (.classes.ts)
|
||||
|
||||
```typescript
|
||||
define({
|
||||
name: "TextDecoder",
|
||||
constructor: true,
|
||||
JSType: "object",
|
||||
finalize: true,
|
||||
proto: {
|
||||
decode: { args: 1 },
|
||||
encoding: { getter: true, cache: true },
|
||||
fatal: { getter: true },
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
Options:
|
||||
|
||||
- `name`: Class name
|
||||
- `constructor`: Has public constructor
|
||||
- `JSType`: "object", "function", etc.
|
||||
- `finalize`: Needs cleanup
|
||||
- `proto`: Properties/methods
|
||||
- `cache`: Cache property values via WriteBarrier
|
||||
|
||||
## Zig Implementation
|
||||
|
||||
```zig
|
||||
pub const TextDecoder = struct {
|
||||
pub const js = JSC.Codegen.JSTextDecoder;
|
||||
pub const toJS = js.toJS;
|
||||
pub const fromJS = js.fromJS;
|
||||
pub const fromJSDirect = js.fromJSDirect;
|
||||
|
||||
encoding: []const u8,
|
||||
fatal: bool,
|
||||
|
||||
pub fn constructor(
|
||||
globalObject: *JSGlobalObject,
|
||||
callFrame: *JSC.CallFrame,
|
||||
) bun.JSError!*TextDecoder {
|
||||
return bun.new(TextDecoder, .{ .encoding = "utf-8", .fatal = false });
|
||||
}
|
||||
|
||||
pub fn decode(
|
||||
this: *TextDecoder,
|
||||
globalObject: *JSGlobalObject,
|
||||
callFrame: *JSC.CallFrame,
|
||||
) bun.JSError!JSC.JSValue {
|
||||
const args = callFrame.arguments();
|
||||
if (args.len < 1 or args.ptr[0].isUndefinedOrNull()) {
|
||||
return globalObject.throw("Input cannot be null", .{});
|
||||
}
|
||||
return JSC.JSValue.jsString(globalObject, "result");
|
||||
}
|
||||
|
||||
pub fn getEncoding(this: *TextDecoder, globalObject: *JSGlobalObject) JSC.JSValue {
|
||||
return JSC.JSValue.createStringFromUTF8(globalObject, this.encoding);
|
||||
}
|
||||
|
||||
fn deinit(this: *TextDecoder) void {
|
||||
// Release resources
|
||||
}
|
||||
|
||||
pub fn finalize(this: *TextDecoder) void {
|
||||
this.deinit();
|
||||
bun.destroy(this);
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
**Key patterns:**
|
||||
|
||||
- Use `bun.JSError!JSValue` return type for error handling
|
||||
- Use `globalObject` not `ctx`
|
||||
- `deinit()` for cleanup, `finalize()` called by GC
|
||||
- Update `src/bun.js/bindings/generated_classes_list.zig`
|
||||
|
||||
## CallFrame Access
|
||||
|
||||
```zig
|
||||
const args = callFrame.arguments();
|
||||
const first_arg = args.ptr[0]; // Access as slice
|
||||
const argCount = args.len;
|
||||
const thisValue = callFrame.thisValue();
|
||||
```
|
||||
|
||||
## Property Caching
|
||||
|
||||
For `cache: true` properties, generated accessors:
|
||||
|
||||
```zig
|
||||
// Get cached value
|
||||
pub fn encodingGetCached(thisValue: JSC.JSValue) ?JSC.JSValue {
|
||||
const result = TextDecoderPrototype__encodingGetCachedValue(thisValue);
|
||||
if (result == .zero) return null;
|
||||
return result;
|
||||
}
|
||||
|
||||
// Set cached value
|
||||
pub fn encodingSetCached(thisValue: JSC.JSValue, globalObject: *JSC.JSGlobalObject, value: JSC.JSValue) void {
|
||||
TextDecoderPrototype__encodingSetCachedValue(thisValue, globalObject, value);
|
||||
}
|
||||
```
|
||||
|
||||
## Error Handling
|
||||
|
||||
```zig
|
||||
pub fn method(this: *MyClass, globalObject: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSC.JSValue {
|
||||
const args = callFrame.arguments();
|
||||
if (args.len < 1) {
|
||||
return globalObject.throw("Missing required argument", .{});
|
||||
}
|
||||
return JSC.JSValue.jsString(globalObject, "Success!");
|
||||
}
|
||||
```
|
||||
|
||||
## Memory Management
|
||||
|
||||
```zig
|
||||
pub fn deinit(this: *TextDecoder) void {
|
||||
this._encoding.deref();
|
||||
if (this.buffer) |buffer| {
|
||||
bun.default_allocator.free(buffer);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn finalize(this: *TextDecoder) void {
|
||||
JSC.markBinding(@src());
|
||||
this.deinit();
|
||||
bun.default_allocator.destroy(this);
|
||||
}
|
||||
```
|
||||
|
||||
## Creating a New Binding
|
||||
|
||||
1. Define interface in `.classes.ts`:
|
||||
|
||||
```typescript
|
||||
define({
|
||||
name: "MyClass",
|
||||
constructor: true,
|
||||
finalize: true,
|
||||
proto: {
|
||||
myMethod: { args: 1 },
|
||||
myProperty: { getter: true, cache: true },
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
2. Implement in `.zig`:
|
||||
|
||||
```zig
|
||||
pub const MyClass = struct {
|
||||
pub const js = JSC.Codegen.JSMyClass;
|
||||
pub const toJS = js.toJS;
|
||||
pub const fromJS = js.fromJS;
|
||||
|
||||
value: []const u8,
|
||||
|
||||
pub const new = bun.TrivialNew(@This());
|
||||
|
||||
pub fn constructor(globalObject: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!*MyClass {
|
||||
return MyClass.new(.{ .value = "" });
|
||||
}
|
||||
|
||||
pub fn myMethod(this: *MyClass, globalObject: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSC.JSValue {
|
||||
return JSC.JSValue.jsUndefined();
|
||||
}
|
||||
|
||||
pub fn getMyProperty(this: *MyClass, globalObject: *JSGlobalObject) JSC.JSValue {
|
||||
return JSC.JSValue.jsString(globalObject, this.value);
|
||||
}
|
||||
|
||||
pub fn deinit(this: *MyClass) void {}
|
||||
|
||||
pub fn finalize(this: *MyClass) void {
|
||||
this.deinit();
|
||||
bun.destroy(this);
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
3. Add to `src/bun.js/bindings/generated_classes_list.zig`
|
||||
|
||||
## Generated Components
|
||||
|
||||
- **C++ Classes**: `JSMyClass`, `JSMyClassPrototype`, `JSMyClassConstructor`
|
||||
- **Method Bindings**: `MyClassPrototype__myMethodCallback`
|
||||
- **Property Accessors**: `MyClassPrototype__myPropertyGetterWrap`
|
||||
- **Zig Bindings**: External function declarations, cached value accessors
|
||||
@@ -1,222 +0,0 @@
|
||||
---
|
||||
name: writing-bundler-tests
|
||||
description: Guides writing bundler tests using itBundled/expectBundled in test/bundler/. Use when creating or modifying bundler, transpiler, or code transformation tests.
|
||||
---
|
||||
|
||||
# Writing Bundler Tests
|
||||
|
||||
Bundler tests use `itBundled()` from `test/bundler/expectBundled.ts` to test Bun's bundler.
|
||||
|
||||
## Basic Usage
|
||||
|
||||
```typescript
|
||||
import { describe } from "bun:test";
|
||||
import { itBundled, dedent } from "./expectBundled";
|
||||
|
||||
describe("bundler", () => {
|
||||
itBundled("category/TestName", {
|
||||
files: {
|
||||
"index.js": `console.log("hello");`,
|
||||
},
|
||||
run: {
|
||||
stdout: "hello",
|
||||
},
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
Test ID format: `category/TestName` (e.g., `banner/CommentBanner`, `minify/Empty`)
|
||||
|
||||
## File Setup
|
||||
|
||||
```typescript
|
||||
{
|
||||
files: {
|
||||
"index.js": `console.log("test");`,
|
||||
"lib.ts": `export const foo = 123;`,
|
||||
"nested/file.js": `export default {};`,
|
||||
},
|
||||
entryPoints: ["index.js"], // defaults to first file
|
||||
runtimeFiles: { // written AFTER bundling
|
||||
"extra.js": `console.log("added later");`,
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
## Bundler Options
|
||||
|
||||
```typescript
|
||||
{
|
||||
outfile: "/out.js",
|
||||
outdir: "/out",
|
||||
format: "esm" | "cjs" | "iife",
|
||||
target: "bun" | "browser" | "node",
|
||||
|
||||
// Minification
|
||||
minifyWhitespace: true,
|
||||
minifyIdentifiers: true,
|
||||
minifySyntax: true,
|
||||
|
||||
// Code manipulation
|
||||
banner: "// copyright",
|
||||
footer: "// end",
|
||||
define: { "PROD": "true" },
|
||||
external: ["lodash"],
|
||||
|
||||
// Advanced
|
||||
sourceMap: "inline" | "external",
|
||||
splitting: true,
|
||||
treeShaking: true,
|
||||
drop: ["console"],
|
||||
}
|
||||
```
|
||||
|
||||
## Runtime Verification
|
||||
|
||||
```typescript
|
||||
{
|
||||
run: {
|
||||
stdout: "expected output", // exact match
|
||||
stdout: /regex/, // pattern match
|
||||
partialStdout: "contains this", // substring
|
||||
stderr: "error output",
|
||||
exitCode: 1,
|
||||
env: { NODE_ENV: "production" },
|
||||
runtime: "bun" | "node",
|
||||
|
||||
// Runtime errors
|
||||
error: "ReferenceError: x is not defined",
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
## Bundle Errors/Warnings
|
||||
|
||||
```typescript
|
||||
{
|
||||
bundleErrors: {
|
||||
"/file.js": ["error message 1", "error message 2"],
|
||||
},
|
||||
bundleWarnings: {
|
||||
"/file.js": ["warning message"],
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
## Dead Code Elimination (DCE)
|
||||
|
||||
Add markers in source code:
|
||||
|
||||
```javascript
|
||||
// KEEP - this should survive
|
||||
const used = 1;
|
||||
|
||||
// REMOVE - this should be eliminated
|
||||
const unused = 2;
|
||||
```
|
||||
|
||||
```typescript
|
||||
{
|
||||
dce: true,
|
||||
dceKeepMarkerCount: 5, // expected KEEP markers
|
||||
}
|
||||
```
|
||||
|
||||
## Capture Pattern
|
||||
|
||||
Verify exact transpilation with `capture()`:
|
||||
|
||||
```typescript
|
||||
itBundled("string/Folding", {
|
||||
files: {
|
||||
"index.ts": `capture(\`\${1 + 1}\`);`,
|
||||
},
|
||||
capture: ['"2"'], // expected captured value
|
||||
minifySyntax: true,
|
||||
});
|
||||
```
|
||||
|
||||
## Post-Bundle Assertions
|
||||
|
||||
```typescript
|
||||
{
|
||||
onAfterBundle(api) {
|
||||
api.expectFile("out.js").toContain("console.log");
|
||||
api.assertFileExists("out.js");
|
||||
|
||||
const content = api.readFile("out.js");
|
||||
expect(content).toMatchSnapshot();
|
||||
|
||||
const values = api.captureFile("out.js");
|
||||
expect(values).toEqual(["2"]);
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
## Common Patterns
|
||||
|
||||
**Simple output verification:**
|
||||
|
||||
```typescript
|
||||
itBundled("banner/Comment", {
|
||||
banner: "// copyright",
|
||||
files: { "a.js": `console.log("Hello")` },
|
||||
onAfterBundle(api) {
|
||||
api.expectFile("out.js").toContain("// copyright");
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
**Multi-file CJS/ESM interop:**
|
||||
|
||||
```typescript
|
||||
itBundled("cjs/ImportSyntax", {
|
||||
files: {
|
||||
"entry.js": `import lib from './lib.cjs'; console.log(lib);`,
|
||||
"lib.cjs": `exports.foo = 'bar';`,
|
||||
},
|
||||
run: { stdout: '{"foo":"bar"}' },
|
||||
});
|
||||
```
|
||||
|
||||
**Error handling:**
|
||||
|
||||
```typescript
|
||||
itBundled("edgecase/InvalidLoader", {
|
||||
files: { "index.js": `...` },
|
||||
bundleErrors: {
|
||||
"index.js": ["Unsupported loader type"],
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
## Test Organization
|
||||
|
||||
```text
|
||||
test/bundler/
|
||||
├── bundler_banner.test.ts
|
||||
├── bundler_string.test.ts
|
||||
├── bundler_minify.test.ts
|
||||
├── bundler_cjs.test.ts
|
||||
├── bundler_edgecase.test.ts
|
||||
├── bundler_splitting.test.ts
|
||||
├── css/
|
||||
├── transpiler/
|
||||
└── expectBundled.ts
|
||||
```
|
||||
|
||||
## Running Tests
|
||||
|
||||
```bash
|
||||
bun bd test test/bundler/bundler_banner.test.ts
|
||||
BUN_BUNDLER_TEST_FILTER="banner/Comment" bun bd test bundler_banner.test.ts
|
||||
BUN_BUNDLER_TEST_DEBUG=1 bun bd test bundler_minify.test.ts
|
||||
```
|
||||
|
||||
## Key Points
|
||||
|
||||
- Use `dedent` for readable multi-line code
|
||||
- File paths are relative (e.g., `/index.js`)
|
||||
- Use `capture()` to verify exact transpilation results
|
||||
- Use `.toMatchSnapshot()` for complex outputs
|
||||
- Pass array to `run` for multiple test scenarios
|
||||
@@ -1,94 +0,0 @@
|
||||
---
|
||||
name: writing-dev-server-tests
|
||||
description: Guides writing HMR/Dev Server tests in test/bake/. Use when creating or modifying dev server, hot reloading, or bundling tests.
|
||||
---
|
||||
|
||||
# Writing HMR/Dev Server Tests
|
||||
|
||||
Dev server tests validate hot-reloading robustness and reliability.
|
||||
|
||||
## File Structure
|
||||
|
||||
- `test/bake/bake-harness.ts` - shared utilities: `devTest`, `prodTest`, `devAndProductionTest`, `Dev` class, `Client` class
|
||||
- `test/bake/client-fixture.mjs` - subprocess for `Client` (page loading, IPC queries)
|
||||
- `test/bake/dev/*.test.ts` - dev server and hot reload tests
|
||||
- `test/bake/dev-and-prod.ts` - tests running on both dev and production mode
|
||||
|
||||
## Test Categories
|
||||
|
||||
- `bundle.test.ts` - DevServer-specific bundling bugs
|
||||
- `css.test.ts` - CSS bundling issues
|
||||
- `plugins.test.ts` - development mode plugins
|
||||
- `ecosystem.test.ts` - library compatibility (prefer concrete bugs over full package tests)
|
||||
- `esm.test.ts` - ESM features in development
|
||||
- `html.test.ts` - HTML file handling
|
||||
- `react-spa.test.ts` - React, react-refresh transform, server components
|
||||
- `sourcemap.test.ts` - source map correctness
|
||||
|
||||
## devTest Basics
|
||||
|
||||
```ts
|
||||
import { devTest, emptyHtmlFile } from "../bake-harness";
|
||||
|
||||
devTest("html file is watched", {
|
||||
files: {
|
||||
"index.html": emptyHtmlFile({
|
||||
scripts: ["/script.ts"],
|
||||
body: "<h1>Hello</h1>",
|
||||
}),
|
||||
"script.ts": `console.log("hello");`,
|
||||
},
|
||||
async test(dev) {
|
||||
await dev.fetch("/").expect.toInclude("<h1>Hello</h1>");
|
||||
await dev.patch("index.html", { find: "Hello", replace: "World" });
|
||||
await dev.fetch("/").expect.toInclude("<h1>World</h1>");
|
||||
|
||||
await using c = await dev.client("/");
|
||||
await c.expectMessage("hello");
|
||||
|
||||
await c.expectReload(async () => {
|
||||
await dev.patch("index.html", { find: "World", replace: "Bar" });
|
||||
});
|
||||
await c.expectMessage("hello");
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
## Key APIs
|
||||
|
||||
- **`files`**: Initial filesystem state
|
||||
- **`dev.fetch()`**: HTTP requests
|
||||
- **`dev.client()`**: Opens browser instance
|
||||
- **`dev.write/patch/delete`**: Filesystem mutations (wait for hot-reload automatically)
|
||||
- **`c.expectMessage()`**: Assert console.log output
|
||||
- **`c.expectReload()`**: Wrap code that causes hard reload
|
||||
|
||||
**Important**: Use `dev.write/patch/delete` instead of `node:fs` - they wait for hot-reload.
|
||||
|
||||
## Testing Errors
|
||||
|
||||
```ts
|
||||
devTest("import then create", {
|
||||
files: {
|
||||
"index.html": `<!DOCTYPE html><html><head></head><body><script type="module" src="/script.ts"></script></body></html>`,
|
||||
"script.ts": `import data from "./data"; console.log(data);`,
|
||||
},
|
||||
async test(dev) {
|
||||
const c = await dev.client("/", {
|
||||
errors: ['script.ts:1:18: error: Could not resolve: "./data"'],
|
||||
});
|
||||
await c.expectReload(async () => {
|
||||
await dev.write("data.ts", "export default 'data';");
|
||||
});
|
||||
await c.expectMessage("data");
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
Specify expected errors with the `errors` option:
|
||||
|
||||
```ts
|
||||
await dev.delete("other.ts", {
|
||||
errors: ['index.ts:1:16: error: Could not resolve: "./other"'],
|
||||
});
|
||||
```
|
||||
@@ -1,268 +0,0 @@
---
name: zig-system-calls
description: Guides using bun.sys for system calls and file I/O in Zig. Use when implementing file operations instead of std.fs or std.posix.
---

# System Calls & File I/O in Zig

Use `bun.sys` instead of `std.fs` or `std.posix` for cross-platform syscalls with proper error handling.

## bun.sys.File (Preferred)

For most file operations, use the `bun.sys.File` wrapper:

```zig
const File = bun.sys.File;

const file = switch (File.open(path, bun.O.RDWR, 0o644)) {
    .result => |f| f,
    .err => |err| return .{ .err = err },
};
defer file.close();

// Read/write
_ = try file.read(buffer).unwrap();
_ = try file.writeAll(data).unwrap();

// Get file info
const stat = try file.stat().unwrap();
const size = try file.getEndPos().unwrap();

// std.io compatible
const reader = file.reader();
const writer = file.writer();
```

### Complete Example

```zig
const File = bun.sys.File;

pub fn writeFile(path: [:0]const u8, data: []const u8) File.WriteError!void {
    const file = switch (File.open(path, bun.O.WRONLY | bun.O.CREAT | bun.O.TRUNC, 0o664)) {
        .result => |f| f,
        .err => |err| return err.toError(),
    };
    defer file.close();

    _ = switch (file.writeAll(data)) {
        .result => {},
        .err => |err| return err.toError(),
    };
}
```

## Why bun.sys?

| Aspect      | bun.sys                          | std.fs/std.posix    |
| ----------- | -------------------------------- | ------------------- |
| Return Type | `Maybe(T)` with detailed Error   | Generic error union |
| Windows     | Full support with libuv fallback | Limited/POSIX-only  |
| Error Info  | errno, syscall tag, path, fd     | errno only          |
| EINTR       | Automatic retry                  | Manual handling     |

## Error Handling with Maybe(T)

`bun.sys` functions return `Maybe(T)` - a tagged union:

```zig
const sys = bun.sys;

// Pattern 1: Switch on result/error
switch (sys.read(fd, buffer)) {
    .result => |bytes_read| {
        // use bytes_read
    },
    .err => |err| {
        // err.errno, err.syscall, err.fd, err.path
        if (err.getErrno() == .AGAIN) {
            // handle EAGAIN
        }
    },
}

// Pattern 2: Unwrap with try (converts to Zig error)
const bytes = try sys.read(fd, buffer).unwrap();

// Pattern 3: Unwrap with default
const value = sys.stat(path).unwrapOr(default_stat);
```

## Low-Level File Operations

Only use these when `bun.sys.File` doesn't meet your needs.

### Opening Files

```zig
const sys = bun.sys;

// Use bun.O flags (cross-platform normalized)
const fd = switch (sys.open(path, bun.O.RDONLY, 0)) {
    .result => |fd| fd,
    .err => |err| return .{ .err = err },
};
defer fd.close();

// Common flags
bun.O.RDONLY, bun.O.WRONLY, bun.O.RDWR
bun.O.CREAT, bun.O.TRUNC, bun.O.APPEND
bun.O.NONBLOCK, bun.O.DIRECTORY
```

### Reading & Writing

```zig
// Single read (may return less than buffer size)
switch (sys.read(fd, buffer)) {
    .result => |n| { /* n bytes read */ },
    .err => |err| { /* handle error */ },
}

// Read until EOF or buffer full
const total = try sys.readAll(fd, buffer).unwrap();

// Position-based read/write
sys.pread(fd, buffer, offset)
sys.pwrite(fd, data, offset)

// Vector I/O
sys.readv(fd, iovecs)
sys.writev(fd, iovecs)
```

### File Info

```zig
sys.stat(path)        // Follow symlinks
sys.lstat(path)       // Don't follow symlinks
sys.fstat(fd)         // From file descriptor
sys.fstatat(fd, path)

// Linux-only: faster selective stat
sys.statx(path, &.{ .size, .mtime })
```

### Path Operations

```zig
sys.unlink(path)
sys.unlinkat(dir_fd, path)
sys.rename(from, to)
sys.renameat(from_dir, from, to_dir, to)
sys.readlink(path, buf)
sys.readlinkat(fd, path, buf)
sys.link(T, src, dest)
sys.linkat(src_fd, src, dest_fd, dest)
sys.symlink(target, dest)
sys.symlinkat(target, dirfd, dest)
sys.mkdir(path, mode)
sys.mkdirat(dir_fd, path, mode)
sys.rmdir(path)
```

### Permissions

```zig
sys.chmod(path, mode)
sys.fchmod(fd, mode)
sys.fchmodat(fd, path, mode, flags)
sys.chown(path, uid, gid)
sys.fchown(fd, uid, gid)
```

### Closing File Descriptors

Close is on `bun.FD`:

```zig
fd.close(); // Asserts on error (use in defer)

// Or if you need error info:
if (fd.closeAllowingBadFileDescriptor(null)) |err| {
    // handle error
}
```

## Directory Operations

```zig
var buf: bun.PathBuffer = undefined;
const cwd = try sys.getcwd(&buf).unwrap();
const cwdZ = try sys.getcwdZ(&buf).unwrap(); // Zero-terminated
sys.chdir(path, destination)
```

### Directory Iteration

Use `bun.DirIterator` instead of `std.fs.Dir.Iterator`:

```zig
var iter = bun.iterateDir(dir_fd);
while (true) {
    switch (iter.next()) {
        .result => |entry| {
            if (entry) |e| {
                const name = e.name.slice();
                const kind = e.kind; // .file, .directory, .sym_link, etc.
            } else {
                break; // End of directory
            }
        },
        .err => |err| return .{ .err = err },
    }
}
```

## Socket Operations

**Important**: `bun.sys` has limited socket support. For network I/O:

- **Non-blocking sockets**: Use `uws.Socket` (libuwebsockets) exclusively
- **Pipes/blocking I/O**: Use `PipeReader.zig` and `PipeWriter.zig`

Available in bun.sys:

```zig
sys.setsockopt(fd, level, optname, value)
sys.socketpair(domain, socktype, protocol, nonblocking_status)
```

Do NOT use `bun.sys` for socket read/write - use `uws.Socket` instead.

## Other Operations

```zig
sys.ftruncate(fd, size)
sys.lseek(fd, offset, whence)
sys.dup(fd)
sys.dupWithFlags(fd, flags)
sys.fcntl(fd, cmd, arg)
sys.pipe()
sys.mmap(...)
sys.munmap(memory)
sys.access(path, mode)
sys.futimens(fd, atime, mtime)
sys.utimens(path, atime, mtime)
```

## Error Type

```zig
const err: bun.sys.Error = ...;
err.errno      // Raw errno value
err.getErrno() // As std.posix.E enum
err.syscall    // Which syscall failed (Tag enum)
err.fd         // Optional: file descriptor
err.path       // Optional: path string
```

## Key Points

- Prefer `bun.sys.File` wrapper for most file operations
- Use low-level `bun.sys` functions only when needed
- Use `bun.O.*` flags instead of `std.os.O.*`
- Handle `Maybe(T)` with switch or `.unwrap()`
- Use `defer fd.close()` for cleanup
- EINTR is handled automatically in most functions
- For sockets, use `uws.Socket` not `bun.sys`
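
Putting these points together, here is a minimal sketch of a read helper built only from the APIs shown above. The helper name `readFileIntoBuffer` and the `@import("bun")` line are illustrative assumptions, not code from the Bun tree:

```zig
const bun = @import("bun"); // import path assumed; match how neighboring files import `bun`
const File = bun.sys.File;

/// Hypothetical helper: open `path` read-only, perform a single read into
/// `buffer`, and return the number of bytes read.
pub fn readFileIntoBuffer(path: [:0]const u8, buffer: []u8) !usize {
    // Maybe(T) handled with a switch (Pattern 1); err.toError() converts to a Zig error
    const file = switch (File.open(path, bun.O.RDONLY, 0)) {
        .result => |f| f,
        .err => |err| return err.toError(),
    };
    // Always pair a successful open with a deferred close
    defer file.close();

    // Maybe(T) handled with .unwrap() (Pattern 2); a single read may return
    // fewer bytes than buffer.len
    return try file.read(buffer).unwrap();
}
```
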
147
.coderabbit.yaml
147
.coderabbit.yaml
@@ -1,147 +0,0 @@
|
||||
language: en-US
|
||||
|
||||
issue_enrichment:
|
||||
auto_enrich:
|
||||
enabled: false
|
||||
|
||||
reviews:
|
||||
profile: assertive
|
||||
request_changes_workflow: false
|
||||
high_level_summary: false
|
||||
high_level_summary_placeholder: "@coderabbitai summary"
|
||||
high_level_summary_in_walkthrough: true
|
||||
auto_title_placeholder: "@coderabbitai"
|
||||
review_status: false
|
||||
commit_status: false
|
||||
fail_commit_status: false
|
||||
collapse_walkthrough: false
|
||||
changed_files_summary: true
|
||||
sequence_diagrams: false
|
||||
estimate_code_review_effort: false
|
||||
assess_linked_issues: true
|
||||
related_issues: true
|
||||
related_prs: true
|
||||
suggested_labels: false
|
||||
suggested_reviewers: true
|
||||
in_progress_fortune: false
|
||||
poem: false
|
||||
abort_on_close: true
|
||||
|
||||
path_filters:
|
||||
- "!test/js/node/test/"
|
||||
|
||||
auto_review:
|
||||
enabled: true
|
||||
auto_incremental_review: true
|
||||
drafts: false
|
||||
|
||||
finishing_touches:
|
||||
docstrings:
|
||||
enabled: false
|
||||
unit_tests:
|
||||
enabled: false
|
||||
|
||||
pre_merge_checks:
|
||||
docstrings:
|
||||
mode: off
|
||||
title:
|
||||
mode: warning
|
||||
description:
|
||||
mode: warning
|
||||
issue_assessment:
|
||||
mode: warning
|
||||
|
||||
tools:
|
||||
shellcheck:
|
||||
enabled: true
|
||||
ruff:
|
||||
enabled: true
|
||||
markdownlint:
|
||||
enabled: true
|
||||
github-checks:
|
||||
enabled: true
|
||||
timeout_ms: 90000
|
||||
languagetool:
|
||||
enabled: true
|
||||
enabled_only: false
|
||||
level: default
|
||||
biome:
|
||||
enabled: true
|
||||
hadolint:
|
||||
enabled: true
|
||||
swiftlint:
|
||||
enabled: true
|
||||
phpstan:
|
||||
enabled: true
|
||||
level: default
|
||||
phpmd:
|
||||
enabled: true
|
||||
phpcs:
|
||||
enabled: true
|
||||
golangci-lint:
|
||||
enabled: true
|
||||
yamllint:
|
||||
enabled: true
|
||||
gitleaks:
|
||||
enabled: true
|
||||
checkov:
|
||||
enabled: true
|
||||
detekt:
|
||||
enabled: true
|
||||
eslint:
|
||||
enabled: true
|
||||
flake8:
|
||||
enabled: true
|
||||
rubocop:
|
||||
enabled: true
|
||||
buf:
|
||||
enabled: true
|
||||
regal:
|
||||
enabled: true
|
||||
actionlint:
|
||||
enabled: true
|
||||
pmd:
|
||||
enabled: true
|
||||
clang:
|
||||
enabled: true
|
||||
cppcheck:
|
||||
enabled: true
|
||||
semgrep:
|
||||
enabled: true
|
||||
circleci:
|
||||
enabled: true
|
||||
clippy:
|
||||
enabled: true
|
||||
sqlfluff:
|
||||
enabled: true
|
||||
prismaLint:
|
||||
enabled: true
|
||||
pylint:
|
||||
enabled: true
|
||||
oxc:
|
||||
enabled: true
|
||||
shopifyThemeCheck:
|
||||
enabled: true
|
||||
luacheck:
|
||||
enabled: true
|
||||
brakeman:
|
||||
enabled: true
|
||||
dotenvLint:
|
||||
enabled: true
|
||||
htmlhint:
|
||||
enabled: true
|
||||
checkmake:
|
||||
enabled: true
|
||||
osvScanner:
|
||||
enabled: true
|
||||
|
||||
chat:
|
||||
auto_reply: true
|
||||
|
||||
knowledge_base:
|
||||
opt_out: false
|
||||
code_guidelines:
|
||||
enabled: true
|
||||
filePatterns:
|
||||
- "**/.cursor/rules/*.mdc"
|
||||
- "**/CLAUDE.md"
|
||||
41
.cursor/rules/building-bun.mdc
Normal file
41
.cursor/rules/building-bun.mdc
Normal file
@@ -0,0 +1,41 @@
---
description:
globs: src/**/*.cpp,src/**/*.zig
alwaysApply: false
---

### Build Commands

- **Build debug version**: `bun bd` or `bun run build:debug`
  - Creates a debug build at `./build/debug/bun-debug`
  - Compilation takes ~2.5 minutes
- **Run tests with your debug build**: `bun bd test <test-file>`
  - **CRITICAL**: Never use `bun test` directly - it won't include your changes
- **Run any command with debug build**: `bun bd <command>`

### Run a file

To run a file, use:

```sh
bun bd <file> <...args>
```

**CRITICAL**: Never use `bun <file>` directly. It will not have your changes.

### Logging

`BUN_DEBUG_$(SCOPE)=1` enables debug logs for a specific debug log scope.

Debug logs look like this:

```zig
const log = bun.Output.scoped(.${SCOPE}, false);

// ...later
log("MY DEBUG LOG", .{})
```
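
For example, with a hypothetical scope named `my_feature` (not a real scope from the codebase), the matching environment variable is `BUN_DEBUG_my_feature=1`:

```zig
// Hypothetical scope name; the env var is derived from it.
const log = bun.Output.scoped(.my_feature, false);

fn doWork() void {
    // Printed only when running with BUN_DEBUG_my_feature=1
    log("starting work on item {d}", .{42});
}
```
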
### Code Generation

Code generation happens automatically as part of the build process. There are no commands to run.

139
.cursor/rules/dev-server-tests.mdc
Normal file
139
.cursor/rules/dev-server-tests.mdc
Normal file
@@ -0,0 +1,139 @@
|
||||
---
|
||||
description: Writing HMR/Dev Server tests
|
||||
globs: test/bake/*
|
||||
---
|
||||
|
||||
# Writing HMR/Dev Server tests
|
||||
|
||||
Dev server tests validate that hot-reloading is robust, correct, and reliable. Remember to write thorough, yet concise tests.
|
||||
|
||||
## File Structure
|
||||
|
||||
- `test/bake/bake-harness.ts` - shared utilities and test harness
|
||||
- primary test functions `devTest` / `prodTest` / `devAndProductionTest`
|
||||
- class `Dev` (controls subprocess for dev server)
|
||||
- class `Client` (controls a happy-dom subprocess for having the page open)
|
||||
- more helpers
|
||||
- `test/bake/client-fixture.mjs` - the subprocess that `Client` controls. It loads a page and uses IPC to query parts of the page, run JavaScript, and more.
|
||||
- `test/bake/dev/*.test.ts` - these call `devTest` to test dev server and hot reloading
|
||||
- `test/bake/dev-and-prod.ts` - these use `devAndProductionTest` to run the same test in both dev and production mode. These tests cannot really test hot reloading, for obvious reasons.
|
||||
|
||||
## Categories
|
||||
|
||||
bundle.test.ts - Bundle tests are tests concerning bundling bugs that only occur in DevServer.
|
||||
css.test.ts - CSS tests concern bundling bugs with CSS files
|
||||
plugins.test.ts - Plugin tests concern plugins in development mode.
|
||||
ecosystem.test.ts - These tests involve ensuring certain libraries are correct. It is preferred to test more concrete bugs than testing entire packages.
|
||||
esm.test.ts - ESM tests are about various esm features in development mode.
|
||||
html.test.ts - HTML tests are tests relating to HTML files themselves.
|
||||
react-spa.test.ts - Tests relating to React, our react-refresh transform, and basic server component transforms.
|
||||
sourcemap.test.ts - Tests verifying source-maps are correct.
|
||||
|
||||
## `devTest` Basics
|
||||
|
||||
A test takes in two primary inputs: `files` and `async test(dev) {`
|
||||
|
||||
```ts
|
||||
import { devTest, emptyHtmlFile } from "../bake-harness";
|
||||
|
||||
devTest("html file is watched", {
|
||||
files: {
|
||||
"index.html": emptyHtmlFile({
|
||||
scripts: ["/script.ts"],
|
||||
body: "<h1>Hello</h1>",
|
||||
}),
|
||||
"script.ts": `
|
||||
console.log("hello");
|
||||
`,
|
||||
},
|
||||
async test(dev) {
|
||||
await dev.fetch("/").expect.toInclude("<h1>Hello</h1>");
|
||||
await dev.fetch("/").expect.toInclude("<h1>Hello</h1>");
|
||||
await dev.patch("index.html", {
|
||||
find: "Hello",
|
||||
replace: "World",
|
||||
});
|
||||
await dev.fetch("/").expect.toInclude("<h1>World</h1>");
|
||||
|
||||
// Works
|
||||
await using c = await dev.client("/");
|
||||
await c.expectMessage("hello");
|
||||
|
||||
// Editing HTML reloads
|
||||
await c.expectReload(async () => {
|
||||
await dev.patch("index.html", {
|
||||
find: "World",
|
||||
replace: "Hello",
|
||||
});
|
||||
await dev.fetch("/").expect.toInclude("<h1>Hello</h1>");
|
||||
});
|
||||
await c.expectMessage("hello");
|
||||
|
||||
await c.expectReload(async () => {
|
||||
await dev.patch("index.html", {
|
||||
find: "Hello",
|
||||
replace: "Bar",
|
||||
});
|
||||
await dev.fetch("/").expect.toInclude("<h1>Bar</h1>");
|
||||
});
|
||||
await c.expectMessage("hello");
|
||||
|
||||
await c.expectReload(async () => {
|
||||
await dev.patch("script.ts", {
|
||||
find: "hello",
|
||||
replace: "world",
|
||||
});
|
||||
});
|
||||
await c.expectMessage("world");
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
`files` holds the initial state, and the callback runs with the server running. `dev.fetch()` runs HTTP requests, while `dev.client()` opens a browser instance to the code.
|
||||
|
||||
The functions `dev.write`, `dev.patch`, and `dev.delete` mutate the filesystem. Do not use `node:fs` APIs: the dev server helpers are hooked to wait for the hot reload to complete and for all connected clients to receive the changes.
|
||||
|
||||
When a change triggers a hard reload, that must be explicitly annotated with `expectReload`. This tells `client-fixture.mjs` that the test intends to reload the page once; all other hard reloads automatically fail the test.
|
||||
|
||||
Clients have `console.log` instrumented so that any unasserted logs fail the test. This makes it obvious when an extra reload or re-evaluation happens. Messages are awaited via `c.expectMessage("log")`, or with multiple arguments if multiple logs are expected.
|
||||
|
||||
## Testing for bundling errors
|
||||
|
||||
By default, a client opening a page to an error will fail the test. This makes testing errors explicit.
|
||||
|
||||
```ts
|
||||
devTest("import then create", {
|
||||
files: {
|
||||
"index.html": `
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head></head>
|
||||
<body>
|
||||
<script type="module" src="/script.ts"></script>
|
||||
</body>
|
||||
</html>
|
||||
`,
|
||||
"script.ts": `
|
||||
import data from "./data";
|
||||
console.log(data);
|
||||
`,
|
||||
},
|
||||
async test(dev) {
|
||||
const c = await dev.client("/", {
|
||||
errors: ['script.ts:1:18: error: Could not resolve: "./data"'],
|
||||
});
|
||||
await c.expectReload(async () => {
|
||||
await dev.write("data.ts", "export default 'data';");
|
||||
});
|
||||
await c.expectMessage("data");
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
Many functions take an options value to allow specifying it will produce errors. For example, this delete is going to cause a resolution failure.
|
||||
|
||||
```ts
|
||||
await dev.delete("other.ts", {
|
||||
errors: ['index.ts:1:16: error: Could not resolve: "./other"'],
|
||||
});
|
||||
```
|
||||
413
.cursor/rules/javascriptcore-class.mdc
Normal file
413
.cursor/rules/javascriptcore-class.mdc
Normal file
@@ -0,0 +1,413 @@
|
||||
---
|
||||
description: JavaScript class implemented in C++
|
||||
globs: *.cpp
|
||||
alwaysApply: false
|
||||
---
|
||||
|
||||
# Implementing JavaScript classes in C++
|
||||
|
||||
If there is a publicly accessible Constructor and Prototype, then there are 3 classes:
|
||||
|
||||
- `class Foo : public JSC::DestructibleObject` - if there are C++ class members, we need a destructor, so the instance class extends `JSC::DestructibleObject`. If there are no C++ fields (only JS properties), we usually don't need this class at all; we can instead use `JSC::constructEmptyObject(vm, structure)` and `putDirectOffset`, as in [NodeFSStatBinding.cpp](mdc:src/bun.js/bindings/NodeFSStatBinding.cpp).
|
||||
- class FooPrototype : public JSC::JSNonFinalObject
|
||||
- class FooConstructor : public JSC::InternalFunction
|
||||
|
||||
If there is no publicly accessible Constructor, only the Prototype and the class are necessary. In some cases, we can avoid the prototype entirely (but that's rare).
|
||||
|
||||
If there are C++ fields on the Foo class, the Foo class will need an iso subspace added to [DOMClientIsoSubspaces.h](mdc:src/bun.js/bindings/webcore/DOMClientIsoSubspaces.h) and [DOMIsoSubspaces.h](mdc:src/bun.js/bindings/webcore/DOMIsoSubspaces.h). Prototype and Constructor do not need subspaces.
|
||||
|
||||
Usually you'll need to #include "root.h" at the top of C++ files or you'll get lint errors.
|
||||
|
||||
Generally, defining the subspace looks like this:
|
||||
|
||||
```c++
|
||||
|
||||
class Foo : public JSC::DestructibleObject {
|
||||
|
||||
// ...
|
||||
|
||||
template<typename MyClassT, JSC::SubspaceAccess mode>
|
||||
static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm)
|
||||
{
|
||||
if constexpr (mode == JSC::SubspaceAccess::Concurrently)
|
||||
return nullptr;
|
||||
return WebCore::subspaceForImpl<MyClassT, WebCore::UseCustomHeapCellType::No>(
|
||||
vm,
|
||||
[](auto& spaces) { return spaces.m_clientSubspaceFor${MyClassT}.get(); },
|
||||
[](auto& spaces, auto&& space) { spaces.m_clientSubspaceFor${MyClassT} = std::forward<decltype(space)>(space); },
|
||||
[](auto& spaces) { return spaces.m_subspaceFor${MyClassT}.get(); },
|
||||
[](auto& spaces, auto&& space) { spaces.m_subspaceFor${MyClassT} = std::forward<decltype(space)>(space); });
|
||||
}
|
||||
|
||||
|
||||
```
|
||||
|
||||
It's better to put it in the .cpp file instead of the .h file, when possible.
|
||||
|
||||
## Defining properties
|
||||
|
||||
Define properties on the prototype. Use a const HashTableValues like this:
|
||||
|
||||
```C++
|
||||
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckEmail);
|
||||
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckHost);
|
||||
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckIP);
|
||||
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckIssued);
|
||||
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckPrivateKey);
|
||||
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncToJSON);
|
||||
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncToLegacyObject);
|
||||
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncToString);
|
||||
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncVerify);
|
||||
|
||||
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_ca);
|
||||
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_fingerprint);
|
||||
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_fingerprint256);
|
||||
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_fingerprint512);
|
||||
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_subject);
|
||||
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_subjectAltName);
|
||||
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_infoAccess);
|
||||
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_keyUsage);
|
||||
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_issuer);
|
||||
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_issuerCertificate);
|
||||
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_publicKey);
|
||||
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_raw);
|
||||
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_serialNumber);
|
||||
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_validFrom);
|
||||
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_validTo);
|
||||
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_validFromDate);
|
||||
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_validToDate);
|
||||
|
||||
static const HashTableValue JSX509CertificatePrototypeTableValues[] = {
|
||||
{ "ca"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_ca, 0 } },
|
||||
{ "checkEmail"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncCheckEmail, 2 } },
|
||||
{ "checkHost"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncCheckHost, 2 } },
|
||||
{ "checkIP"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncCheckIP, 1 } },
|
||||
{ "checkIssued"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncCheckIssued, 1 } },
|
||||
{ "checkPrivateKey"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncCheckPrivateKey, 1 } },
|
||||
{ "fingerprint"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_fingerprint, 0 } },
|
||||
{ "fingerprint256"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_fingerprint256, 0 } },
|
||||
{ "fingerprint512"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_fingerprint512, 0 } },
|
||||
{ "infoAccess"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_infoAccess, 0 } },
|
||||
{ "issuer"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_issuer, 0 } },
|
||||
{ "issuerCertificate"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_issuerCertificate, 0 } },
|
||||
{ "keyUsage"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_keyUsage, 0 } },
|
||||
{ "publicKey"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_publicKey, 0 } },
|
||||
{ "raw"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_raw, 0 } },
|
||||
{ "serialNumber"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_serialNumber, 0 } },
|
||||
{ "subject"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_subject, 0 } },
|
||||
{ "subjectAltName"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_subjectAltName, 0 } },
|
||||
{ "toJSON"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncToJSON, 0 } },
|
||||
{ "toLegacyObject"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncToLegacyObject, 0 } },
|
||||
{ "toString"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncToString, 0 } },
|
||||
{ "validFrom"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_validFrom, 0 } },
|
||||
{ "validFromDate"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessorOrValue), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_validFromDate, 0 } },
|
||||
{ "validTo"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_validTo, 0 } },
|
||||
{ "validToDate"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessorOrValue), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_validToDate, 0 } },
|
||||
{ "verify"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncVerify, 1 } },
|
||||
};
|
||||
```
|
||||
|
||||
### Creating a prototype class
|
||||
|
||||
Follow a pattern like this:
|
||||
|
||||
```c++
|
||||
class JSX509CertificatePrototype final : public JSC::JSNonFinalObject {
|
||||
public:
|
||||
using Base = JSC::JSNonFinalObject;
|
||||
static constexpr unsigned StructureFlags = Base::StructureFlags;
|
||||
|
||||
static JSX509CertificatePrototype* create(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::Structure* structure)
|
||||
{
|
||||
JSX509CertificatePrototype* prototype = new (NotNull, allocateCell<JSX509CertificatePrototype>(vm)) JSX509CertificatePrototype(vm, structure);
|
||||
prototype->finishCreation(vm);
|
||||
return prototype;
|
||||
}
|
||||
|
||||
template<typename, JSC::SubspaceAccess>
|
||||
static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm)
|
||||
{
|
||||
return &vm.plainObjectSpace();
|
||||
}
|
||||
|
||||
DECLARE_INFO;
|
||||
|
||||
static JSC::Structure* createStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::JSValue prototype)
|
||||
{
|
||||
auto* structure = JSC::Structure::create(vm, globalObject, prototype, JSC::TypeInfo(JSC::ObjectType, StructureFlags), info());
|
||||
structure->setMayBePrototype(true);
|
||||
return structure;
|
||||
}
|
||||
|
||||
private:
|
||||
JSX509CertificatePrototype(JSC::VM& vm, JSC::Structure* structure)
|
||||
: Base(vm, structure)
|
||||
{
|
||||
}
|
||||
|
||||
void finishCreation(JSC::VM& vm);
|
||||
};
|
||||
|
||||
const ClassInfo JSX509CertificatePrototype::s_info = { "X509Certificate"_s, &Base::s_info, nullptr, nullptr, CREATE_METHOD_TABLE(JSX509CertificatePrototype) };
|
||||
|
||||
void JSX509CertificatePrototype::finishCreation(VM& vm)
|
||||
{
|
||||
Base::finishCreation(vm);
|
||||
reifyStaticProperties(vm, JSX509Certificate::info(), JSX509CertificatePrototypeTableValues, *this);
|
||||
JSC_TO_STRING_TAG_WITHOUT_TRANSITION();
|
||||
}
|
||||
|
||||
} // namespace Bun
|
||||
```
|
||||
|
||||
### Getter definition:
|
||||
|
||||
```C++
|
||||
|
||||
JSC_DEFINE_CUSTOM_GETTER(jsX509CertificateGetter_ca, (JSGlobalObject * globalObject, EncodedJSValue thisValue, PropertyName))
|
||||
{
|
||||
VM& vm = globalObject->vm();
|
||||
auto scope = DECLARE_THROW_SCOPE(vm);
|
||||
|
||||
JSX509Certificate* thisObject = jsDynamicCast<JSX509Certificate*>(JSValue::decode(thisValue));
|
||||
if (UNLIKELY(!thisObject)) {
|
||||
Bun::throwThisTypeError(*globalObject, scope, "JSX509Certificate"_s, "ca"_s);
|
||||
return {};
|
||||
}
|
||||
|
||||
return JSValue::encode(jsBoolean(thisObject->view().isCA()));
|
||||
}
|
||||
```
|
||||
|
||||
### Setter definition
|
||||
|
||||
```C++
|
||||
JSC_DEFINE_CUSTOM_SETTER(jsImportMetaObjectSetter_require, (JSGlobalObject * jsGlobalObject, JSC::EncodedJSValue thisValue, JSC::EncodedJSValue encodedValue, PropertyName propertyName))
|
||||
{
|
||||
ImportMetaObject* thisObject = jsDynamicCast<ImportMetaObject*>(JSValue::decode(thisValue));
|
||||
if (UNLIKELY(!thisObject))
|
||||
return false;
|
||||
|
||||
JSValue value = JSValue::decode(encodedValue);
|
||||
if (!value.isCell()) {
|
||||
// TODO:
|
||||
return true;
|
||||
}
|
||||
|
||||
thisObject->requireProperty.set(thisObject->vm(), thisObject, value.asCell());
|
||||
return true;
|
||||
}
|
||||
```
|
||||
|
||||
### Function definition
|
||||
|
||||
```C++
|
||||
JSC_DEFINE_HOST_FUNCTION(jsX509CertificateProtoFuncToJSON, (JSGlobalObject * globalObject, CallFrame* callFrame))
|
||||
{
|
||||
VM& vm = globalObject->vm();
|
||||
auto scope = DECLARE_THROW_SCOPE(vm);
|
||||
auto *thisObject = jsDynamicCast<MyClassT*>(callFrame->thisValue());
|
||||
if (UNLIKELY(!thisObject)) {
|
||||
Bun::throwThisTypeError(*globalObject, scope, "MyClass"_s, "myFunctionName"_s);
|
||||
return {};
|
||||
}
|
||||
|
||||
return JSValue::encode(functionThatReturnsJSValue(vm, globalObject, thisObject));
|
||||
}
|
||||
```
|
||||
|
||||
### Constructor definition
|
||||
|
||||
```C++
|
||||
|
||||
JSC_DECLARE_HOST_FUNCTION(callStats);
|
||||
JSC_DECLARE_HOST_FUNCTION(constructStats);
|
||||
|
||||
class JSStatsConstructor final : public JSC::InternalFunction {
|
||||
public:
|
||||
using Base = JSC::InternalFunction;
|
||||
static constexpr unsigned StructureFlags = Base::StructureFlags;
|
||||
|
||||
static JSStatsConstructor* create(JSC::VM& vm, JSC::Structure* structure, JSC::JSObject* prototype)
|
||||
{
|
||||
JSStatsConstructor* constructor = new (NotNull, JSC::allocateCell<JSStatsConstructor>(vm)) JSStatsConstructor(vm, structure);
|
||||
constructor->finishCreation(vm, prototype);
|
||||
return constructor;
|
||||
}
|
||||
|
||||
DECLARE_INFO;
|
||||
|
||||
template<typename CellType, JSC::SubspaceAccess>
|
||||
static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm)
|
||||
{
|
||||
return &vm.internalFunctionSpace();
|
||||
}
|
||||
|
||||
static JSC::Structure* createStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::JSValue prototype)
|
||||
{
|
||||
return JSC::Structure::create(vm, globalObject, prototype, JSC::TypeInfo(JSC::InternalFunctionType, StructureFlags), info());
|
||||
}
|
||||
|
||||
private:
|
||||
JSStatsConstructor(JSC::VM& vm, JSC::Structure* structure)
|
||||
: Base(vm, structure, callStats, constructStats)
|
||||
{
|
||||
}
|
||||
|
||||
void finishCreation(JSC::VM& vm, JSC::JSObject* prototype)
|
||||
{
|
||||
Base::finishCreation(vm, 0, "Stats"_s);
|
||||
putDirectWithoutTransition(vm, vm.propertyNames->prototype, prototype, JSC::PropertyAttribute::DontEnum | JSC::PropertyAttribute::DontDelete | JSC::PropertyAttribute::ReadOnly);
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
### Structure caching
|
||||
|
||||
If there's a class, prototype, and constructor:
|
||||
|
||||
1. Add the `JSC::LazyClassStructure` to [ZigGlobalObject.h](mdc:src/bun.js/bindings/ZigGlobalObject.h)
|
||||
2. Initialize the class structure in [ZigGlobalObject.cpp](mdc:src/bun.js/bindings/ZigGlobalObject.cpp) in `void GlobalObject::finishCreation(VM& vm)`
|
||||
3. Visit the class structure in visitChildren in [ZigGlobalObject.cpp](mdc:src/bun.js/bindings/ZigGlobalObject.cpp) in `void GlobalObject::visitChildrenImpl`
|
||||
|
||||
```c++#ZigGlobalObject.cpp
|
||||
void GlobalObject::finishCreation(VM& vm) {
|
||||
// ...
|
||||
m_JSStatsBigIntClassStructure.initLater(
|
||||
[](LazyClassStructure::Initializer& init) {
|
||||
// Call the function to initialize our class structure.
|
||||
Bun::initJSBigIntStatsClassStructure(init);
|
||||
});
|
||||
```
|
||||
|
||||
Then, implement the function that creates the structure:
|
||||
|
||||
```c++
|
||||
void setupX509CertificateClassStructure(LazyClassStructure::Initializer& init)
|
||||
{
|
||||
auto* prototypeStructure = JSX509CertificatePrototype::createStructure(init.vm, init.global, init.global->objectPrototype());
|
||||
auto* prototype = JSX509CertificatePrototype::create(init.vm, init.global, prototypeStructure);
|
||||
|
||||
auto* constructorStructure = JSX509CertificateConstructor::createStructure(init.vm, init.global, init.global->functionPrototype());
|
||||
|
||||
auto* constructor = JSX509CertificateConstructor::create(init.vm, init.global, constructorStructure, prototype);
|
||||
|
||||
auto* structure = JSX509Certificate::createStructure(init.vm, init.global, prototype);
|
||||
init.setPrototype(prototype);
|
||||
init.setStructure(structure);
|
||||
init.setConstructor(constructor);
|
||||
}
|
||||
```
|
||||
|
||||
If there's only a class, use `JSC::LazyProperty<JSGlobalObject, Structure>` instead of `JSC::LazyClassStructure`:
|
||||
|
||||
1. Add the `JSC::LazyProperty<JSGlobalObject, Structure>` to @ZigGlobalObject.h
|
||||
2. Initialize the class structure in @ZigGlobalObject.cpp in `void GlobalObject::finishCreation(VM& vm)`
|
||||
3. Visit the lazy property in visitChildren in @ZigGlobalObject.cpp in `void GlobalObject::visitChildrenImpl`
|
||||
```c++
void GlobalObject::finishCreation(VM& vm) {
    // ...
    this->m_myLazyProperty.initLater([](const JSC::LazyProperty<JSC::JSGlobalObject, JSC::Structure>::Initializer& init) {
        init.set(Bun::initMyStructure(init.vm, reinterpret_cast<Zig::GlobalObject*>(init.owner)));
    });
}
```
|
||||
|
||||
Then, implement the function that creates the structure:
|
||||
```c++
|
||||
Structure* setupX509CertificateStructure(JSC::VM &vm, Zig::GlobalObject* globalObject)
|
||||
{
|
||||
// If there is a prototype:
|
||||
auto* prototypeStructure = JSX509CertificatePrototype::createStructure(init.vm, init.global, init.global->objectPrototype());
|
||||
auto* prototype = JSX509CertificatePrototype::create(init.vm, init.global, prototypeStructure);
|
||||
|
||||
// If there is no prototype or it only has
|
||||
|
||||
auto* structure = JSX509Certificate::createStructure(init.vm, init.global, prototype);
|
||||
init.setPrototype(prototype);
|
||||
init.setStructure(structure);
|
||||
init.setConstructor(constructor);
|
||||
}
|
||||
```
|
||||
|
||||
Then, use the structure by calling `globalObject.m_myStructureName.get(globalObject)`
|
||||
|
||||
```C++
|
||||
JSC_DEFINE_HOST_FUNCTION(x509CertificateConstructorConstruct, (JSGlobalObject * globalObject, CallFrame* callFrame))
|
||||
{
|
||||
VM& vm = globalObject->vm();
|
||||
auto scope = DECLARE_THROW_SCOPE(vm);
|
||||
|
||||
if (!callFrame->argumentCount()) {
|
||||
Bun::throwError(globalObject, scope, ErrorCode::ERR_MISSING_ARGS, "X509Certificate constructor requires at least one argument"_s);
|
||||
return {};
|
||||
}
|
||||
|
||||
JSValue arg = callFrame->uncheckedArgument(0);
|
||||
if (!arg.isCell()) {
|
||||
Bun::throwError(globalObject, scope, ErrorCode::ERR_INVALID_ARG_TYPE, "X509Certificate constructor argument must be a Buffer, TypedArray, or string"_s);
|
||||
return {};
|
||||
}
|
||||
|
||||
auto* zigGlobalObject = defaultGlobalObject(globalObject);
|
||||
Structure* structure = zigGlobalObject->m_JSX509CertificateClassStructure.get(zigGlobalObject);
|
||||
JSValue newTarget = callFrame->newTarget();
|
||||
if (UNLIKELY(zigGlobalObject->m_JSX509CertificateClassStructure.constructor(zigGlobalObject) != newTarget)) {
|
||||
auto scope = DECLARE_THROW_SCOPE(vm);
|
||||
if (!newTarget) {
|
||||
throwTypeError(globalObject, scope, "Class constructor X509Certificate cannot be invoked without 'new'"_s);
|
||||
return {};
|
||||
}
|
||||
|
||||
auto* functionGlobalObject = defaultGlobalObject(getFunctionRealm(globalObject, newTarget.getObject()));
|
||||
RETURN_IF_EXCEPTION(scope, {});
|
||||
structure = InternalFunction::createSubclassStructure(globalObject, newTarget.getObject(), functionGlobalObject->NodeVMScriptStructure());
|
||||
RETURN_IF_EXCEPTION(scope, {});
|
||||
}
|
||||
|
||||
return JSValue::encode(createX509Certificate(vm, globalObject, structure, arg));
|
||||
}
|
||||
```
|
||||
|
||||
### Expose to Zig
|
||||
|
||||
To expose the constructor to zig:
|
||||
|
||||
```c++
|
||||
extern "C" JSC::EncodedJSValue Bun__JSBigIntStatsObjectConstructor(Zig::GlobalObject* globalobject)
|
||||
{
|
||||
return JSValue::encode(globalobject->m_JSStatsBigIntClassStructure.constructor(globalobject));
|
||||
}
|
||||
```
|
||||
|
||||
Zig:
|
||||
|
||||
```zig
|
||||
extern "c" fn Bun__JSBigIntStatsObjectConstructor(*JSC.JSGlobalObject) JSC.JSValue;
|
||||
pub const getBigIntStatsConstructor = Bun__JSBigIntStatsObjectConstructor;
|
||||
```
|
||||
|
||||
To create an object (instance) of a JS class defined in C++ from Zig, follow the `__toJS` convention like this:
|
||||
|
||||
```c++
|
||||
// X509* is whatever we need to create the object
|
||||
extern "C" EncodedJSValue Bun__X509__toJS(Zig::GlobalObject* globalObject, X509* cert)
|
||||
{
|
||||
// ... implementation details
|
||||
auto* structure = globalObject->m_JSX509CertificateClassStructure.get(globalObject);
|
||||
return JSValue::encode(JSX509Certificate::create(globalObject->vm(), structure, globalObject, WTFMove(cert)));
|
||||
}
|
||||
```
|
||||
|
||||
And from Zig:
|
||||
|
||||
```zig
|
||||
const X509 = opaque {
|
||||
// ... class
|
||||
|
||||
extern fn Bun__X509__toJS(*JSC.JSGlobalObject, *X509) JSC.JSValue;
|
||||
|
||||
pub fn toJS(this: *X509, globalObject: *JSC.JSGlobalObject) JSC.JSValue {
|
||||
return Bun__X509__toJS(globalObject, this);
|
||||
}
|
||||
};
|
||||
```
|
||||
203
.cursor/rules/registering-bun-modules.mdc
Normal file
203
.cursor/rules/registering-bun-modules.mdc
Normal file
@@ -0,0 +1,203 @@
|
||||
# Registering Functions, Objects, and Modules in Bun
|
||||
|
||||
This guide documents the process of adding new functionality to the Bun global object and runtime.
|
||||
|
||||
## Overview
|
||||
|
||||
Bun's architecture exposes functionality to JavaScript through a set of carefully registered functions, objects, and modules. Most core functionality is implemented in Zig, with JavaScript bindings that make these features accessible to users.
|
||||
|
||||
There are several key ways to expose functionality in Bun:
|
||||
|
||||
1. **Global Functions**: Direct methods on the `Bun` object (e.g., `Bun.serve()`)
|
||||
2. **Getter Properties**: Lazily initialized properties on the `Bun` object (e.g., `Bun.sqlite`)
|
||||
3. **Constructor Classes**: Classes available through the `Bun` object (e.g., `Bun.ValkeyClient`)
|
||||
4. **Global Modules**: Modules that can be imported directly (e.g., `import {X} from "bun:*"`)
|
||||
|
||||
## The Registration Process
|
||||
|
||||
Adding new functionality to Bun involves several coordinated steps across multiple files:
|
||||
|
||||
### 1. Implement the Core Functionality in Zig
|
||||
|
||||
First, implement your feature in Zig, typically in its own directory in `src/`. Examples:
|
||||
|
||||
- `src/valkey/` for Redis/Valkey client
|
||||
- `src/semver/` for SemVer functionality
|
||||
- `src/smtp/` for SMTP client
|
||||
|
||||
### 2. Create JavaScript Bindings
|
||||
|
||||
Create bindings that expose your Zig functionality to JavaScript:
|
||||
|
||||
- Create a class definition file (e.g., `js_bindings.classes.ts`) to define the JavaScript interface
|
||||
- Implement `JSYourFeature` struct in a file like `js_your_feature.zig`
|
||||
|
||||
Example from a class definition file:
|
||||
|
||||
```typescript
|
||||
// Example from a .classes.ts file
|
||||
import { define } from "../../codegen/class-definitions";
|
||||
|
||||
export default [
|
||||
define({
|
||||
name: "YourFeature",
|
||||
construct: true,
|
||||
finalize: true,
|
||||
hasPendingActivity: true,
|
||||
memoryCost: true,
|
||||
klass: {},
|
||||
JSType: "0b11101110",
|
||||
proto: {
|
||||
yourMethod: {
|
||||
fn: "yourZigMethod",
|
||||
length: 1,
|
||||
},
|
||||
property: {
|
||||
getter: "getProperty",
|
||||
},
|
||||
},
|
||||
values: ["cachedValues"],
|
||||
}),
|
||||
];
|
||||
```
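
The matching Zig side (step 2's `js_your_feature.zig`) follows the generated-class pattern described in `zig-javascriptcore-classes.mdc`. A minimal sketch, with every name illustrative, the import lines assumed, and the `hasPendingActivity`/`memoryCost` hooks omitted:

```zig
// js_your_feature.zig - sketch only; names and imports are assumptions
const bun = @import("bun"); // import path assumed
const JSC = bun.JSC;

pub const YourFeature = struct {
    // Generated bindings for the class declared in js_bindings.classes.ts
    pub const js = JSC.Codegen.JSYourFeature;
    pub const toJS = js.toJS;
    pub const fromJS = js.fromJS;

    // Native state
    enabled: bool = true,

    pub fn constructor(globalObject: *JSC.JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!*YourFeature {
        _ = globalObject;
        _ = callFrame;
        return bun.new(YourFeature, .{});
    }

    // Backs `yourMethod: { fn: "yourZigMethod", length: 1 }` from the definition above
    pub fn yourZigMethod(this: *YourFeature, globalObject: *JSC.JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSC.JSValue {
        _ = this;
        _ = globalObject;
        _ = callFrame;
        return JSC.JSValue.jsBoolean(true);
    }

    // Backs `property: { getter: "getProperty" }`
    pub fn getProperty(this: *YourFeature, globalObject: *JSC.JSGlobalObject) JSC.JSValue {
        _ = globalObject;
        return JSC.JSValue.jsBoolean(this.enabled);
    }

    fn deinit(this: *YourFeature) void {
        bun.destroy(this);
    }

    // Called by the garbage collector when the JS wrapper is collected
    pub fn finalize(this: *YourFeature) void {
        this.deinit();
    }
};
```
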
|
||||
|
||||
### 3. Register with BunObject in `src/bun.js/bindings/BunObject+exports.h`
|
||||
|
||||
Add an entry to the `FOR_EACH_GETTER` macro:
|
||||
|
||||
```c
|
||||
// In BunObject+exports.h
|
||||
#define FOR_EACH_GETTER(macro) \
|
||||
macro(CSRF) \
|
||||
macro(CryptoHasher) \
|
||||
... \
|
||||
macro(YourFeature) \
|
||||
```
|
||||
|
||||
### 4. Create a Getter Function in `src/bun.js/api/BunObject.zig`
|
||||
|
||||
Implement a getter function in `BunObject.zig` that returns your feature:
|
||||
|
||||
```zig
|
||||
// In BunObject.zig
|
||||
pub const YourFeature = toJSGetter(Bun.getYourFeatureConstructor);
|
||||
|
||||
// In the exportAll() function:
|
||||
@export(&BunObject.YourFeature, .{ .name = getterName("YourFeature") });
|
||||
```
|
||||
|
||||
### 5. Implement the Getter Function in a Relevant Zig File
|
||||
|
||||
Implement the function that creates your object:
|
||||
|
||||
```zig
|
||||
// In your main module file (e.g., src/your_feature/your_feature.zig)
|
||||
pub fn getYourFeatureConstructor(globalThis: *JSC.JSGlobalObject, _: *JSC.JSObject) JSC.JSValue {
|
||||
return JSC.API.YourFeature.getConstructor(globalThis);
|
||||
}
|
||||
```
|
||||
|
||||
### 6. Add to Build System
|
||||
|
||||
Ensure your files are included in the build system by adding them to the appropriate targets.
|
||||
|
||||
## Example: Adding a New Module
|
||||
|
||||
Here's a comprehensive example of adding a hypothetical SMTP module:
|
||||
|
||||
1. Create implementation files in `src/smtp/`:
|
||||
|
||||
- `index.zig`: Main entry point that exports everything
|
||||
- `SmtpClient.zig`: Core SMTP client implementation
|
||||
- `js_smtp.zig`: JavaScript bindings
|
||||
- `js_bindings.classes.ts`: Class definition
|
||||
|
||||
2. Define your JS class in `js_bindings.classes.ts`:
|
||||
|
||||
```typescript
|
||||
import { define } from "../../codegen/class-definitions";
|
||||
|
||||
export default [
|
||||
define({
|
||||
name: "EmailClient",
|
||||
construct: true,
|
||||
finalize: true,
|
||||
hasPendingActivity: true,
|
||||
configurable: false,
|
||||
memoryCost: true,
|
||||
klass: {},
|
||||
JSType: "0b11101110",
|
||||
proto: {
|
||||
send: {
|
||||
fn: "send",
|
||||
length: 1,
|
||||
},
|
||||
verify: {
|
||||
fn: "verify",
|
||||
length: 0,
|
||||
},
|
||||
close: {
|
||||
fn: "close",
|
||||
length: 0,
|
||||
},
|
||||
},
|
||||
values: ["connectionPromise"],
|
||||
}),
|
||||
];
|
||||
```
|
||||
|
||||
3. Add getter to `BunObject+exports.h`:
|
||||
|
||||
```c
|
||||
#define FOR_EACH_GETTER(macro) \
|
||||
macro(CSRF) \
|
||||
... \
|
||||
macro(SMTP) \
|
||||
```
|
||||
|
||||
4. Add getter function to `BunObject.zig`:
|
||||
|
||||
```zig
|
||||
pub const SMTP = toJSGetter(Bun.getSmtpConstructor);
|
||||
|
||||
// In exportAll:
|
||||
@export(&BunObject.SMTP, .{ .name = getterName("SMTP") });
|
||||
```
|
||||
|
||||
5. Implement getter in your module:
|
||||
|
||||
```zig
|
||||
pub fn getSmtpConstructor(globalThis: *JSC.JSGlobalObject, _: *JSC.JSObject) JSC.JSValue {
|
||||
return JSC.API.JSEmailClient.getConstructor(globalThis);
|
||||
}
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
1. **Follow Naming Conventions**: Align your naming with existing patterns
|
||||
2. **Reference Existing Modules**: Study similar modules like Valkey or S3Client for guidance
|
||||
3. **Memory Management**: Be careful with memory management and reference counting
|
||||
4. **Error Handling**: Use `bun.JSError!JSValue` for proper error propagation
|
||||
5. **Documentation**: Add JSDoc comments to your JavaScript bindings
|
||||
6. **Testing**: Add tests for your new functionality
|
||||
|
||||
## Common Gotchas
|
||||
|
||||
- Be sure to handle reference counting properly with `ref()`/`deref()`
|
||||
- Always implement proper cleanup in `deinit()` and `finalize()`
|
||||
- For network operations, manage socket lifetimes correctly
|
||||
- Use `JSC.Codegen` correctly to generate necessary binding code
|
||||
|
||||
## Related Files
|
||||
|
||||
- `src/bun.js/bindings/BunObject+exports.h`: Registration of getters and functions
|
||||
- `src/bun.js/api/BunObject.zig`: Implementation of getters and object creation
|
||||
- `src/bun.js/api/BunObject.classes.ts`: Class definitions
|
||||
- `.cursor/rules/zig-javascriptcore-classes.mdc`: More details on class bindings
|
||||
|
||||
## Additional Resources
|
||||
|
||||
For more detailed information on specific topics:
|
||||
|
||||
- See `zig-javascriptcore-classes.mdc` for details on creating JS class bindings
|
||||
- Review existing modules like `valkey`, `sqlite`, or `s3` for real-world examples
|
||||
91
.cursor/rules/writing-tests.mdc
Normal file
91
.cursor/rules/writing-tests.mdc
Normal file
@@ -0,0 +1,91 @@
|
||||
---
|
||||
description: Writing tests for Bun
|
||||
globs:
|
||||
---
|
||||
# Writing tests for Bun
|
||||
|
||||
## Where tests are found
|
||||
|
||||
You'll find all of Bun's tests in the `test/` directory.
|
||||
|
||||
* `test/`
|
||||
* `cli/` - CLI command tests, like `bun install` or `bun init`
|
||||
* `js/` - JavaScript & TypeScript tests
|
||||
* `bun/` - `Bun` APIs tests, separated by category, for example: `glob/` for `Bun.Glob` tests
|
||||
* `node/` - Node.js module tests, separated by module, for example: `assert/` for `node:assert` tests
|
||||
* `test/` - Vendored Node.js tests, taken from the Node.js repository (does not conform to Bun's test style)
|
||||
* `web/` - Web API tests, separated by category, for example: `fetch/` for `Request` and `Response` tests
|
||||
* `third_party/` - npm package tests, to validate that basic usage works in Bun
|
||||
* `napi/` - N-API tests
|
||||
* `v8/` - V8 C++ API tests
|
||||
* `bundler/` - Bundler, transpiler, CSS, and `bun build` tests
|
||||
* `regression/issue/[number]` - Regression tests, always make one when fixing a particular issue
|
||||
|
||||
## How tests are written
|
||||
|
||||
Bun's tests are written as JavaScript and TypeScript files with the Jest-style APIs, like `test`, `describe`, and `expect`. They are tested using Bun's own test runner, `bun test`.
|
||||
|
||||
```js
|
||||
import { describe, test, expect } from "bun:test";
|
||||
import assert, { AssertionError } from "assert";
|
||||
|
||||
describe("assert(expr)", () => {
|
||||
test.each([true, 1, "foo"])(`assert(%p) does not throw`, expr => {
|
||||
expect(() => assert(expr)).not.toThrow();
|
||||
});
|
||||
|
||||
test.each([false, 0, "", null, undefined])(`assert(%p) throws`, expr => {
|
||||
expect(() => assert(expr)).toThrow(AssertionError);
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
## Testing conventions
|
||||
|
||||
* See `test/harness.ts` for common test utilities and helpers
|
||||
* Be rigorous and test for edge-cases and unexpected inputs
|
||||
* Use data-driven tests, e.g. `test.each`, to reduce boilerplate when possible
|
||||
* When you need to test Bun as a CLI, use the following pattern:
|
||||
|
||||
```js
|
||||
import { test, expect } from "bun:test";
|
||||
import { spawn } from "bun";
|
||||
import { bunExe, bunEnv } from "harness";
|
||||
|
||||
test("bun --version", async () => {
|
||||
const { exited, stdout: stdoutStream, stderr: stderrStream } = spawn({
|
||||
cmd: [bunExe(), "--version"],
|
||||
env: bunEnv,
|
||||
stdout: "pipe",
|
||||
stderr: "pipe",
|
||||
});
|
||||
const [ exitCode, stdout, stderr ] = await Promise.all([
|
||||
exited,
|
||||
new Response(stdoutStream).text(),
|
||||
new Response(stderrStream).text(),
|
||||
]);
|
||||
expect({ exitCode, stdout, stderr }).toMatchObject({
|
||||
exitCode: 0,
|
||||
stdout: expect.stringContaining(Bun.version),
|
||||
stderr: "",
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
## Before writing a test
|
||||
|
||||
* If you are fixing a bug, write the test first and make sure it fails (as expected) with the canary version of Bun
|
||||
* If you are fixing a Node.js compatibility bug, create a throw-away snippet of code and test that it works as you expect in Node.js, then that it fails (as expected) with the canary version of Bun
|
||||
* When the expected behaviour is ambiguous, defer to matching what happens in Node.js
|
||||
* Always attempt to find related tests in an existing test file before creating a new test file
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
509
.cursor/rules/zig-javascriptcore-classes.mdc
Normal file
509
.cursor/rules/zig-javascriptcore-classes.mdc
Normal file
@@ -0,0 +1,509 @@
|
||||
---
|
||||
description: How Zig works with JavaScriptCore bindings generator
|
||||
globs:
|
||||
alwaysApply: false
|
||||
---
|
||||
|
||||
# Bun's JavaScriptCore Class Bindings Generator
|
||||
|
||||
This document explains how Bun's class bindings generator works to bridge Zig and JavaScript code through JavaScriptCore (JSC).
|
||||
|
||||
## Architecture Overview
|
||||
|
||||
Bun's binding system creates a seamless bridge between JavaScript and Zig, allowing Zig implementations to be exposed as JavaScript classes. The system has several key components:
|
||||
|
||||
1. **Zig Implementation** (.zig files)
|
||||
2. **JavaScript Interface Definition** (.classes.ts files)
|
||||
3. **Generated Code** (C++/Zig files that connect everything)
|
||||
|
||||
## Class Definition Files
|
||||
|
||||
### JavaScript Interface (.classes.ts)
|
||||
|
||||
The `.classes.ts` files define the JavaScript API using a declarative approach:
|
||||
|
||||
```typescript
|
||||
// Example: encoding.classes.ts
|
||||
define({
|
||||
name: "TextDecoder",
|
||||
constructor: true,
|
||||
JSType: "object",
|
||||
finalize: true,
|
||||
proto: {
|
||||
decode: {
|
||||
// Function definition
|
||||
args: 1,
|
||||
},
|
||||
encoding: {
|
||||
// Getter with caching
|
||||
getter: true,
|
||||
cache: true,
|
||||
},
|
||||
fatal: {
|
||||
// Read-only property
|
||||
getter: true,
|
||||
},
|
||||
ignoreBOM: {
|
||||
// Read-only property
|
||||
getter: true,
|
||||
},
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
Each class definition specifies:
|
||||
|
||||
- The class name
|
||||
- Whether it has a constructor
|
||||
- JavaScript type (object, function, etc.)
|
||||
- Properties and methods in the `proto` field
|
||||
- Caching strategy for properties
|
||||
- Finalization requirements
|
||||
|
||||
### Zig Implementation (.zig)
|
||||
|
||||
The Zig files implement the native functionality:
|
||||
|
||||
```zig
|
||||
// Example: TextDecoder.zig
|
||||
pub const TextDecoder = struct {
|
||||
// Expose generated bindings as `js` namespace with trait conversion methods
|
||||
pub const js = JSC.Codegen.JSTextDecoder;
|
||||
pub const toJS = js.toJS;
|
||||
pub const fromJS = js.fromJS;
|
||||
pub const fromJSDirect = js.fromJSDirect;
|
||||
|
||||
// Internal state
|
||||
encoding: []const u8,
|
||||
fatal: bool,
|
||||
ignoreBOM: bool,
|
||||
|
||||
// Constructor implementation - note use of globalObject
|
||||
pub fn constructor(
|
||||
globalObject: *JSGlobalObject,
|
||||
callFrame: *JSC.CallFrame,
|
||||
) bun.JSError!*TextDecoder {
|
||||
// Implementation
|
||||
|
||||
return bun.new(TextDecoder, .{
|
||||
// Fields
|
||||
});
|
||||
}
|
||||
|
||||
// Prototype methods - note return type includes JSError
|
||||
pub fn decode(
|
||||
this: *TextDecoder,
|
||||
globalObject: *JSGlobalObject,
|
||||
callFrame: *JSC.CallFrame,
|
||||
) bun.JSError!JSC.JSValue {
|
||||
// Implementation
|
||||
}
|
||||
|
||||
// Getters
|
||||
pub fn getEncoding(this: *TextDecoder, globalObject: *JSGlobalObject) JSC.JSValue {
|
||||
return JSC.JSValue.createStringFromUTF8(globalObject, this.encoding);
|
||||
}
|
||||
|
||||
pub fn getFatal(this: *TextDecoder, globalObject: *JSGlobalObject) JSC.JSValue {
|
||||
return JSC.JSValue.jsBoolean(this.fatal);
|
||||
}
|
||||
|
||||
// Cleanup - note standard pattern of using deinit/deref
|
||||
fn deinit(this: *TextDecoder) void {
|
||||
// Release any retained resources
|
||||
// Free the pointer at the end.
|
||||
bun.destroy(this);
|
||||
}
|
||||
|
||||
// Finalize - called by JS garbage collector. This should call deinit, or deref if reference counted.
|
||||
pub fn finalize(this: *TextDecoder) void {
|
||||
this.deinit();
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
Key components in the Zig file:
|
||||
|
||||
- The struct containing native state
|
||||
- `pub const js = JSC.Codegen.JS<ClassName>` to include generated code
|
||||
- Constructor and methods using `bun.JSError!JSValue` return type for proper error handling
|
||||
- Consistent use of `globalObject` parameter name instead of `ctx`
|
||||
- Methods matching the JavaScript interface
|
||||
- Getters/setters for properties
|
||||
- Proper resource cleanup pattern with `deinit()` and `finalize()`
|
||||
- Update `src/bun.js/bindings/generated_classes_list.zig` to include the new class
|
||||
|
||||
## Code Generation System
|
||||
|
||||
The binding generator produces C++ code that connects JavaScript and Zig:
|
||||
|
||||
1. **JSC Class Structure**: Creates C++ classes for the JS object, prototype, and constructor
|
||||
2. **Memory Management**: Handles GC integration through JSC's WriteBarrier
|
||||
3. **Method Binding**: Connects JS function calls to Zig implementations
|
||||
4. **Type Conversion**: Converts between JS values and Zig types
|
||||
5. **Property Caching**: Implements the caching system for properties
|
||||
|
||||
The generated C++ code includes:
|
||||
|
||||
- A JSC wrapper class (`JSTextDecoder`)
|
||||
- A prototype class (`JSTextDecoderPrototype`)
|
||||
- A constructor function (`JSTextDecoderConstructor`)
|
||||
- Function bindings (`TextDecoderPrototype__decodeCallback`)
|
||||
- Property getters/setters (`TextDecoderPrototype__encodingGetterWrap`)
|
||||
|
||||
## CallFrame Access
|
||||
|
||||
The `CallFrame` object provides access to JavaScript execution context:
|
||||
|
||||
```zig
|
||||
pub fn decode(
|
||||
this: *TextDecoder,
|
||||
globalObject: *JSGlobalObject,
|
||||
callFrame: *JSC.CallFrame
|
||||
) bun.JSError!JSC.JSValue {
|
||||
// Get arguments
|
||||
const input = callFrame.argument(0);
|
||||
const options = callFrame.argument(1);
|
||||
|
||||
// Get this value
|
||||
const thisValue = callFrame.thisValue();
|
||||
|
||||
// Implementation with error handling
|
||||
if (input.isUndefinedOrNull()) {
|
||||
return globalObject.throw("Input cannot be null or undefined", .{});
|
||||
}
|
||||
|
||||
// Return value or throw error
|
||||
return JSC.JSValue.jsString(globalObject, "result");
|
||||
}
|
||||
```
|
||||
|
||||
CallFrame methods include:
|
||||
|
||||
- `argument(i)`: Get the i-th argument
|
||||
- `argumentCount()`: Get the number of arguments
|
||||
- `thisValue()`: Get the `this` value
|
||||
- `callee()`: Get the function being called
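
A minimal sketch using the helpers listed above (the function name and error message are invented for illustration):

```zig
// Sketch: validate the argument count and inspect arguments before doing work.
pub fn requireTwoArgs(globalObject: *JSC.JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSC.JSValue {
    if (callFrame.argumentCount() < 2) {
        return globalObject.throw("Expected 2 arguments", .{});
    }
    const first = callFrame.argument(0);
    const second = callFrame.argument(1);

    // Report whether both arguments were provided as non-null values
    return JSC.JSValue.jsBoolean(!first.isUndefinedOrNull() and !second.isUndefinedOrNull());
}
```
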
|
||||
|
||||
## Property Caching and GC-Owned Values

The `cache: true` option in property definitions enables JSC's WriteBarrier to efficiently store values:

```typescript
encoding: {
  getter: true,
  cache: true, // Enable caching
}
```

### C++ Implementation

In the generated C++ code, caching uses JSC's WriteBarrier:

```cpp
JSC_DEFINE_CUSTOM_GETTER(TextDecoderPrototype__encodingGetterWrap, (...)) {
    auto& vm = JSC::getVM(lexicalGlobalObject);
    Zig::GlobalObject* globalObject = reinterpret_cast<Zig::GlobalObject*>(lexicalGlobalObject);
    auto throwScope = DECLARE_THROW_SCOPE(vm);
    JSTextDecoder* thisObject = jsCast<JSTextDecoder*>(JSValue::decode(encodedThisValue));
    JSC::EnsureStillAliveScope thisArg = JSC::EnsureStillAliveScope(thisObject);

    // Check for cached value and return if present
    if (JSValue cachedValue = thisObject->m_encoding.get())
        return JSValue::encode(cachedValue);

    // Get value from Zig implementation
    JSC::JSValue result = JSC::JSValue::decode(
        TextDecoderPrototype__getEncoding(thisObject->wrapped(), globalObject)
    );
    RETURN_IF_EXCEPTION(throwScope, {});

    // Store in cache for future access
    thisObject->m_encoding.set(vm, thisObject, result);
    RELEASE_AND_RETURN(throwScope, JSValue::encode(result));
}
```

### Zig Accessor Functions

For each cached property, the generator creates Zig accessor functions that allow Zig code to work with these GC-owned values:

```zig
// External function declarations
extern fn TextDecoderPrototype__encodingSetCachedValue(JSC.JSValue, *JSC.JSGlobalObject, JSC.JSValue) callconv(JSC.conv) void;
extern fn TextDecoderPrototype__encodingGetCachedValue(JSC.JSValue) callconv(JSC.conv) JSC.JSValue;

/// `TextDecoder.encoding` setter
/// This value will be visited by the garbage collector.
pub fn encodingSetCached(thisValue: JSC.JSValue, globalObject: *JSC.JSGlobalObject, value: JSC.JSValue) void {
    JSC.markBinding(@src());
    TextDecoderPrototype__encodingSetCachedValue(thisValue, globalObject, value);
}

/// `TextDecoder.encoding` getter
/// This value will be visited by the garbage collector.
pub fn encodingGetCached(thisValue: JSC.JSValue) ?JSC.JSValue {
    JSC.markBinding(@src());
    const result = TextDecoderPrototype__encodingGetCachedValue(thisValue);
    if (result == .zero)
        return null;

    return result;
}
```

### Benefits of GC-Owned Values

This system provides several key benefits:

1. **Automatic Memory Management**: The JavaScriptCore GC tracks and manages these values
2. **Proper Garbage Collection**: The WriteBarrier ensures values are properly visited during GC
3. **Consistent Access**: Zig code can easily get/set these cached JS values
4. **Performance**: Cached values avoid repeated computation or serialization

### Use Cases

GC-owned cached values are particularly useful for:

1. **Computed Properties**: Store expensive computation results
2. **Lazily Created Objects**: Create objects only when needed, then cache them
3. **References to Other Objects**: Store references to other JS objects that need GC tracking
4. **Memoization**: Cache results based on input parameters

The WriteBarrier mechanism ensures that any JS values stored in this way are properly tracked by the garbage collector.

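To make the "lazily created objects" case concrete, here is a hedged sketch of a getter that consults the generated cache accessor before building a value. The property name, the `formatAddress` helper, and the assumption that the getter receives `thisValue` are all illustrative; the real wiring comes from the generated `*GetCached`/`*SetCached` functions shown above:

```zig
pub fn getPresentation(
    this: *SocketAddress,
    thisValue: JSC.JSValue,
    globalObject: *JSGlobalObject
) JSC.JSValue {
    // Fast path: the JS string was already built and is owned by the GC.
    if (presentationGetCached(thisValue)) |cached| {
        return cached;
    }

    // Slow path: build the value once, then store it in the WriteBarrier-backed
    // cache so later reads skip the native call entirely.
    const value = JSC.JSValue.jsString(globalObject, this.formatAddress());
    presentationSetCached(thisValue, globalObject, value);
    return value;
}
```
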
## Memory Management and Finalization

The binding system handles memory management across the JavaScript/Zig boundary:

1. **Object Creation**: JavaScript `new TextDecoder()` creates both a JS wrapper and a Zig struct
2. **Reference Tracking**: JSC's GC tracks all JS references to the object
3. **Finalization**: When the JS object is collected, the finalizer releases Zig resources

Bun uses a consistent pattern for resource cleanup:

```zig
// Resource cleanup method - separate from finalization
pub fn deinit(this: *TextDecoder) void {
    // Release resources like strings
    this._encoding.deref(); // String deref pattern

    // Free any buffers
    if (this.buffer) |buffer| {
        bun.default_allocator.free(buffer);
    }
}

// Called by the GC when object is collected
pub fn finalize(this: *TextDecoder) void {
    JSC.markBinding(@src()); // For debugging
    this.deinit(); // Clean up resources
    bun.default_allocator.destroy(this); // Free the object itself
}
```

Some objects that hold references to other JS objects use `.deref()` instead:

```zig
pub fn finalize(this: *SocketAddress) void {
    JSC.markBinding(@src());
    this._presentation.deref(); // Release references
    this.destroy();
}
```

## Error Handling with JSError

Bun uses the `bun.JSError!JSValue` return type for proper error handling:

```zig
pub fn decode(
    this: *TextDecoder,
    globalObject: *JSGlobalObject,
    callFrame: *JSC.CallFrame
) bun.JSError!JSC.JSValue {
    // Throwing an error
    if (callFrame.argumentCount() < 1) {
        return globalObject.throw("Missing required argument", .{});
    }

    // Or returning a success value
    return JSC.JSValue.jsString(globalObject, "Success!");
}
```

This pattern allows Zig functions to:

1. Return JavaScript values on success
2. Throw JavaScript exceptions on error
3. Propagate errors automatically through the call stack

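A minimal sketch of point 3: a helper returns `bun.JSError!void`, and the caller forwards any thrown exception with `try`. The helper name, messages, and `normalize` wrapper are invented for illustration; the calls themselves mirror the examples above:

```zig
// Hypothetical validation helper: throws a JS exception on bad input.
fn ensureString(globalObject: *JSGlobalObject, value: JSC.JSValue) bun.JSError!void {
    if (!value.isString()) {
        return globalObject.throw("Expected a string", .{});
    }
}

pub fn normalize(
    this: *TextDecoder,
    globalObject: *JSGlobalObject,
    callFrame: *JSC.CallFrame
) bun.JSError!JSC.JSValue {
    // `try` propagates the pending JS exception up the Zig call stack;
    // no manual exception checks are needed at each level.
    try ensureString(globalObject, callFrame.argument(0));

    _ = this;
    return JSC.JSValue.jsString(globalObject, "ok");
}
```
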
## Type Safety and Error Handling

The binding system includes robust error handling:

```cpp
// Example of type checking in generated code
JSTextDecoder* thisObject = jsDynamicCast<JSTextDecoder*>(callFrame->thisValue());
if (UNLIKELY(!thisObject)) {
    scope.throwException(lexicalGlobalObject,
        Bun::createInvalidThisError(lexicalGlobalObject, callFrame->thisValue(), "TextDecoder"_s));
    return {};
}
```

## Prototypal Inheritance

The binding system creates proper JavaScript prototype chains:

1. **Constructor**: `JSTextDecoderConstructor` with the standard `.prototype` property
2. **Prototype**: `JSTextDecoderPrototype` with methods and properties
3. **Instances**: Each `JSTextDecoder` instance with `__proto__` pointing to the prototype

This ensures JavaScript inheritance works as expected:

```cpp
// From generated code
void JSTextDecoderConstructor::finishCreation(VM& vm, JSC::JSGlobalObject* globalObject, JSTextDecoderPrototype* prototype)
{
    Base::finishCreation(vm, 0, "TextDecoder"_s, PropertyAdditionMode::WithoutStructureTransition);

    // Set up the prototype chain
    putDirectWithoutTransition(vm, vm.propertyNames->prototype, prototype, PropertyAttribute::DontEnum | PropertyAttribute::DontDelete | PropertyAttribute::ReadOnly);
    ASSERT(inherits(info()));
}
```

## Performance Considerations

The binding system is optimized for performance:

1. **Direct Pointer Access**: JavaScript objects maintain a direct pointer to Zig objects
2. **Property Caching**: WriteBarrier caching avoids repeated native calls for stable properties
3. **Memory Management**: JSC garbage collection is integrated with Zig memory management
4. **Type Conversion**: Fast paths for common JavaScript/Zig type conversions

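Point 1 is visible from Zig through the generated `fromJS` helper: given a JS value, it returns the wrapped native pointer, or null when the value is not an instance of the class. The method below is invented for illustration:

```zig
pub fn equals(
    this: *TextDecoder,
    globalObject: *JSGlobalObject,
    callFrame: *JSC.CallFrame
) bun.JSError!JSC.JSValue {
    const other_value = callFrame.argument(0);

    // `fromJS` performs the type check and the pointer lookup in one step.
    const other = TextDecoder.fromJS(other_value) orelse
        return globalObject.throw("Expected a TextDecoder", .{});

    // Identity comparison on the underlying native objects.
    return JSC.JSValue.jsBoolean(this == other);
}
```
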
## Creating a New Class Binding

To create a new class binding in Bun:

1. **Define the class interface** in a `.classes.ts` file:

```typescript
define({
  name: "MyClass",
  constructor: true,
  finalize: true,
  proto: {
    myMethod: {
      args: 1,
    },
    myProperty: {
      getter: true,
      cache: true,
    },
  },
});
```

2. **Implement the native functionality** in a `.zig` file:

```zig
pub const MyClass = struct {
    // Generated bindings
    pub const js = JSC.Codegen.JSMyClass;
    pub const toJS = js.toJS;
    pub const fromJS = js.fromJS;
    pub const fromJSDirect = js.fromJSDirect;

    // State
    value: []const u8,

    pub const new = bun.TrivialNew(@This());

    // Constructor
    pub fn constructor(
        globalObject: *JSGlobalObject,
        callFrame: *JSC.CallFrame,
    ) bun.JSError!*MyClass {
        const arg = callFrame.argument(0);
        // Implementation
    }

    // Method
    pub fn myMethod(
        this: *MyClass,
        globalObject: *JSGlobalObject,
        callFrame: *JSC.CallFrame,
    ) bun.JSError!JSC.JSValue {
        // Implementation
    }

    // Getter
    pub fn getMyProperty(this: *MyClass, globalObject: *JSGlobalObject) JSC.JSValue {
        return JSC.JSValue.jsString(globalObject, this.value);
    }

    // Resource cleanup
    pub fn deinit(this: *MyClass) void {
        // Clean up resources
    }

    pub fn finalize(this: *MyClass) void {
        this.deinit();
        bun.destroy(this);
    }
};
```

3. **The binding generator** creates all necessary C++ and Zig glue code to connect JavaScript and Zig, including:
   - C++ class definitions
   - Method and property bindings
   - Memory management utilities
   - GC integration code

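Once these pieces exist, native code can construct instances and hand them to JavaScript. This is a hedged sketch (the factory function and its argument handling are invented); it relies only on the `new` and `toJS` declarations from step 2:

```zig
// Hypothetical factory: builds a MyClass from Zig and returns the JS wrapper.
pub fn createFromText(
    globalObject: *JSGlobalObject,
    callFrame: *JSC.CallFrame
) bun.JSError!JSC.JSValue {
    const text = callFrame.argument(0);
    if (!text.isString()) {
        return globalObject.throw("Expected a string", .{});
    }

    // `new` (from bun.TrivialNew) heap-allocates the struct; `toJS` wraps it in
    // the generated JSMyClass object so the GC manages the wrapper's lifetime.
    const instance = MyClass.new(.{
        .value = "placeholder", // a real factory would copy the contents of `text`
    });
    return instance.toJS(globalObject);
}
```
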
## Generated Code Structure

The binding generator produces several components:

### 1. C++ Classes

For each Zig class, the system generates:

- **`JS<Class>`**: Main wrapper that holds a pointer to the Zig object (`JSTextDecoder`)
- **`JS<Class>Prototype`**: Contains methods and properties (`JSTextDecoderPrototype`)
- **`JS<Class>Constructor`**: Implementation of the JavaScript constructor (`JSTextDecoderConstructor`)

### 2. C++ Methods and Properties

- **Method Callbacks**: `TextDecoderPrototype__decodeCallback`
- **Property Getters/Setters**: `TextDecoderPrototype__encodingGetterWrap`
- **Initialization Functions**: `finishCreation` methods for setting up the class

### 3. Zig Bindings

- **External Function Declarations**:

```zig
extern fn TextDecoderPrototype__decode(*TextDecoder, *JSC.JSGlobalObject, *JSC.CallFrame) callconv(JSC.conv) JSC.EncodedJSValue;
```

- **Cached Value Accessors**:

```zig
pub fn encodingGetCached(thisValue: JSC.JSValue) ?JSC.JSValue { ... }
pub fn encodingSetCached(thisValue: JSC.JSValue, globalObject: *JSC.JSGlobalObject, value: JSC.JSValue) void { ... }
```

- **Constructor Helpers**:

```zig
pub fn create(globalObject: *JSC.JSGlobalObject) bun.JSError!JSC.JSValue { ... }
```

### 4. GC Integration

- **Memory Cost Calculation**: `estimatedSize` method
- **Child Visitor Methods**: `visitChildrenImpl` and `visitAdditionalChildren`
- **Heap Analysis**: `analyzeHeap` for debugging memory issues

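The memory-cost hook from the first bullet lets a class report native memory that lives outside the JS heap, which helps the GC schedule collections sensibly. A hedged sketch, assuming the class owns an optional `buffer` slice as in the earlier cleanup example:

```zig
pub fn estimatedSize(this: *TextDecoder) usize {
    // Count the struct itself plus any native buffer it owns.
    var size: usize = @sizeOf(TextDecoder);
    if (this.buffer) |buffer| {
        size += buffer.len;
    }
    return size;
}
```
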
This architecture makes it possible to implement high-performance native functionality in Zig while exposing a clean, idiomatic JavaScript API to users.

1
.gitattributes
vendored
1
.gitattributes
vendored
@@ -16,7 +16,6 @@
|
||||
*.map text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.md text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.mdc text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.mdx text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.mjs text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.mts text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
|
||||
|
||||
4
.github/CODEOWNERS
vendored
4
.github/CODEOWNERS
vendored
@@ -3,7 +3,3 @@
|
||||
|
||||
# Tests
|
||||
/test/expectations.txt @Jarred-Sumner
|
||||
|
||||
# Types
|
||||
*.d.ts @alii
|
||||
/packages/bun-types/ @alii
|
||||
|
||||
2
.github/actions/bump/action.yml
vendored
2
.github/actions/bump/action.yml
vendored
@@ -25,7 +25,7 @@ runs:
|
||||
echo "version=$LATEST" >> $GITHUB_OUTPUT
|
||||
echo "message=$MESSAGE" >> $GITHUB_OUTPUT
|
||||
- name: Create Pull Request
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
uses: peter-evans/create-pull-request@v4
|
||||
with:
|
||||
add-paths: |
|
||||
CMakeLists.txt
|
||||
|
||||
19
.github/workflows/auto-assign-types.yml
vendored
19
.github/workflows/auto-assign-types.yml
vendored
@@ -1,19 +0,0 @@
|
||||
name: Auto Assign Types Issues
|
||||
|
||||
on:
|
||||
issues:
|
||||
types: [labeled]
|
||||
|
||||
jobs:
|
||||
auto-assign:
|
||||
runs-on: ubuntu-latest
|
||||
if: github.event.label.name == 'types'
|
||||
permissions:
|
||||
issues: write
|
||||
steps:
|
||||
- name: Assign to alii
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GH_REPO: ${{ github.repository }}
|
||||
run: |
|
||||
gh issue edit ${{ github.event.issue.number }} --add-assignee alii
|
||||
29
.github/workflows/auto-close-duplicates.yml
vendored
29
.github/workflows/auto-close-duplicates.yml
vendored
@@ -1,29 +0,0 @@
|
||||
name: Auto-close duplicate issues
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 9 * * *"
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
auto-close-duplicates:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 10
|
||||
concurrency:
|
||||
group: auto-close-duplicates-${{ github.repository }}
|
||||
cancel-in-progress: true
|
||||
permissions:
|
||||
contents: read
|
||||
issues: write
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
|
||||
- name: Auto-close duplicate issues
|
||||
run: bun run scripts/auto-close-duplicates.ts
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GITHUB_REPOSITORY: ${{ github.repository }}
|
||||
34
.github/workflows/claude-dedupe-issues.yml
vendored
34
.github/workflows/claude-dedupe-issues.yml
vendored
@@ -1,34 +0,0 @@
|
||||
name: Claude Issue Dedupe
|
||||
on:
|
||||
issues:
|
||||
types: [opened]
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
issue_number:
|
||||
description: 'Issue number to process for duplicate detection'
|
||||
required: true
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
claude-dedupe-issues:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 10
|
||||
concurrency:
|
||||
group: claude-dedupe-issues-${{ github.event.issue.number || inputs.issue_number }}
|
||||
cancel-in-progress: true
|
||||
permissions:
|
||||
contents: read
|
||||
issues: write
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Run Claude Code slash command
|
||||
uses: anthropics/claude-code-base-action@beta
|
||||
with:
|
||||
prompt: "/dedupe ${{ github.repository }}/issues/${{ github.event.issue.number || inputs.issue_number }}"
|
||||
anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}
|
||||
claude_args: "--model claude-sonnet-4-5-20250929"
|
||||
claude_env: |
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
67
.github/workflows/claude.yml
vendored
Normal file
67
.github/workflows/claude.yml
vendored
Normal file
@@ -0,0 +1,67 @@
|
||||
name: Claude Code
|
||||
|
||||
on:
|
||||
issue_comment:
|
||||
types: [created]
|
||||
pull_request_review_comment:
|
||||
types: [created]
|
||||
issues:
|
||||
types: [opened, assigned]
|
||||
pull_request_review:
|
||||
types: [submitted]
|
||||
|
||||
jobs:
|
||||
claude:
|
||||
if: |
|
||||
github.repository == 'oven-sh/bun' &&
|
||||
(
|
||||
(github.event_name == 'issue_comment' && (github.event.comment.author_association == 'MEMBER' || github.event.comment.author_association == 'OWNER' || github.event.comment.author_association == 'COLLABORATOR')) ||
|
||||
(github.event_name == 'pull_request_review_comment' && (github.event.comment.author_association == 'MEMBER' || github.event.comment.author_association == 'OWNER' || github.event.comment.author_association == 'COLLABORATOR')) ||
|
||||
(github.event_name == 'pull_request_review' && (github.event.review.author_association == 'MEMBER' || github.event.review.author_association == 'OWNER' || github.event.review.author_association == 'COLLABORATOR')) ||
|
||||
(github.event_name == 'issues' && (github.event.issue.author_association == 'MEMBER' || github.event.issue.author_association == 'OWNER' || github.event.issue.author_association == 'COLLABORATOR'))
|
||||
) &&
|
||||
(github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude')) ||
|
||||
(github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude')) ||
|
||||
(github.event_name == 'pull_request_review' && contains(github.event.review.body, '@claude')) ||
|
||||
(github.event_name == 'issues' && (contains(github.event.issue.body, '@claude') || contains(github.event.issue.title, '@claude')))
|
||||
runs-on: claude
|
||||
env:
|
||||
IS_SANDBOX: 1
|
||||
container:
|
||||
image: localhost:5000/claude-bun:latest
|
||||
options: --privileged --user 1000:1000
|
||||
permissions:
|
||||
contents: read
|
||||
id-token: write
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
working-directory: /workspace/bun
|
||||
run: |
|
||||
git config --global user.email "claude-bot@bun.sh" && \
|
||||
git config --global user.name "Claude Bot" && \
|
||||
git config --global url."git@github.com:".insteadOf "https://github.com/" && \
|
||||
git config --global url."git@github.com:".insteadOf "http://github.com/" && \
|
||||
git config --global --add safe.directory /workspace/bun && \
|
||||
git config --global push.default current && \
|
||||
git config --global pull.rebase true && \
|
||||
git config --global init.defaultBranch main && \
|
||||
git config --global core.editor "vim" && \
|
||||
git config --global color.ui auto && \
|
||||
git config --global fetch.prune true && \
|
||||
git config --global diff.colorMoved zebra && \
|
||||
git config --global merge.conflictStyle diff3 && \
|
||||
git config --global rerere.enabled true && \
|
||||
git config --global core.autocrlf input
|
||||
git fetch origin ${{ github.event.pull_request.head.sha }}
|
||||
git checkout ${{ github.event.pull_request.head.ref }}
|
||||
git reset --hard origin/${{ github.event.pull_request.head.ref }}
|
||||
- name: Run Claude Code
|
||||
id: claude
|
||||
# TODO: switch this out once they merge their v1
|
||||
uses: km-anthropic/claude-code-action@v1-dev
|
||||
with:
|
||||
timeout_minutes: "180"
|
||||
claude_args: |
|
||||
--dangerously-skip-permissions
|
||||
--system-prompt "You are working on the Bun codebase"
|
||||
claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}
|
||||
58
.github/workflows/codex-test-sync.yml
vendored
Normal file
58
.github/workflows/codex-test-sync.yml
vendored
Normal file
@@ -0,0 +1,58 @@
|
||||
name: Codex Test Sync
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [labeled, opened]
|
||||
|
||||
env:
|
||||
BUN_VERSION: "1.2.15"
|
||||
|
||||
jobs:
|
||||
sync-node-tests:
|
||||
runs-on: ubuntu-latest
|
||||
if: |
|
||||
(github.event.action == 'labeled' && github.event.label.name == 'codex') ||
|
||||
(github.event.action == 'opened' && contains(github.event.pull_request.labels.*.name, 'codex')) ||
|
||||
contains(github.head_ref, 'codex')
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: ${{ env.BUN_VERSION }}
|
||||
|
||||
- name: Get changed files
|
||||
id: changed-files
|
||||
uses: tj-actions/changed-files@v44
|
||||
with:
|
||||
files: |
|
||||
test/js/node/test/parallel/**/*.{js,mjs,ts}
|
||||
test/js/node/test/sequential/**/*.{js,mjs,ts}
|
||||
|
||||
- name: Sync tests
|
||||
if: steps.changed-files.outputs.any_changed == 'true'
|
||||
shell: bash
|
||||
run: |
|
||||
echo "Changed test files:"
|
||||
echo "${{ steps.changed-files.outputs.all_changed_files }}"
|
||||
|
||||
# Process each changed test file
|
||||
for file in ${{ steps.changed-files.outputs.all_changed_files }}; do
|
||||
# Extract test name from file path
|
||||
test_name=$(basename "$file" | sed 's/\.[^.]*$//')
|
||||
echo "Syncing test: $test_name"
|
||||
bun node:test:cp "$test_name"
|
||||
done
|
||||
|
||||
- name: Commit changes
|
||||
uses: stefanzweifel/git-auto-commit-action@v5
|
||||
with:
|
||||
commit_message: "Sync Node.js tests with upstream"
|
||||
24
.github/workflows/docs.yml
vendored
Normal file
24
.github/workflows/docs.yml
vendored
Normal file
@@ -0,0 +1,24 @@
|
||||
name: Docs
|
||||
|
||||
on:
|
||||
push:
|
||||
paths:
|
||||
- "docs/**"
|
||||
- "packages/bun-types/**.d.ts"
|
||||
- "CONTRIBUTING.md"
|
||||
- "src/cli/install.sh"
|
||||
- "src/cli/install.ps1"
|
||||
branches:
|
||||
- main
|
||||
|
||||
jobs:
|
||||
deploy:
|
||||
name: Deploy
|
||||
runs-on: ubuntu-latest
|
||||
if: ${{ github.repository_owner == 'oven-sh' }}
|
||||
steps:
|
||||
# redeploy Vercel site when a file in `docs` changes
|
||||
# using VERCEL_DEPLOY_HOOK environment variable
|
||||
- name: Trigger Webhook
|
||||
run: |
|
||||
curl -v ${{ secrets.VERCEL_DEPLOY_HOOK }}
|
||||
2
.github/workflows/format.yml
vendored
2
.github/workflows/format.yml
vendored
@@ -9,7 +9,7 @@ on:
|
||||
pull_request:
|
||||
merge_group:
|
||||
env:
|
||||
BUN_VERSION: "1.3.2"
|
||||
BUN_VERSION: "1.2.20"
|
||||
LLVM_VERSION: "19.1.7"
|
||||
LLVM_VERSION_MAJOR: "19"
|
||||
|
||||
|
||||
@@ -105,16 +105,11 @@ jobs:
|
||||
env:
|
||||
GITHUB_ISSUE_BODY: ${{ github.event.issue.body }}
|
||||
GITHUB_ISSUE_TITLE: ${{ github.event.issue.title }}
|
||||
GITHUB_ISSUE_NUMBER: ${{ github.event.issue.number }}
|
||||
shell: bash
|
||||
run: |
|
||||
LABELS=$(bun scripts/read-issue.ts)
|
||||
bun scripts/is-outdated.ts
|
||||
|
||||
# Check for patterns that should close the issue
|
||||
CLOSE_ACTION=$(bun scripts/handle-crash-patterns.ts)
|
||||
echo "close-action=$CLOSE_ACTION" >> $GITHUB_OUTPUT
|
||||
|
||||
if [[ -f "is-outdated.txt" ]]; then
|
||||
echo "is-outdated=true" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
@@ -123,10 +118,6 @@ jobs:
|
||||
echo "outdated=$(cat outdated.txt)" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
if [[ -f "is-standalone.txt" ]]; then
|
||||
echo "is-standalone=true" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
if [[ -f "is-very-outdated.txt" ]]; then
|
||||
echo "is-very-outdated=true" >> $GITHUB_OUTPUT
|
||||
LABELS="$LABELS,old-version"
|
||||
@@ -136,32 +127,9 @@ jobs:
|
||||
|
||||
echo "latest=$(cat LATEST)" >> $GITHUB_OUTPUT
|
||||
echo "labels=$LABELS" >> $GITHUB_OUTPUT
|
||||
rm -rf is-outdated.txt outdated.txt latest.txt is-very-outdated.txt is-standalone.txt
|
||||
- name: Close issue if pattern detected
|
||||
if: github.event.label.name == 'crash' && fromJson(steps.add-labels.outputs.close-action).close == true
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
const closeAction = ${{ fromJson(steps.add-labels.outputs.close-action) }};
|
||||
|
||||
// Comment with the reason
|
||||
await github.rest.issues.createComment({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: context.issue.number,
|
||||
body: closeAction.comment
|
||||
});
|
||||
|
||||
// Close the issue
|
||||
await github.rest.issues.update({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: context.issue.number,
|
||||
state: 'closed',
|
||||
state_reason: closeAction.reason
|
||||
});
|
||||
rm -rf is-outdated.txt outdated.txt latest.txt is-very-outdated.txt
|
||||
- name: Generate comment text with Sentry Link
|
||||
if: github.event.label.name == 'crash' && fromJson(steps.add-labels.outputs.close-action).close != true
|
||||
if: github.event.label.name == 'crash'
|
||||
# ignore if fail
|
||||
continue-on-error: true
|
||||
id: generate-comment-text
|
||||
@@ -195,17 +163,8 @@ jobs:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.issue.number }}
|
||||
labels: ${{ steps.add-labels.outputs.labels }}
|
||||
- name: Comment outdated (standalone executable)
|
||||
if: steps.add-labels.outputs.is-outdated == 'true' && steps.add-labels.outputs.is-standalone == 'true' && github.event.label.name == 'crash' && steps.generate-comment-text.outputs.sentry-link == ''
|
||||
uses: actions-cool/issues-helper@v3
|
||||
with:
|
||||
actions: "create-comment"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.issue.number }}
|
||||
body: |
|
||||
@${{ github.event.issue.user.login }}, the latest version of Bun is v${{ steps.add-labels.outputs.latest }}, but the standalone executable is running Bun v${{ steps.add-labels.outputs.outdated }}. When the CLI using Bun's single-file executable next updates it might be fixed.
|
||||
- name: Comment outdated
|
||||
if: steps.add-labels.outputs.is-outdated == 'true' && steps.add-labels.outputs.is-standalone != 'true' && github.event.label.name == 'crash' && steps.generate-comment-text.outputs.sentry-link == ''
|
||||
if: steps.add-labels.outputs.is-outdated == 'true' && github.event.label.name == 'crash' && steps.generate-comment-text.outputs.sentry-link == ''
|
||||
uses: actions-cool/issues-helper@v3
|
||||
with:
|
||||
actions: "create-comment"
|
||||
@@ -219,22 +178,8 @@ jobs:
|
||||
```sh
|
||||
bun upgrade
|
||||
```
|
||||
- name: Comment with Sentry Link and outdated version (standalone executable)
|
||||
if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated == 'true' && steps.add-labels.outputs.is-standalone == 'true'
|
||||
uses: actions-cool/issues-helper@v3
|
||||
with:
|
||||
actions: "create-comment"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.issue.number }}
|
||||
body: |
|
||||
@${{ github.event.issue.user.login }}, thank you for reporting this crash. The latest version of Bun is v${{ steps.add-labels.outputs.latest }}, but the standalone executable is running Bun v${{ steps.add-labels.outputs.outdated }}. When the CLI using Bun's single-file executable next updates it might be fixed.
|
||||
|
||||
For Bun's internal tracking, this issue is [${{ steps.generate-comment-text.outputs.sentry-id }}](${{ steps.generate-comment-text.outputs.sentry-link }}).
|
||||
|
||||
<!-- sentry-id: ${{ steps.generate-comment-text.outputs.sentry-id }} -->
|
||||
<!-- sentry-link: ${{ steps.generate-comment-text.outputs.sentry-link }} -->
|
||||
- name: Comment with Sentry Link and outdated version
|
||||
if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated == 'true' && steps.add-labels.outputs.is-standalone != 'true'
|
||||
if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated == 'true'
|
||||
uses: actions-cool/issues-helper@v3
|
||||
with:
|
||||
actions: "create-comment"
|
||||
89
.github/workflows/on-submodule-update.yml
vendored
Normal file
89
.github/workflows/on-submodule-update.yml
vendored
Normal file
@@ -0,0 +1,89 @@
|
||||
name: Comment on updated submodule
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
paths:
|
||||
- "src/generated_versions_list.zig"
|
||||
- ".github/workflows/on-submodule-update.yml"
|
||||
|
||||
jobs:
|
||||
comment:
|
||||
name: Comment
|
||||
runs-on: ubuntu-latest
|
||||
if: ${{ github.repository_owner == 'oven-sh' }}
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
issues: write
|
||||
steps:
|
||||
- name: Checkout current
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
sparse-checkout: |
|
||||
src
|
||||
- name: Hash generated versions list
|
||||
id: hash
|
||||
run: |
|
||||
echo "hash=$(sha256sum src/generated_versions_list.zig | cut -d ' ' -f 1)" >> $GITHUB_OUTPUT
|
||||
- name: Checkout base
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ github.base_ref }}
|
||||
sparse-checkout: |
|
||||
src
|
||||
- name: Hash base
|
||||
id: base
|
||||
run: |
|
||||
echo "base=$(sha256sum src/generated_versions_list.zig | cut -d ' ' -f 1)" >> $GITHUB_OUTPUT
|
||||
- name: Compare
|
||||
id: compare
|
||||
run: |
|
||||
if [ "${{ steps.hash.outputs.hash }}" != "${{ steps.base.outputs.base }}" ]; then
|
||||
echo "changed=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "changed=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
- name: Find Comment
|
||||
id: comment
|
||||
uses: peter-evans/find-comment@v3
|
||||
with:
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
comment-author: github-actions[bot]
|
||||
body-includes: <!-- generated-comment submodule-updated -->
|
||||
- name: Write Warning Comment
|
||||
uses: peter-evans/create-or-update-comment@v4
|
||||
if: steps.compare.outputs.changed == 'true'
|
||||
with:
|
||||
comment-id: ${{ steps.comment.outputs.comment-id }}
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
edit-mode: replace
|
||||
body: |
|
||||
⚠️ **Warning:** @${{ github.actor }}, this PR has changes to submodule versions.
|
||||
|
||||
If this change was intentional, please ignore this message. If not, please undo changes to submodules and rebase your branch.
|
||||
|
||||
<!-- generated-comment submodule-updated -->
|
||||
- name: Add labels
|
||||
uses: actions-cool/issues-helper@v3
|
||||
if: steps.compare.outputs.changed == 'true'
|
||||
with:
|
||||
actions: "add-labels"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
labels: "changed-submodules"
|
||||
- name: Remove labels
|
||||
uses: actions-cool/issues-helper@v3
|
||||
if: steps.compare.outputs.changed == 'false'
|
||||
with:
|
||||
actions: "remove-labels"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
labels: "changed-submodules"
|
||||
- name: Delete outdated comment
|
||||
uses: actions-cool/issues-helper@v3
|
||||
if: steps.compare.outputs.changed == 'false' && steps.comment.outputs.comment-id != ''
|
||||
with:
|
||||
actions: "delete-comment"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
comment-id: ${{ steps.comment.outputs.comment-id }}
|
||||
19
.github/workflows/typos.yml
vendored
Normal file
19
.github/workflows/typos.yml
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
name: Typos
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
pull_request:
|
||||
branches:
|
||||
- main
|
||||
|
||||
jobs:
|
||||
docs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Spellcheck
|
||||
uses: crate-ci/typos@v1.29.4
|
||||
with:
|
||||
files: docs/**/*
|
||||
2
.github/workflows/update-cares.yml
vendored
2
.github/workflows/update-cares.yml
vendored
@@ -80,7 +80,7 @@ jobs:
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
uses: peter-evans/create-pull-request@v4
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
|
||||
4
.github/workflows/update-hdrhistogram.yml
vendored
4
.github/workflows/update-hdrhistogram.yml
vendored
@@ -55,7 +55,7 @@ jobs:
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
|
||||
# Try to get commit SHA from tag object (for annotated tags)
|
||||
# If it fails, assume it's a lightweight tag pointing directly to commit
|
||||
LATEST_SHA=$(curl -sL "https://api.github.com/repos/HdrHistogram/HdrHistogram_c/git/tags/$LATEST_TAG_SHA" 2>/dev/null | jq -r '.object.sha // empty')
|
||||
@@ -83,7 +83,7 @@ jobs:
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
uses: peter-evans/create-pull-request@v4
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
|
||||
4
.github/workflows/update-highway.yml
vendored
4
.github/workflows/update-highway.yml
vendored
@@ -58,7 +58,7 @@ jobs:
|
||||
|
||||
TAG_OBJECT_SHA=$(echo "$TAG_REF" | jq -r '.object.sha')
|
||||
TAG_OBJECT_TYPE=$(echo "$TAG_REF" | jq -r '.object.type')
|
||||
|
||||
|
||||
if [ -z "$TAG_OBJECT_SHA" ] || [ "$TAG_OBJECT_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
|
||||
exit 1
|
||||
@@ -99,7 +99,7 @@ jobs:
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
uses: peter-evans/create-pull-request@v4
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
|
||||
2
.github/workflows/update-libarchive.yml
vendored
2
.github/workflows/update-libarchive.yml
vendored
@@ -80,7 +80,7 @@ jobs:
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
uses: peter-evans/create-pull-request@v4
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
|
||||
2
.github/workflows/update-libdeflate.yml
vendored
2
.github/workflows/update-libdeflate.yml
vendored
@@ -80,7 +80,7 @@ jobs:
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
uses: peter-evans/create-pull-request@v4
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
|
||||
6
.github/workflows/update-lolhtml.yml
vendored
6
.github/workflows/update-lolhtml.yml
vendored
@@ -55,12 +55,12 @@ jobs:
|
||||
TAG_REF_RESPONSE=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/refs/tags/$LATEST_TAG")
|
||||
LATEST_TAG_SHA=$(echo "$TAG_REF_RESPONSE" | jq -r '.object.sha')
|
||||
TAG_OBJECT_TYPE=$(echo "$TAG_REF_RESPONSE" | jq -r '.object.type')
|
||||
|
||||
|
||||
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
|
||||
if [ "$TAG_OBJECT_TYPE" = "tag" ]; then
|
||||
# This is an annotated tag, we need to get the commit it points to
|
||||
LATEST_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
|
||||
@@ -92,7 +92,7 @@ jobs:
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
uses: peter-evans/create-pull-request@v4
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
|
||||
4
.github/workflows/update-lshpack.yml
vendored
4
.github/workflows/update-lshpack.yml
vendored
@@ -59,7 +59,7 @@ jobs:
|
||||
|
||||
LATEST_TAG_SHA=$(echo "$TAG_REF" | jq -r '.object.sha')
|
||||
TAG_TYPE=$(echo "$TAG_REF" | jq -r '.object.type')
|
||||
|
||||
|
||||
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
|
||||
exit 1
|
||||
@@ -97,7 +97,7 @@ jobs:
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
uses: peter-evans/create-pull-request@v4
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
|
||||
21
.github/workflows/update-sqlite3.yml
vendored
21
.github/workflows/update-sqlite3.yml
vendored
@@ -70,11 +70,28 @@ jobs:
|
||||
- name: Update SQLite if needed
|
||||
if: success() && steps.check-version.outputs.current_num < steps.check-version.outputs.latest_num
|
||||
run: |
|
||||
./scripts/update-sqlite-amalgamation.sh ${{ steps.check-version.outputs.latest_num }} ${{ steps.check-version.outputs.latest_year }}
|
||||
set -euo pipefail
|
||||
|
||||
TEMP_DIR=$(mktemp -d)
|
||||
cd $TEMP_DIR
|
||||
|
||||
echo "Downloading from: https://sqlite.org/${{ steps.check-version.outputs.latest_year }}/sqlite-amalgamation-${{ steps.check-version.outputs.latest_num }}.zip"
|
||||
|
||||
# Download and extract latest version
|
||||
wget "https://sqlite.org/${{ steps.check-version.outputs.latest_year }}/sqlite-amalgamation-${{ steps.check-version.outputs.latest_num }}.zip"
|
||||
unzip "sqlite-amalgamation-${{ steps.check-version.outputs.latest_num }}.zip"
|
||||
cd "sqlite-amalgamation-${{ steps.check-version.outputs.latest_num }}"
|
||||
|
||||
# Add header comment and copy files
|
||||
echo "// clang-format off" > $GITHUB_WORKSPACE/src/bun.js/bindings/sqlite/sqlite3.c
|
||||
cat sqlite3.c >> $GITHUB_WORKSPACE/src/bun.js/bindings/sqlite/sqlite3.c
|
||||
|
||||
echo "// clang-format off" > $GITHUB_WORKSPACE/src/bun.js/bindings/sqlite/sqlite3_local.h
|
||||
cat sqlite3.h >> $GITHUB_WORKSPACE/src/bun.js/bindings/sqlite/sqlite3_local.h
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current_num < steps.check-version.outputs.latest_num
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
uses: peter-evans/create-pull-request@v4
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
|
||||
79
.github/workflows/update-vendor.yml
vendored
79
.github/workflows/update-vendor.yml
vendored
@@ -1,79 +0,0 @@
|
||||
name: Update vendor
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 4 * * 0"
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
check-update:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
strategy:
|
||||
matrix:
|
||||
package:
|
||||
- elysia
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: oven-sh/setup-bun@v2
|
||||
|
||||
- name: Check version
|
||||
id: check-version
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Extract the commit hash from the line after COMMIT
|
||||
current=$(bun -p '(await Bun.file("test/vendor.json").json()).filter(v=>v.package===process.argv[1])[0].tag' ${{ matrix.package }})
|
||||
repository=$(bun -p '(await Bun.file("test/vendor.json").json()).filter(v=>v.package===process.argv[1])[0].repository' ${{ matrix.package }} | cut -d'/' -f4,5)
|
||||
|
||||
if [ -z "$current" ]; then
|
||||
echo "Error: Could not find COMMIT line in test/vendor.json"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "current=$current" >> $GITHUB_OUTPUT
|
||||
echo "repository=$repository" >> $GITHUB_OUTPUT
|
||||
|
||||
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/${repository}/releases/latest)
|
||||
if [ -z "$LATEST_RELEASE" ]; then
|
||||
echo "Error: Failed to fetch latest release from GitHub API"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
|
||||
if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
|
||||
echo "Error: Could not extract tag name from GitHub API response"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "latest=$LATEST_TAG" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Update version if needed
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
run: |
|
||||
set -euo pipefail
|
||||
bun -e 'await Bun.write("test/vendor.json", JSON.stringify((await Bun.file("test/vendor.json").json()).map(v=>{if(v.package===process.argv[1])v.tag=process.argv[2];return v;}), null, 2) + "\n")' ${{ matrix.package }} ${{ steps.check-version.outputs.latest }}
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
test/vendor.json
|
||||
commit-message: "deps: update ${{ matrix.package }} to ${{ steps.check-version.outputs.latest }} (${{ steps.check-version.outputs.latest }})"
|
||||
title: "deps: update ${{ matrix.package }} to ${{ steps.check-version.outputs.latest }}"
|
||||
delete-branch: true
|
||||
branch: deps/update-${{ matrix.package }}-${{ github.run_number }}
|
||||
body: |
|
||||
## What does this PR do?
|
||||
|
||||
Updates ${{ matrix.package }} to version ${{ steps.check-version.outputs.latest }}
|
||||
|
||||
Compare: https://github.com/${{ steps.check-version.outputs.repository }}/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
|
||||
|
||||
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-vendor.yml)
|
||||
2
.github/workflows/update-zstd.yml
vendored
2
.github/workflows/update-zstd.yml
vendored
@@ -80,7 +80,7 @@ jobs:
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
uses: peter-evans/create-pull-request@v4
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
|
||||
5
.github/workflows/vscode-release.yml
vendored
5
.github/workflows/vscode-release.yml
vendored
@@ -45,8 +45,3 @@ jobs:
|
||||
env:
|
||||
VSCE_PAT: ${{ secrets.VSCODE_EXTENSION }}
|
||||
working-directory: packages/bun-vscode/extension
|
||||
|
||||
- uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: bun-vscode-${{ github.event.inputs.version }}.vsix
|
||||
path: packages/bun-vscode/extension/bun-vscode-${{ github.event.inputs.version }}.vsix
|
||||
|
||||
5
.gitignore
vendored
5
.gitignore
vendored
@@ -1,15 +1,12 @@
|
||||
.claude/settings.local.json
|
||||
.DS_Store
|
||||
.env
|
||||
.envrc
|
||||
.eslintcache
|
||||
.gdb_history
|
||||
.idea
|
||||
.next
|
||||
.ninja_deps
|
||||
.ninja_log
|
||||
.npm
|
||||
.npmrc
|
||||
.npm.gz
|
||||
.parcel-cache
|
||||
.swcrc
|
||||
@@ -192,4 +189,4 @@ scratch*.{js,ts,tsx,cjs,mjs}
|
||||
scripts/lldb-inline
|
||||
|
||||
# We regenerate these in all the build scripts
|
||||
cmake/sources/*.txt
|
||||
cmake/sources/*.txt
|
||||
@@ -7,8 +7,4 @@ src/react-refresh.js
|
||||
*.min.js
|
||||
test/snippets
|
||||
test/js/node/test
|
||||
test/napi/node-napi-tests
|
||||
bun.lock
|
||||
|
||||
# the output codeblocks need to stay minified
|
||||
docs/bundler/minifier.mdx
|
||||
|
||||
@@ -19,12 +19,6 @@
|
||||
"options": {
|
||||
"printWidth": 80
|
||||
}
|
||||
},
|
||||
{
|
||||
"files": ["src/codegen/bindgenv2/**/*.ts", "*.bindv2.ts"],
|
||||
"options": {
|
||||
"printWidth": 100
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
11
.vscode/launch.json
generated
vendored
11
.vscode/launch.json
generated
vendored
@@ -25,9 +25,6 @@
|
||||
// "BUN_JSC_validateExceptionChecks": "1",
|
||||
// "BUN_JSC_dumpSimulatedThrows": "1",
|
||||
// "BUN_JSC_unexpectedExceptionStackTraceLimit": "20",
|
||||
// "BUN_DESTRUCT_VM_ON_EXIT": "1",
|
||||
// "ASAN_OPTIONS": "allow_user_segv_handler=1:disable_coredump=0:detect_leaks=1:abort_on_error=1",
|
||||
// "LSAN_OPTIONS": "malloc_context_size=100:print_suppressions=1:suppressions=${workspaceFolder}/test/leaksan.supp",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
"sourceMap": {
|
||||
@@ -60,17 +57,11 @@
|
||||
"name": "bun run [file]",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug",
|
||||
"args": ["${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"cwd": "${fileDirname}",
|
||||
"env": {
|
||||
"FORCE_COLOR": "0",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
// "BUN_JSC_validateExceptionChecks": "1",
|
||||
// "BUN_JSC_dumpSimulatedThrows": "1",
|
||||
// "BUN_JSC_unexpectedExceptionStackTraceLimit": "20",
|
||||
// "BUN_DESTRUCT_VM_ON_EXIT": "1",
|
||||
// "ASAN_OPTIONS": "allow_user_segv_handler=1:disable_coredump=0:detect_leaks=1:abort_on_error=1",
|
||||
// "LSAN_OPTIONS": "malloc_context_size=100:print_suppressions=1:suppressions=${workspaceFolder}/test/leaksan.supp",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
"sourceMap": {
|
||||
|
||||
16
.vscode/settings.json
vendored
16
.vscode/settings.json
vendored
@@ -27,22 +27,18 @@
|
||||
"git.ignoreLimitWarning": true,
|
||||
|
||||
// Zig
|
||||
// "zig.initialSetupDone": true,
|
||||
// "zig.buildOption": "build",
|
||||
"zig.initialSetupDone": true,
|
||||
"zig.buildOption": "build",
|
||||
"zig.zls.zigLibPath": "${workspaceFolder}/vendor/zig/lib",
|
||||
"zig.buildOnSaveArgs": [
|
||||
"-Dgenerated-code=./build/debug/codegen",
|
||||
"--watch",
|
||||
"-fincremental"
|
||||
],
|
||||
// "zig.zls.buildOnSaveStep": "check",
|
||||
"zig.buildArgs": ["-Dgenerated-code=./build/debug/codegen", "--watch", "-fincremental"],
|
||||
"zig.zls.buildOnSaveStep": "check",
|
||||
// "zig.zls.enableBuildOnSave": true,
|
||||
// "zig.buildOnSave": true,
|
||||
// "zig.buildFilePath": "${workspaceFolder}/build.zig",
|
||||
"zig.buildFilePath": "${workspaceFolder}/build.zig",
|
||||
"zig.path": "${workspaceFolder}/vendor/zig/zig.exe",
|
||||
"zig.zls.path": "${workspaceFolder}/vendor/zig/zls.exe",
|
||||
"zig.formattingProvider": "zls",
|
||||
// "zig.zls.enableInlayHints": false,
|
||||
"zig.zls.enableInlayHints": false,
|
||||
"[zig]": {
|
||||
"editor.tabSize": 4,
|
||||
"editor.useTabStops": false,
|
||||
|
||||
124
CLAUDE.md
124
CLAUDE.md
@@ -4,14 +4,18 @@ This is the Bun repository - an all-in-one JavaScript runtime & toolkit designed
|
||||
|
||||
### Build Commands
|
||||
|
||||
- **Build Bun**: `bun bd`
|
||||
- **Build debug version**: `bun bd`
|
||||
- Creates a debug build at `./build/debug/bun-debug`
|
||||
- **CRITICAL**: do not set a timeout when running `bun bd`
|
||||
- **CRITICAL**: DO NOT set a build timeout. Compilation takes ~5 minutes. Be patient.
|
||||
- **Run tests with your debug build**: `bun bd test <test-file>`
|
||||
- **CRITICAL**: Never use `bun test` directly - it won't include your changes
|
||||
- **Run any command with debug build**: `bun bd <command>`
|
||||
|
||||
Tip: Bun is already installed and in $PATH. The `bd` subcommand is a package.json script.
|
||||
### Other Build Variants
|
||||
|
||||
- `bun run build:release` - Release build
|
||||
|
||||
Address sanitizer is enabled by default in debug builds of Bun.
|
||||
|
||||
## Testing
|
||||
|
||||
@@ -23,51 +27,28 @@ Tip: Bun is already installed and in $PATH. The `bd` subcommand is a package.jso
|
||||
|
||||
### Test Organization
|
||||
|
||||
If a test is for a specific numbered GitHub Issue, it should be placed in `test/regression/issue/${issueNumber}.test.ts`. Ensure the issue number is **REAL** and not a placeholder!
|
||||
|
||||
If no valid issue number is provided, find the best existing file to modify instead, such as;
|
||||
|
||||
- `test/js/bun/` - Bun-specific API tests (http, crypto, ffi, shell, etc.)
|
||||
- `test/js/node/` - Node.js compatibility tests
|
||||
- `test/js/web/` - Web API tests (fetch, WebSocket, streams, etc.)
|
||||
- `test/cli/` - CLI command tests (install, run, test, etc.)
|
||||
- `test/bundler/` - Bundler and transpiler tests. Use `itBundled` helper.
|
||||
- `test/regression/issue/` - Regression tests (create one per bug fix)
|
||||
- `test/bundler/` - Bundler and transpiler tests
|
||||
- `test/integration/` - End-to-end integration tests
|
||||
- `test/napi/` - N-API compatibility tests
|
||||
- `test/v8/` - V8 C++ API compatibility tests
|
||||
|
||||
### Writing Tests
|
||||
|
||||
Tests use Bun's Jest-compatible test runner with proper test fixtures.
|
||||
|
||||
- For **single-file tests**, prefer `-e` over `tempDir`.
|
||||
- For **multi-file tests**, prefer `tempDir` and `Bun.spawn`.
|
||||
Tests use Bun's Jest-compatible test runner with proper test fixtures:
|
||||
|
||||
```typescript
|
||||
import { test, expect } from "bun:test";
|
||||
import { bunEnv, bunExe, normalizeBunSnapshot, tempDir } from "harness";
|
||||
|
||||
test("(single-file test) my feature", async () => {
|
||||
await using proc = Bun.spawn({
|
||||
cmd: [bunExe(), "-e", "console.log('Hello, world!')"],
|
||||
env: bunEnv,
|
||||
});
|
||||
|
||||
const [stdout, stderr, exitCode] = await Promise.all([
|
||||
proc.stdout.text(),
|
||||
proc.stderr.text(),
|
||||
proc.exited,
|
||||
]);
|
||||
|
||||
expect(normalizeBunSnapshot(stdout)).toMatchInlineSnapshot(`"Hello, world!"`);
|
||||
expect(exitCode).toBe(0);
|
||||
});
|
||||
|
||||
test("(multi-file test) my feature", async () => {
|
||||
test("my feature", async () => {
|
||||
// Create temp directory with test files
|
||||
using dir = tempDir("test-prefix", {
|
||||
"index.js": `import { foo } from "./foo.ts"; foo();`,
|
||||
"foo.ts": `export function foo() { console.log("foo"); }`,
|
||||
"index.js": `console.log("hello");`,
|
||||
});
|
||||
|
||||
// Spawn Bun process
|
||||
@@ -84,21 +65,15 @@ test("(multi-file test) my feature", async () => {
|
||||
proc.exited,
|
||||
]);
|
||||
|
||||
expect(exitCode).toBe(0);
|
||||
// Prefer snapshot tests over expect(stdout).toBe("hello\n");
|
||||
expect(normalizeBunSnapshot(stdout, dir)).toMatchInlineSnapshot(`"hello"`);
|
||||
|
||||
// Assert the exit code last. This gives you a more useful error message on test failure.
|
||||
expect(exitCode).toBe(0);
|
||||
});
|
||||
```
|
||||
|
||||
- Always use `port: 0`. Do not hardcode ports. Do not use your own random port number function.
|
||||
- Use `normalizeBunSnapshot` to normalize snapshot output of the test.
|
||||
- NEVER write tests that check for no "panic" or "uncaught exception" or similar in the test output. These tests will never fail in CI.
|
||||
- Use `tempDir` from `"harness"` to create a temporary directory. **Do not** use `tmpdirSync` or `fs.mkdtempSync` to create temporary directories.
|
||||
- When spawning processes, tests should expect(stdout).toBe(...) BEFORE expect(exitCode).toBe(0). This gives you a more useful error message on test failure.
|
||||
- **CRITICAL**: Do not write flaky tests. Do not use `setTimeout` in tests. Instead, `await` the condition to be met. You are not testing the TIME PASSING, you are testing the CONDITION.
|
||||
- **CRITICAL**: Verify your test fails with `USE_SYSTEM_BUN=1 bun test <file>` and passes with `bun bd test <file>`. Your test is NOT VALID if it passes with `USE_SYSTEM_BUN=1`.
|
||||
- NEVER write tests that check for no "panic" or "uncaught exception" or similar in the test output. That is NOT a valid test.
|
||||
|
||||
## Code Architecture
|
||||
|
||||
@@ -107,7 +82,7 @@ test("(multi-file test) my feature", async () => {
|
||||
- **Zig code** (`src/*.zig`): Core runtime, JavaScript bindings, package manager
|
||||
- **C++ code** (`src/bun.js/bindings/*.cpp`): JavaScriptCore bindings, Web APIs
|
||||
- **TypeScript** (`src/js/`): Built-in JavaScript modules with special syntax (see JavaScript Modules section)
|
||||
- **Generated code**: Many files are auto-generated from `.classes.ts` and other sources. Bun will automatically rebuild these files when you make changes to them.
|
||||
- **Generated code**: Many files are auto-generated from `.classes.ts` and other sources
|
||||
|
||||
### Core Source Organization
|
||||
|
||||
@@ -172,6 +147,19 @@ When implementing JavaScript classes in C++:
|
||||
3. Add iso subspaces for classes with C++ fields
|
||||
4. Cache structures in ZigGlobalObject
|
||||
|
||||
## Development Workflow
|
||||
|
||||
### Code Formatting
|
||||
|
||||
- `bun run prettier` - Format JS/TS files
|
||||
- `bun run zig-format` - Format Zig files
|
||||
- `bun run clang-format` - Format C++ files
|
||||
|
||||
### Watching for Changes
|
||||
|
||||
- `bun run watch` - Incremental Zig compilation with error checking
|
||||
- `bun run watch-windows` - Windows-specific watch mode
|
||||
|
||||
### Code Generation
|
||||
|
||||
Code generation happens automatically as part of the build process. The main scripts are:
|
||||
@@ -193,6 +181,47 @@ Built-in JavaScript modules use special syntax and are organized as:
|
||||
- `internal/` - Internal modules not exposed to users
|
||||
- `builtins/` - Core JavaScript builtins (streams, console, etc.)
|
||||
|
||||
### Special Syntax in Built-in Modules
|
||||
|
||||
1. **`$` prefix** - Access to private properties and JSC intrinsics:
|
||||
|
||||
```js
|
||||
const arr = $Array.from(...); // Private global
|
||||
map.$set(...); // Private method
|
||||
const arr2 = $newArrayWithSize(5); // JSC intrinsic
|
||||
```
|
||||
|
||||
2. **`require()`** - Must use string literals, resolved at compile time:
|
||||
|
||||
```js
|
||||
const fs = require("fs"); // Directly loads by numeric ID
|
||||
```
|
||||
|
||||
3. **Debug helpers**:
|
||||
- `$debug()` - Like console.log but stripped in release builds
|
||||
- `$assert()` - Assertions stripped in release builds
|
||||
- `if($debug) {}` - Check if debug env var is set
|
||||
|
||||
4. **Platform detection**: `process.platform` and `process.arch` are inlined and dead-code eliminated
|
||||
|
||||
5. **Export syntax**: Use `export default` which gets converted to a return statement:
|
||||
```js
|
||||
export default {
|
||||
readFile,
|
||||
writeFile,
|
||||
};
|
||||
```
|
||||
|
||||
Note: These are NOT ES modules. The preprocessor converts `$` to `@` (JSC's actual syntax) and handles the special functions.
|
||||
|
||||
## CI
|
||||
|
||||
Bun uses BuildKite for CI. To get the status of a PR, you can use the following command:
|
||||
|
||||
```bash
|
||||
bun ci
|
||||
```
|
||||
|
||||
## Important Development Notes
|
||||
|
||||
1. **Never use `bun test` or `bun <file>` directly** - always use `bun bd test` or `bun bd <command>`. `bun bd` compiles & runs the debug build.
|
||||
@@ -204,8 +233,19 @@ Built-in JavaScript modules use special syntax and are organized as:
|
||||
7. **Avoid shell commands** - Don't use `find` or `grep` in tests; use Bun's Glob and built-in tools
|
||||
8. **Memory management** - In Zig code, be careful with allocators and use defer for cleanup
|
||||
9. **Cross-platform** - Run `bun run zig:check-all` to compile the Zig code on all platforms when making platform-specific changes
|
||||
10. **Debug builds** - Use `BUN_DEBUG_QUIET_LOGS=1` to disable debug logging, or `BUN_DEBUG_<scopeName>=1` to enable specific `Output.scoped(.${scopeName}, .visible)`s
|
||||
10. **Debug builds** - Use `BUN_DEBUG_QUIET_LOGS=1` to disable debug logging, or `BUN_DEBUG_<scope>=1` to enable specific scopes
|
||||
11. **Be humble & honest** - NEVER overstate what you got done or what actually works in commits, PRs or in messages to the user.
|
||||
12. **Branch names must start with `claude/`** - This is a requirement for the CI to work.
|
||||
|
||||
**ONLY** push up changes after running `bun bd test <file>` and ensuring your tests pass.
|
||||
## Key APIs and Features
|
||||
|
||||
### Bun-Specific APIs
|
||||
|
||||
- **Bun.serve()** - High-performance HTTP server
|
||||
- **Bun.spawn()** - Process spawning with better performance than Node.js
|
||||
- **Bun.file()** - Fast file I/O operations
|
||||
- **Bun.write()** - Unified API for writing to files, stdout, etc.
|
||||
- **Bun.$ (Shell)** - Cross-platform shell scripting
|
||||
- **Bun.SQLite** - Native SQLite integration
|
||||
- **Bun.FFI** - Call native libraries from JavaScript
|
||||
- **Bun.Glob** - Fast file pattern matching
|
||||
|
||||
@@ -24,17 +24,13 @@ if(CMAKE_HOST_APPLE)
  include(SetupMacSDK)
endif()
include(SetupLLVM)
include(SetupCcache)

# --- Project ---

parse_package_json(VERSION_VARIABLE DEFAULT_VERSION)
optionx(VERSION STRING "The version of Bun" DEFAULT ${DEFAULT_VERSION})
project(Bun VERSION ${VERSION})

# Bun uses C++23, which is compatible with BoringSSL's C++17 requirement
set(CMAKE_CXX_STANDARD 23)
set(CMAKE_CXX_STANDARD_REQUIRED ON)

include(Options)
include(CompilerFlags)

@@ -47,11 +43,6 @@ include(SetupEsbuild)
include(SetupZig)
include(SetupRust)

include(SetupCcache)

# Generate dependency versions header
include(GenerateDependencyVersions)

# --- Targets ---

include(BuildBun)

@@ -2,21 +2,7 @@ Configuring a development environment for Bun can take 10-30 minutes depending o

If you are using Windows, please refer to [this guide](https://bun.com/docs/project/building-windows).

## Using Nix (Alternative)

A Nix flake is provided as an alternative to manual dependency installation:

```bash
nix develop
# or explicitly use the pure shell
# nix develop .#pure
export CMAKE_SYSTEM_PROCESSOR=$(uname -m)
bun bd
```

This provides all dependencies in an isolated, reproducible environment without requiring sudo.

## Install Dependencies (Manual)
## Install Dependencies

Using your system's package manager, install Bun's dependencies:

@@ -27,15 +13,15 @@ $ brew install automake ccache cmake coreutils gnu-sed go icu4c libiconv libtool
```

```bash#Ubuntu/Debian
$ sudo apt install curl wget lsb-release software-properties-common cargo cmake git golang libtool ninja-build pkg-config rustc ruby-full xz-utils
$ sudo apt install curl wget lsb-release software-properties-common cargo ccache cmake git golang libtool ninja-build pkg-config rustc ruby-full xz-utils
```

```bash#Arch
$ sudo pacman -S base-devel cmake git go libiconv libtool make ninja pkg-config python rust sed unzip ruby
$ sudo pacman -S base-devel ccache cmake git go libiconv libtool make ninja pkg-config python rust sed unzip ruby
```

```bash#Fedora
$ sudo dnf install cargo clang19 llvm19 lld19 cmake git golang libtool ninja-build pkg-config rustc ruby libatomic-static libstdc++-static sed unzip which libicu-devel 'perl(Math::BigInt)'
$ sudo dnf install cargo ccache cmake git golang libtool ninja-build pkg-config rustc ruby libatomic-static libstdc++-static sed unzip which libicu-devel 'perl(Math::BigInt)'
```

```bash#openSUSE Tumbleweed
@@ -65,29 +51,6 @@ $ brew install bun

{% /codetabs %}

### Optional: Install `ccache`

ccache is used to cache compilation artifacts, significantly speeding up builds:

```bash
# For macOS
$ brew install ccache

# For Ubuntu/Debian
$ sudo apt install ccache

# For Arch
$ sudo pacman -S ccache

# For Fedora
$ sudo dnf install ccache

# For openSUSE
$ sudo zypper install ccache
```

Our build scripts will automatically detect and use `ccache` if available. You can check cache statistics with `ccache --show-stats`.

## Install LLVM

Bun requires LLVM 19 (`clang` is part of LLVM). This version requirement is to match WebKit (precompiled), as mismatching versions will cause memory allocation failures at runtime. In most cases, you can install LLVM through your system package manager:
@@ -186,7 +149,7 @@ Bun generally takes about 2.5 minutes to compile a debug build when there are Zi
- Batch up your changes
- Ensure zls is running with incremental watching for LSP errors (if you use VSCode and install Zig and run `bun run build` once to download Zig, this should just work)
- Prefer using the debugger ("CodeLLDB" in VSCode) to step through the code.
- Use debug logs. `BUN_DEBUG_<scope>=1` will enable debug logging for the corresponding `Output.scoped(.<scope>, .hidden)` logs. You can also set `BUN_DEBUG_QUIET_LOGS=1` to disable all debug logging that isn't explicitly enabled. To dump debug logs into a file, `BUN_DEBUG=<path-to-file>.log`. Debug logs are aggressively removed in release builds.
- Use debug logs. `BUN_DEBUG_<scope>=1` will enable debug logging for the corresponding `Output.scoped(.<scope>, false)` logs. You can also set `BUN_DEBUG_QUIET_LOGS=1` to disable all debug logging that isn't explicitly enabled. To dump debug logs into a file, `BUN_DEBUG=<path-to-file>.log`. Debug logs are aggressively removed in release builds.
- src/js/\*\*.ts changes are pretty much instant to rebuild. C++ changes are a bit slower, but still much faster than the Zig code (Zig is one compilation unit, C++ is many).

## Code generation scripts

@@ -354,6 +317,15 @@ $ bun run build -DUSE_STATIC_LIBATOMIC=OFF

The built version of Bun may not work on other systems if compiled this way.

### ccache conflicts with building TinyCC on macOS

If you run into issues with `ccache` when building TinyCC, try reinstalling ccache:

```bash
brew uninstall ccache
brew install ccache
```

## Using bun-debug

- Disable logging: `BUN_DEBUG_QUIET_LOGS=1 bun-debug ...` (to disable all debug logging)

10
README.md
10
README.md
@@ -54,7 +54,7 @@ Bun supports Linux (x64 & arm64), macOS (x64 & Apple Silicon) and Windows (x64).
curl -fsSL https://bun.com/install | bash

# on windows
powershell -c "irm bun.sh/install.ps1 | iex"
powershell -c "irm bun.com/install.ps1 | iex"

# with npm
npm install -g bun
@@ -104,13 +104,13 @@ bun upgrade --canary
- [File types (Loaders)](https://bun.com/docs/runtime/loaders)
- [TypeScript](https://bun.com/docs/runtime/typescript)
- [JSX](https://bun.com/docs/runtime/jsx)
- [Environment variables](https://bun.com/docs/runtime/environment-variables)
- [Environment variables](https://bun.com/docs/runtime/env)
- [Bun APIs](https://bun.com/docs/runtime/bun-apis)
- [Web APIs](https://bun.com/docs/runtime/web-apis)
- [Node.js compatibility](https://bun.com/docs/runtime/nodejs-compat)
- [Node.js compatibility](https://bun.com/docs/runtime/nodejs-apis)
- [Single-file executable](https://bun.com/docs/bundler/executables)
- [Plugins](https://bun.com/docs/runtime/plugins)
- [Watch mode / Hot Reloading](https://bun.com/docs/runtime/watch-mode)
- [Watch mode / Hot Reloading](https://bun.com/docs/runtime/hot)
- [Module resolution](https://bun.com/docs/runtime/modules)
- [Auto-install](https://bun.com/docs/runtime/autoimport)
- [bunfig.toml](https://bun.com/docs/runtime/bunfig)
@@ -230,7 +230,7 @@ bun upgrade --canary

- Ecosystem
  - [Use React and JSX](https://bun.com/guides/ecosystem/react)
  - [Use Gel with Bun](https://bun.com/guides/ecosystem/gel)
  - [Use EdgeDB with Bun](https://bun.com/guides/ecosystem/edgedb)
  - [Use Prisma with Bun](https://bun.com/guides/ecosystem/prisma)
  - [Add Sentry to a Bun app](https://bun.com/guides/ecosystem/sentry)
  - [Create a Discord bot](https://bun.com/guides/ecosystem/discordjs)

@@ -1,6 +1,5 @@
|
||||
{
|
||||
"lockfileVersion": 1,
|
||||
"configVersion": 0,
|
||||
"workspaces": {
|
||||
"": {
|
||||
"name": "bench",
|
||||
@@ -23,7 +22,6 @@
|
||||
"react-dom": "^18.3.1",
|
||||
"string-width": "7.1.0",
|
||||
"strip-ansi": "^7.1.0",
|
||||
"tar": "^7.4.3",
|
||||
"tinycolor2": "^1.6.0",
|
||||
"zx": "^7.2.3",
|
||||
},
|
||||
@@ -109,8 +107,6 @@
|
||||
|
||||
"@fastify/proxy-addr": ["@fastify/proxy-addr@5.0.0", "", { "dependencies": { "@fastify/forwarded": "^3.0.0", "ipaddr.js": "^2.1.0" } }, "sha512-37qVVA1qZ5sgH7KpHkkC4z9SK6StIsIcOmpjvMPXNb3vx2GQxhZocogVYbr2PbbeLCQxYIPDok307xEvRZOzGA=="],
|
||||
|
||||
"@isaacs/fs-minipass": ["@isaacs/fs-minipass@4.0.1", "", { "dependencies": { "minipass": "^7.0.4" } }, "sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w=="],
|
||||
|
||||
"@jridgewell/gen-mapping": ["@jridgewell/gen-mapping@0.1.1", "", { "dependencies": { "@jridgewell/set-array": "^1.0.0", "@jridgewell/sourcemap-codec": "^1.4.10" } }, "sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w=="],
|
||||
|
||||
"@jridgewell/resolve-uri": ["@jridgewell/resolve-uri@3.1.0", "", {}, "sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w=="],
|
||||
@@ -185,8 +181,6 @@
|
||||
|
||||
"chalk": ["chalk@5.3.0", "", {}, "sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w=="],
|
||||
|
||||
"chownr": ["chownr@3.0.0", "", {}, "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g=="],
|
||||
|
||||
"color": ["color@4.2.3", "", { "dependencies": { "color-convert": "^2.0.1", "color-string": "^1.9.0" } }, "sha512-1rXeuUUiGGrykh+CeBdu5Ie7OJwinCgQY0bc7GCRxy5xVHy+moaqkpL/jqQq0MtQOeYcrqEz4abc5f0KtU7W4A=="],
|
||||
|
||||
"color-convert": ["color-convert@2.0.1", "", { "dependencies": { "color-name": "~1.1.4" } }, "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ=="],
|
||||
@@ -367,10 +361,6 @@
|
||||
|
||||
"minimist": ["minimist@1.2.8", "", {}, "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA=="],
|
||||
|
||||
"minipass": ["minipass@7.1.2", "", {}, "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw=="],
|
||||
|
||||
"minizlib": ["minizlib@3.1.0", "", { "dependencies": { "minipass": "^7.1.2" } }, "sha512-KZxYo1BUkWD2TVFLr0MQoM8vUUigWD3LlD83a/75BqC+4qE0Hb1Vo5v1FgcfaNXvfXzr+5EhQ6ing/CaBijTlw=="],
|
||||
|
||||
"mitata": ["mitata@1.0.25", "", {}, "sha512-0v5qZtVW5vwj9FDvYfraR31BMDcRLkhSFWPTLaxx/Z3/EvScfVtAAWtMI2ArIbBcwh7P86dXh0lQWKiXQPlwYA=="],
|
||||
|
||||
"ms": ["ms@2.1.2", "", {}, "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="],
|
||||
@@ -467,8 +457,6 @@
|
||||
|
||||
"supports-color": ["supports-color@5.5.0", "", { "dependencies": { "has-flag": "^3.0.0" } }, "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow=="],
|
||||
|
||||
"tar": ["tar@7.5.2", "", { "dependencies": { "@isaacs/fs-minipass": "^4.0.0", "chownr": "^3.0.0", "minipass": "^7.1.2", "minizlib": "^3.1.0", "yallist": "^5.0.0" } }, "sha512-7NyxrTE4Anh8km8iEy7o0QYPs+0JKBTj5ZaqHg6B39erLg0qYXN3BijtShwbsNSvQ+LN75+KV+C4QR/f6Gwnpg=="],
|
||||
|
||||
"thread-stream": ["thread-stream@3.1.0", "", { "dependencies": { "real-require": "^0.2.0" } }, "sha512-OqyPZ9u96VohAyMfJykzmivOrY2wfMSf3C5TtFJVgN+Hm6aj+voFhlK+kZEIv2FBh1X6Xp3DlnCOfEQ3B2J86A=="],
|
||||
|
||||
"through": ["through@2.3.8", "", {}, "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg=="],
|
||||
@@ -493,7 +481,7 @@
|
||||
|
||||
"which": ["which@3.0.1", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "bin/which.js" } }, "sha512-XA1b62dzQzLfaEOSQFTCOd5KFf/1VSzZo7/7TUjnya6u0vGGKzU96UQBZTAThCb2j4/xjBAyii1OhRLJEivHvg=="],
|
||||
|
||||
"yallist": ["yallist@5.0.0", "", {}, "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw=="],
|
||||
"yallist": ["yallist@3.1.1", "", {}, "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g=="],
|
||||
|
||||
"yaml": ["yaml@2.3.4", "", {}, "sha512-8aAvwVUSHpfEqTQ4w/KMlf3HcRdt50E5ODIQJBw1fQ5RL34xabzxtUlzTXVqc4rkZsPbvrXKWnABCD7kWSmocA=="],
|
||||
|
||||
@@ -513,8 +501,6 @@
|
||||
|
||||
"light-my-request/process-warning": ["process-warning@4.0.1", "", {}, "sha512-3c2LzQ3rY9d0hc1emcsHhfT9Jwz0cChib/QN89oME2R451w5fy3f0afAhERFZAwrbDU43wk12d0ORBpDVME50Q=="],
|
||||
|
||||
"lru-cache/yallist": ["yallist@3.1.1", "", {}, "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g=="],
|
||||
|
||||
"npm-run-path/path-key": ["path-key@4.0.0", "", {}, "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ=="],
|
||||
|
||||
"ansi-styles/color-convert/color-name": ["color-name@1.1.3", "", {}, "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw=="],
|
||||
|
||||
@@ -1,29 +1,40 @@
# Create T3 App
# `install` benchmark

This is a [T3 Stack](https://create.t3.gg/) project bootstrapped with `create-t3-app`.
Requires [`hyperfine`](https://github.com/sharkdp/hyperfine). The goal of this benchmark is to compare installation performance of Bun with other package managers _when caches are hot_.

## What's next? How do I make an app with this?
### With lockfile, online mode

We try to keep this project as simple as possible, so you can start with just the scaffolding we set up for you, and add additional things later when they become necessary.
To run the benchmark with the standard "install" command for each package manager:

If you are not familiar with the different technologies used in this project, please refer to the respective docs. If you still are in the wind, please join our [Discord](https://t3.gg/discord) and ask for help.
```sh
$ hyperfine --prepare 'rm -rf node_modules' --warmup 1 --runs 3 'bun install' 'pnpm install' 'yarn' 'npm install'
```

- [Next.js](https://nextjs.org)
- [NextAuth.js](https://next-auth.js.org)
- [Prisma](https://prisma.io)
- [Drizzle](https://orm.drizzle.team)
- [Tailwind CSS](https://tailwindcss.com)
- [tRPC](https://trpc.io)
### With lockfile, offline mode

## Learn More
Even though all packages are cached, some tools may hit the npm API during the version resolution step. (This is not the same as re-downloading a package.) To entirely avoid network calls, the other package managers require the `--prefer-offline`/`--offline` flag. To run the benchmark using "offline" mode:

To learn more about the [T3 Stack](https://create.t3.gg/), take a look at the following resources:
```sh
$ hyperfine --prepare 'rm -rf node_modules' --runs 1 'bun install' 'pnpm install --prefer-offline' 'yarn --offline' 'npm install --prefer-offline'
```

- [Documentation](https://create.t3.gg/)
- [Learn the T3 Stack](https://create.t3.gg/en/faq#what-learning-resources-are-currently-available) — Check out these awesome tutorials
### Without lockfile, offline mode

You can check out the [create-t3-app GitHub repository](https://github.com/t3-oss/create-t3-app) — your feedback and contributions are welcome!
To run the benchmark with offline mode but without lockfiles:

## How do I deploy this?
```sh
$ hyperfine --prepare 'rm -rf node_modules' --warmup 1 'rm bun.lock && bun install' 'rm pnpm-lock.yaml && pnpm install --prefer-offline' 'rm yarn.lock && yarn --offline' 'rm package-lock.json && npm install --prefer-offline'
```

Follow our deployment guides for [Vercel](https://create.t3.gg/en/deployment/vercel), [Netlify](https://create.t3.gg/en/deployment/netlify) and [Docker](https://create.t3.gg/en/deployment/docker) for more information.
##

To check that the app is working as expected:

```
$ bun run dev
$ npm run dev
$ yarn dev
$ pnpm dev
```

Then visit [http://localhost:3000](http://localhost:3000).

18
bench/install/app/entry.client.tsx
Normal file
18
bench/install/app/entry.client.tsx
Normal file
@@ -0,0 +1,18 @@
/**
 * By default, Remix will handle hydrating your app on the client for you.
 * You are free to delete this file if you'd like to, but if you ever want it revealed again, you can run `npx remix reveal` ✨
 * For more information, see https://remix.run/docs/en/main/file-conventions/entry.client
 */

import { RemixBrowser } from "@remix-run/react";
import { startTransition, StrictMode } from "react";
import { hydrateRoot } from "react-dom/client";

startTransition(() => {
  hydrateRoot(
    document,
    <StrictMode>
      <RemixBrowser />
    </StrictMode>,
  );
});
101
bench/install/app/entry.server.tsx
Normal file
101
bench/install/app/entry.server.tsx
Normal file
@@ -0,0 +1,101 @@
/**
 * By default, Remix will handle generating the HTTP Response for you.
 * You are free to delete this file if you'd like to, but if you ever want it revealed again, you can run `npx remix reveal` ✨
 * For more information, see https://remix.run/docs/en/main/file-conventions/entry.server
 */

import type { EntryContext } from "@remix-run/node";
import { Response } from "@remix-run/node";
import { RemixServer } from "@remix-run/react";
import isbot from "isbot";
import { PassThrough } from "node:stream";
import { renderToPipeableStream } from "react-dom/server";

const ABORT_DELAY = 5_000;

export default function handleRequest(
  request: Request,
  responseStatusCode: number,
  responseHeaders: Headers,
  remixContext: EntryContext,
) {
  return isbot(request.headers.get("user-agent"))
    ? handleBotRequest(request, responseStatusCode, responseHeaders, remixContext)
    : handleBrowserRequest(request, responseStatusCode, responseHeaders, remixContext);
}

function handleBotRequest(
  request: Request,
  responseStatusCode: number,
  responseHeaders: Headers,
  remixContext: EntryContext,
) {
  return new Promise((resolve, reject) => {
    const { pipe, abort } = renderToPipeableStream(
      <RemixServer context={remixContext} url={request.url} abortDelay={ABORT_DELAY} />,
      {
        onAllReady() {
          const body = new PassThrough();

          responseHeaders.set("Content-Type", "text/html");

          resolve(
            new Response(body, {
              headers: responseHeaders,
              status: responseStatusCode,
            }),
          );

          pipe(body);
        },
        onShellError(error: unknown) {
          reject(error);
        },
        onError(error: unknown) {
          responseStatusCode = 500;
          console.error(error);
        },
      },
    );

    setTimeout(abort, ABORT_DELAY);
  });
}

function handleBrowserRequest(
  request: Request,
  responseStatusCode: number,
  responseHeaders: Headers,
  remixContext: EntryContext,
) {
  return new Promise((resolve, reject) => {
    const { pipe, abort } = renderToPipeableStream(
      <RemixServer context={remixContext} url={request.url} abortDelay={ABORT_DELAY} />,
      {
        onShellReady() {
          const body = new PassThrough();

          responseHeaders.set("Content-Type", "text/html");

          resolve(
            new Response(body, {
              headers: responseHeaders,
              status: responseStatusCode,
            }),
          );

          pipe(body);
        },
        onShellError(error: unknown) {
          reject(error);
        },
        onError(error: unknown) {
          console.error(error);
          responseStatusCode = 500;
        },
      },
    );

    setTimeout(abort, ABORT_DELAY);
  });
}
20
bench/install/app/root.tsx
Normal file
20
bench/install/app/root.tsx
Normal file
@@ -0,0 +1,20 @@
import { Links, LiveReload, Meta, Outlet, Scripts, ScrollRestoration } from "@remix-run/react";

export default function App() {
  return (
    <html lang="en">
      <head>
        <meta charSet="utf-8" />
        <meta name="viewport" content="width=device-width,initial-scale=1" />
        <Meta />
        <Links />
      </head>
      <body>
        <Outlet />
        <ScrollRestoration />
        <Scripts />
        <LiveReload />
      </body>
    </html>
  );
}
30
bench/install/app/routes/_index.tsx
Normal file
30
bench/install/app/routes/_index.tsx
Normal file
@@ -0,0 +1,30 @@
import type { V2_MetaFunction } from "@remix-run/node";

export const meta: V2_MetaFunction = () => {
  return [{ title: "New Remix App" }];
};

export default function Index() {
  return (
    <div style={{ fontFamily: "system-ui, sans-serif", lineHeight: "1.4" }}>
      <h1>Welcome to Remix</h1>
      <ul>
        <li>
          <a target="_blank" href="https://remix.run/tutorials/blog" rel="noreferrer">
            15m Quickstart Blog Tutorial
          </a>
        </li>
        <li>
          <a target="_blank" href="https://remix.run/tutorials/jokes" rel="noreferrer">
            Deep Dive Jokes App Tutorial
          </a>
        </li>
        <li>
          <a target="_blank" href="https://remix.run/docs" rel="noreferrer">
            Remix Docs
          </a>
        </li>
      </ul>
    </div>
  );
}
@@ -1,489 +0,0 @@
|
||||
{
|
||||
"lockfileVersion": 1,
|
||||
"configVersion": 0,
|
||||
"workspaces": {
|
||||
"": {
|
||||
"name": "installbench",
|
||||
"dependencies": {
|
||||
"@auth/drizzle-adapter": "^1.7.2",
|
||||
"@t3-oss/env-nextjs": "^0.12.0",
|
||||
"@tanstack/react-query": "^5.69.0",
|
||||
"@trpc/client": "^11.0.0",
|
||||
"@trpc/react-query": "^11.0.0",
|
||||
"@trpc/server": "^11.0.0",
|
||||
"drizzle-orm": "^0.41.0",
|
||||
"esbuild": "^0.25.11",
|
||||
"next": "15.5.7",
|
||||
"next-auth": "5.0.0-beta.25",
|
||||
"postgres": "^3.4.4",
|
||||
"react": "^19.0.0",
|
||||
"react-dom": "^19.0.0",
|
||||
"server-only": "^0.0.1",
|
||||
"superjson": "^2.2.1",
|
||||
"zod": "^3.24.2",
|
||||
},
|
||||
"devDependencies": {
|
||||
"@biomejs/biome": "1.9.4",
|
||||
"@tailwindcss/postcss": "^4.0.15",
|
||||
"@types/node": "^20.14.10",
|
||||
"@types/react": "^19.0.0",
|
||||
"@types/react-dom": "^19.0.0",
|
||||
"drizzle-kit": "^0.30.5",
|
||||
"postcss": "^8.5.3",
|
||||
"tailwindcss": "^4.0.15",
|
||||
"typescript": "^5.8.2",
|
||||
},
|
||||
},
|
||||
},
|
||||
"packages": {
|
||||
"@alloc/quick-lru": ["@alloc/quick-lru@5.2.0", "", {}, "sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw=="],
|
||||
|
||||
"@auth/core": ["@auth/core@0.41.1", "", { "dependencies": { "@panva/hkdf": "1.2.1", "jose": "6.1.0", "oauth4webapi": "3.8.2", "preact": "10.24.3", "preact-render-to-string": "6.5.11" } }, "sha512-t9cJ2zNYAdWMacGRMT6+r4xr1uybIdmYa49calBPeTqwgAFPV/88ac9TEvCR85pvATiSPt8VaNf+Gt24JIT/uw=="],
|
||||
|
||||
"@auth/drizzle-adapter": ["@auth/drizzle-adapter@1.11.1", "", { "dependencies": { "@auth/core": "0.41.1" } }, "sha512-cQTvDZqsyF7RPhDm/B6SvqdVP9EzQhy3oM4Muu7fjjmSYFLbSR203E6dH631ZHSKDn2b4WZkfMnjPDzRsPSAeA=="],
|
||||
|
||||
"@biomejs/biome": ["@biomejs/biome@1.9.4", "", { "optionalDependencies": { "@biomejs/cli-darwin-arm64": "1.9.4", "@biomejs/cli-darwin-x64": "1.9.4", "@biomejs/cli-linux-arm64": "1.9.4", "@biomejs/cli-linux-arm64-musl": "1.9.4", "@biomejs/cli-linux-x64": "1.9.4", "@biomejs/cli-linux-x64-musl": "1.9.4", "@biomejs/cli-win32-arm64": "1.9.4", "@biomejs/cli-win32-x64": "1.9.4" }, "bin": { "biome": "bin/biome" } }, "sha512-1rkd7G70+o9KkTn5KLmDYXihGoTaIGO9PIIN2ZB7UJxFrWw04CZHPYiMRjYsaDvVV7hP1dYNRLxSANLaBFGpog=="],
|
||||
|
||||
"@biomejs/cli-darwin-arm64": ["@biomejs/cli-darwin-arm64@1.9.4", "", { "os": "darwin", "cpu": "arm64" }, "sha512-bFBsPWrNvkdKrNCYeAp+xo2HecOGPAy9WyNyB/jKnnedgzl4W4Hb9ZMzYNbf8dMCGmUdSavlYHiR01QaYR58cw=="],
|
||||
|
||||
"@biomejs/cli-darwin-x64": ["@biomejs/cli-darwin-x64@1.9.4", "", { "os": "darwin", "cpu": "x64" }, "sha512-ngYBh/+bEedqkSevPVhLP4QfVPCpb+4BBe2p7Xs32dBgs7rh9nY2AIYUL6BgLw1JVXV8GlpKmb/hNiuIxfPfZg=="],
|
||||
|
||||
"@biomejs/cli-linux-arm64": ["@biomejs/cli-linux-arm64@1.9.4", "", { "os": "linux", "cpu": "arm64" }, "sha512-fJIW0+LYujdjUgJJuwesP4EjIBl/N/TcOX3IvIHJQNsAqvV2CHIogsmA94BPG6jZATS4Hi+xv4SkBBQSt1N4/g=="],
|
||||
|
||||
"@biomejs/cli-linux-arm64-musl": ["@biomejs/cli-linux-arm64-musl@1.9.4", "", { "os": "linux", "cpu": "arm64" }, "sha512-v665Ct9WCRjGa8+kTr0CzApU0+XXtRgwmzIf1SeKSGAv+2scAlW6JR5PMFo6FzqqZ64Po79cKODKf3/AAmECqA=="],
|
||||
|
||||
"@biomejs/cli-linux-x64": ["@biomejs/cli-linux-x64@1.9.4", "", { "os": "linux", "cpu": "x64" }, "sha512-lRCJv/Vi3Vlwmbd6K+oQ0KhLHMAysN8lXoCI7XeHlxaajk06u7G+UsFSO01NAs5iYuWKmVZjmiOzJ0OJmGsMwg=="],
|
||||
|
||||
"@biomejs/cli-linux-x64-musl": ["@biomejs/cli-linux-x64-musl@1.9.4", "", { "os": "linux", "cpu": "x64" }, "sha512-gEhi/jSBhZ2m6wjV530Yy8+fNqG8PAinM3oV7CyO+6c3CEh16Eizm21uHVsyVBEB6RIM8JHIl6AGYCv6Q6Q9Tg=="],
|
||||
|
||||
"@biomejs/cli-win32-arm64": ["@biomejs/cli-win32-arm64@1.9.4", "", { "os": "win32", "cpu": "arm64" }, "sha512-tlbhLk+WXZmgwoIKwHIHEBZUwxml7bRJgk0X2sPyNR3S93cdRq6XulAZRQJ17FYGGzWne0fgrXBKpl7l4M87Hg=="],
|
||||
|
||||
"@biomejs/cli-win32-x64": ["@biomejs/cli-win32-x64@1.9.4", "", { "os": "win32", "cpu": "x64" }, "sha512-8Y5wMhVIPaWe6jw2H+KlEm4wP/f7EW3810ZLmDlrEEy5KvBsb9ECEfu/kMWD484ijfQ8+nIi0giMgu9g1UAuuA=="],
|
||||
|
||||
"@drizzle-team/brocli": ["@drizzle-team/brocli@0.10.2", "", {}, "sha512-z33Il7l5dKjUgGULTqBsQBQwckHh5AbIuxhdsIxDDiZAzBOrZO6q9ogcWC65kU382AfynTfgNumVcNIjuIua6w=="],
|
||||
|
||||
"@emnapi/runtime": ["@emnapi/runtime@1.6.0", "", { "dependencies": { "tslib": "2.8.1" } }, "sha512-obtUmAHTMjll499P+D9A3axeJFlhdjOWdKUNs/U6QIGT7V5RjcUW1xToAzjvmgTSQhDbYn/NwfTRoJcQ2rNBxA=="],
|
||||
|
||||
"@esbuild-kit/core-utils": ["@esbuild-kit/core-utils@3.3.2", "", { "dependencies": { "esbuild": "0.18.20", "source-map-support": "0.5.21" } }, "sha512-sPRAnw9CdSsRmEtnsl2WXWdyquogVpB3yZ3dgwJfe8zrOzTsV7cJvmwrKVa+0ma5BoiGJ+BoqkMvawbayKUsqQ=="],
|
||||
|
||||
"@esbuild-kit/esm-loader": ["@esbuild-kit/esm-loader@2.6.5", "", { "dependencies": { "@esbuild-kit/core-utils": "3.3.2", "get-tsconfig": "4.13.0" } }, "sha512-FxEMIkJKnodyA1OaCUoEvbYRkoZlLZ4d/eXFu9Fh8CbBBgP5EmZxrfTRyN0qpXZ4vOvqnE5YdRdcrmUUXuU+dA=="],
|
||||
|
||||
"@esbuild/aix-ppc64": ["@esbuild/aix-ppc64@0.25.11", "", { "os": "aix", "cpu": "ppc64" }, "sha512-Xt1dOL13m8u0WE8iplx9Ibbm+hFAO0GsU2P34UNoDGvZYkY8ifSiy6Zuc1lYxfG7svWE2fzqCUmFp5HCn51gJg=="],
|
||||
|
||||
"@esbuild/android-arm": ["@esbuild/android-arm@0.25.11", "", { "os": "android", "cpu": "arm" }, "sha512-uoa7dU+Dt3HYsethkJ1k6Z9YdcHjTrSb5NUy66ZfZaSV8hEYGD5ZHbEMXnqLFlbBflLsl89Zke7CAdDJ4JI+Gg=="],
|
||||
|
||||
"@esbuild/android-arm64": ["@esbuild/android-arm64@0.25.11", "", { "os": "android", "cpu": "arm64" }, "sha512-9slpyFBc4FPPz48+f6jyiXOx/Y4v34TUeDDXJpZqAWQn/08lKGeD8aDp9TMn9jDz2CiEuHwfhRmGBvpnd/PWIQ=="],
|
||||
|
||||
"@esbuild/android-x64": ["@esbuild/android-x64@0.25.11", "", { "os": "android", "cpu": "x64" }, "sha512-Sgiab4xBjPU1QoPEIqS3Xx+R2lezu0LKIEcYe6pftr56PqPygbB7+szVnzoShbx64MUupqoE0KyRlN7gezbl8g=="],
|
||||
|
||||
"@esbuild/darwin-arm64": ["@esbuild/darwin-arm64@0.25.11", "", { "os": "darwin", "cpu": "arm64" }, "sha512-VekY0PBCukppoQrycFxUqkCojnTQhdec0vevUL/EDOCnXd9LKWqD/bHwMPzigIJXPhC59Vd1WFIL57SKs2mg4w=="],
|
||||
|
||||
"@esbuild/darwin-x64": ["@esbuild/darwin-x64@0.25.11", "", { "os": "darwin", "cpu": "x64" }, "sha512-+hfp3yfBalNEpTGp9loYgbknjR695HkqtY3d3/JjSRUyPg/xd6q+mQqIb5qdywnDxRZykIHs3axEqU6l1+oWEQ=="],
|
||||
|
||||
"@esbuild/freebsd-arm64": ["@esbuild/freebsd-arm64@0.25.11", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-CmKjrnayyTJF2eVuO//uSjl/K3KsMIeYeyN7FyDBjsR3lnSJHaXlVoAK8DZa7lXWChbuOk7NjAc7ygAwrnPBhA=="],
|
||||
|
||||
"@esbuild/freebsd-x64": ["@esbuild/freebsd-x64@0.25.11", "", { "os": "freebsd", "cpu": "x64" }, "sha512-Dyq+5oscTJvMaYPvW3x3FLpi2+gSZTCE/1ffdwuM6G1ARang/mb3jvjxs0mw6n3Lsw84ocfo9CrNMqc5lTfGOw=="],
|
||||
|
||||
"@esbuild/linux-arm": ["@esbuild/linux-arm@0.25.11", "", { "os": "linux", "cpu": "arm" }, "sha512-TBMv6B4kCfrGJ8cUPo7vd6NECZH/8hPpBHHlYI3qzoYFvWu2AdTvZNuU/7hsbKWqu/COU7NIK12dHAAqBLLXgw=="],
|
||||
|
||||
"@esbuild/linux-arm64": ["@esbuild/linux-arm64@0.25.11", "", { "os": "linux", "cpu": "arm64" }, "sha512-Qr8AzcplUhGvdyUF08A1kHU3Vr2O88xxP0Tm8GcdVOUm25XYcMPp2YqSVHbLuXzYQMf9Bh/iKx7YPqECs6ffLA=="],
|
||||
|
||||
"@esbuild/linux-ia32": ["@esbuild/linux-ia32@0.25.11", "", { "os": "linux", "cpu": "ia32" }, "sha512-TmnJg8BMGPehs5JKrCLqyWTVAvielc615jbkOirATQvWWB1NMXY77oLMzsUjRLa0+ngecEmDGqt5jiDC6bfvOw=="],
|
||||
|
||||
"@esbuild/linux-loong64": ["@esbuild/linux-loong64@0.25.11", "", { "os": "linux", "cpu": "none" }, "sha512-DIGXL2+gvDaXlaq8xruNXUJdT5tF+SBbJQKbWy/0J7OhU8gOHOzKmGIlfTTl6nHaCOoipxQbuJi7O++ldrxgMw=="],
|
||||
|
||||
"@esbuild/linux-mips64el": ["@esbuild/linux-mips64el@0.25.11", "", { "os": "linux", "cpu": "none" }, "sha512-Osx1nALUJu4pU43o9OyjSCXokFkFbyzjXb6VhGIJZQ5JZi8ylCQ9/LFagolPsHtgw6himDSyb5ETSfmp4rpiKQ=="],
|
||||
|
||||
"@esbuild/linux-ppc64": ["@esbuild/linux-ppc64@0.25.11", "", { "os": "linux", "cpu": "ppc64" }, "sha512-nbLFgsQQEsBa8XSgSTSlrnBSrpoWh7ioFDUmwo158gIm5NNP+17IYmNWzaIzWmgCxq56vfr34xGkOcZ7jX6CPw=="],
|
||||
|
||||
"@esbuild/linux-riscv64": ["@esbuild/linux-riscv64@0.25.11", "", { "os": "linux", "cpu": "none" }, "sha512-HfyAmqZi9uBAbgKYP1yGuI7tSREXwIb438q0nqvlpxAOs3XnZ8RsisRfmVsgV486NdjD7Mw2UrFSw51lzUk1ww=="],
|
||||
|
||||
"@esbuild/linux-s390x": ["@esbuild/linux-s390x@0.25.11", "", { "os": "linux", "cpu": "s390x" }, "sha512-HjLqVgSSYnVXRisyfmzsH6mXqyvj0SA7pG5g+9W7ESgwA70AXYNpfKBqh1KbTxmQVaYxpzA/SvlB9oclGPbApw=="],
|
||||
|
||||
"@esbuild/linux-x64": ["@esbuild/linux-x64@0.25.11", "", { "os": "linux", "cpu": "x64" }, "sha512-HSFAT4+WYjIhrHxKBwGmOOSpphjYkcswF449j6EjsjbinTZbp8PJtjsVK1XFJStdzXdy/jaddAep2FGY+wyFAQ=="],
|
||||
|
||||
"@esbuild/netbsd-arm64": ["@esbuild/netbsd-arm64@0.25.11", "", { "os": "none", "cpu": "arm64" }, "sha512-hr9Oxj1Fa4r04dNpWr3P8QKVVsjQhqrMSUzZzf+LZcYjZNqhA3IAfPQdEh1FLVUJSiu6sgAwp3OmwBfbFgG2Xg=="],
|
||||
|
||||
"@esbuild/netbsd-x64": ["@esbuild/netbsd-x64@0.25.11", "", { "os": "none", "cpu": "x64" }, "sha512-u7tKA+qbzBydyj0vgpu+5h5AeudxOAGncb8N6C9Kh1N4n7wU1Xw1JDApsRjpShRpXRQlJLb9wY28ELpwdPcZ7A=="],
|
||||
|
||||
"@esbuild/openbsd-arm64": ["@esbuild/openbsd-arm64@0.25.11", "", { "os": "openbsd", "cpu": "arm64" }, "sha512-Qq6YHhayieor3DxFOoYM1q0q1uMFYb7cSpLD2qzDSvK1NAvqFi8Xgivv0cFC6J+hWVw2teCYltyy9/m/14ryHg=="],
|
||||
|
||||
"@esbuild/openbsd-x64": ["@esbuild/openbsd-x64@0.25.11", "", { "os": "openbsd", "cpu": "x64" }, "sha512-CN+7c++kkbrckTOz5hrehxWN7uIhFFlmS/hqziSFVWpAzpWrQoAG4chH+nN3Be+Kzv/uuo7zhX716x3Sn2Jduw=="],
|
||||
|
||||
"@esbuild/openharmony-arm64": ["@esbuild/openharmony-arm64@0.25.11", "", { "os": "none", "cpu": "arm64" }, "sha512-rOREuNIQgaiR+9QuNkbkxubbp8MSO9rONmwP5nKncnWJ9v5jQ4JxFnLu4zDSRPf3x4u+2VN4pM4RdyIzDty/wQ=="],
|
||||
|
||||
"@esbuild/sunos-x64": ["@esbuild/sunos-x64@0.25.11", "", { "os": "sunos", "cpu": "x64" }, "sha512-nq2xdYaWxyg9DcIyXkZhcYulC6pQ2FuCgem3LI92IwMgIZ69KHeY8T4Y88pcwoLIjbed8n36CyKoYRDygNSGhA=="],
|
||||
|
||||
"@esbuild/win32-arm64": ["@esbuild/win32-arm64@0.25.11", "", { "os": "win32", "cpu": "arm64" }, "sha512-3XxECOWJq1qMZ3MN8srCJ/QfoLpL+VaxD/WfNRm1O3B4+AZ/BnLVgFbUV3eiRYDMXetciH16dwPbbHqwe1uU0Q=="],
|
||||
|
||||
"@esbuild/win32-ia32": ["@esbuild/win32-ia32@0.25.11", "", { "os": "win32", "cpu": "ia32" }, "sha512-3ukss6gb9XZ8TlRyJlgLn17ecsK4NSQTmdIXRASVsiS2sQ6zPPZklNJT5GR5tE/MUarymmy8kCEf5xPCNCqVOA=="],
|
||||
|
||||
"@esbuild/win32-x64": ["@esbuild/win32-x64@0.25.11", "", { "os": "win32", "cpu": "x64" }, "sha512-D7Hpz6A2L4hzsRpPaCYkQnGOotdUpDzSGRIv9I+1ITdHROSFUWW95ZPZWQmGka1Fg7W3zFJowyn9WGwMJ0+KPA=="],
|
||||
|
||||
"@img/colour": ["@img/colour@1.0.0", "", {}, "sha512-A5P/LfWGFSl6nsckYtjw9da+19jB8hkJ6ACTGcDfEJ0aE+l2n2El7dsVM7UVHZQ9s2lmYMWlrS21YLy2IR1LUw=="],
|
||||
|
||||
"@img/sharp-darwin-arm64": ["@img/sharp-darwin-arm64@0.34.4", "", { "optionalDependencies": { "@img/sharp-libvips-darwin-arm64": "1.2.3" }, "os": "darwin", "cpu": "arm64" }, "sha512-sitdlPzDVyvmINUdJle3TNHl+AG9QcwiAMsXmccqsCOMZNIdW2/7S26w0LyU8euiLVzFBL3dXPwVCq/ODnf2vA=="],
|
||||
|
||||
"@img/sharp-darwin-x64": ["@img/sharp-darwin-x64@0.34.4", "", { "optionalDependencies": { "@img/sharp-libvips-darwin-x64": "1.2.3" }, "os": "darwin", "cpu": "x64" }, "sha512-rZheupWIoa3+SOdF/IcUe1ah4ZDpKBGWcsPX6MT0lYniH9micvIU7HQkYTfrx5Xi8u+YqwLtxC/3vl8TQN6rMg=="],
|
||||
|
||||
"@img/sharp-libvips-darwin-arm64": ["@img/sharp-libvips-darwin-arm64@1.2.3", "", { "os": "darwin", "cpu": "arm64" }, "sha512-QzWAKo7kpHxbuHqUC28DZ9pIKpSi2ts2OJnoIGI26+HMgq92ZZ4vk8iJd4XsxN+tYfNJxzH6W62X5eTcsBymHw=="],
|
||||
|
||||
"@img/sharp-libvips-darwin-x64": ["@img/sharp-libvips-darwin-x64@1.2.3", "", { "os": "darwin", "cpu": "x64" }, "sha512-Ju+g2xn1E2AKO6YBhxjj+ACcsPQRHT0bhpglxcEf+3uyPY+/gL8veniKoo96335ZaPo03bdDXMv0t+BBFAbmRA=="],
|
||||
|
||||
"@img/sharp-libvips-linux-arm": ["@img/sharp-libvips-linux-arm@1.2.3", "", { "os": "linux", "cpu": "arm" }, "sha512-x1uE93lyP6wEwGvgAIV0gP6zmaL/a0tGzJs/BIDDG0zeBhMnuUPm7ptxGhUbcGs4okDJrk4nxgrmxpib9g6HpA=="],
|
||||
|
||||
"@img/sharp-libvips-linux-arm64": ["@img/sharp-libvips-linux-arm64@1.2.3", "", { "os": "linux", "cpu": "arm64" }, "sha512-I4RxkXU90cpufazhGPyVujYwfIm9Nk1QDEmiIsaPwdnm013F7RIceaCc87kAH+oUB1ezqEvC6ga4m7MSlqsJvQ=="],
|
||||
|
||||
"@img/sharp-libvips-linux-ppc64": ["@img/sharp-libvips-linux-ppc64@1.2.3", "", { "os": "linux", "cpu": "ppc64" }, "sha512-Y2T7IsQvJLMCBM+pmPbM3bKT/yYJvVtLJGfCs4Sp95SjvnFIjynbjzsa7dY1fRJX45FTSfDksbTp6AGWudiyCg=="],
|
||||
|
||||
"@img/sharp-libvips-linux-s390x": ["@img/sharp-libvips-linux-s390x@1.2.3", "", { "os": "linux", "cpu": "s390x" }, "sha512-RgWrs/gVU7f+K7P+KeHFaBAJlNkD1nIZuVXdQv6S+fNA6syCcoboNjsV2Pou7zNlVdNQoQUpQTk8SWDHUA3y/w=="],
|
||||
|
||||
"@img/sharp-libvips-linux-x64": ["@img/sharp-libvips-linux-x64@1.2.3", "", { "os": "linux", "cpu": "x64" }, "sha512-3JU7LmR85K6bBiRzSUc/Ff9JBVIFVvq6bomKE0e63UXGeRw2HPVEjoJke1Yx+iU4rL7/7kUjES4dZ/81Qjhyxg=="],
|
||||
|
||||
"@img/sharp-libvips-linuxmusl-arm64": ["@img/sharp-libvips-linuxmusl-arm64@1.2.3", "", { "os": "linux", "cpu": "arm64" }, "sha512-F9q83RZ8yaCwENw1GieztSfj5msz7GGykG/BA+MOUefvER69K/ubgFHNeSyUu64amHIYKGDs4sRCMzXVj8sEyw=="],
|
||||
|
||||
"@img/sharp-libvips-linuxmusl-x64": ["@img/sharp-libvips-linuxmusl-x64@1.2.3", "", { "os": "linux", "cpu": "x64" }, "sha512-U5PUY5jbc45ANM6tSJpsgqmBF/VsL6LnxJmIf11kB7J5DctHgqm0SkuXzVWtIY90GnJxKnC/JT251TDnk1fu/g=="],
|
||||
|
||||
"@img/sharp-linux-arm": ["@img/sharp-linux-arm@0.34.4", "", { "optionalDependencies": { "@img/sharp-libvips-linux-arm": "1.2.3" }, "os": "linux", "cpu": "arm" }, "sha512-Xyam4mlqM0KkTHYVSuc6wXRmM7LGN0P12li03jAnZ3EJWZqj83+hi8Y9UxZUbxsgsK1qOEwg7O0Bc0LjqQVtxA=="],
|
||||
|
||||
"@img/sharp-linux-arm64": ["@img/sharp-linux-arm64@0.34.4", "", { "optionalDependencies": { "@img/sharp-libvips-linux-arm64": "1.2.3" }, "os": "linux", "cpu": "arm64" }, "sha512-YXU1F/mN/Wu786tl72CyJjP/Ngl8mGHN1hST4BGl+hiW5jhCnV2uRVTNOcaYPs73NeT/H8Upm3y9582JVuZHrQ=="],
|
||||
|
||||
"@img/sharp-linux-ppc64": ["@img/sharp-linux-ppc64@0.34.4", "", { "optionalDependencies": { "@img/sharp-libvips-linux-ppc64": "1.2.3" }, "os": "linux", "cpu": "ppc64" }, "sha512-F4PDtF4Cy8L8hXA2p3TO6s4aDt93v+LKmpcYFLAVdkkD3hSxZzee0rh6/+94FpAynsuMpLX5h+LRsSG3rIciUQ=="],
|
||||
|
||||
"@img/sharp-linux-s390x": ["@img/sharp-linux-s390x@0.34.4", "", { "optionalDependencies": { "@img/sharp-libvips-linux-s390x": "1.2.3" }, "os": "linux", "cpu": "s390x" }, "sha512-qVrZKE9Bsnzy+myf7lFKvng6bQzhNUAYcVORq2P7bDlvmF6u2sCmK2KyEQEBdYk+u3T01pVsPrkj943T1aJAsw=="],
|
||||
|
||||
"@img/sharp-linux-x64": ["@img/sharp-linux-x64@0.34.4", "", { "optionalDependencies": { "@img/sharp-libvips-linux-x64": "1.2.3" }, "os": "linux", "cpu": "x64" }, "sha512-ZfGtcp2xS51iG79c6Vhw9CWqQC8l2Ot8dygxoDoIQPTat/Ov3qAa8qpxSrtAEAJW+UjTXc4yxCjNfxm4h6Xm2A=="],
|
||||
|
||||
"@img/sharp-linuxmusl-arm64": ["@img/sharp-linuxmusl-arm64@0.34.4", "", { "optionalDependencies": { "@img/sharp-libvips-linuxmusl-arm64": "1.2.3" }, "os": "linux", "cpu": "arm64" }, "sha512-8hDVvW9eu4yHWnjaOOR8kHVrew1iIX+MUgwxSuH2XyYeNRtLUe4VNioSqbNkB7ZYQJj9rUTT4PyRscyk2PXFKA=="],
|
||||
|
||||
"@img/sharp-linuxmusl-x64": ["@img/sharp-linuxmusl-x64@0.34.4", "", { "optionalDependencies": { "@img/sharp-libvips-linuxmusl-x64": "1.2.3" }, "os": "linux", "cpu": "x64" }, "sha512-lU0aA5L8QTlfKjpDCEFOZsTYGn3AEiO6db8W5aQDxj0nQkVrZWmN3ZP9sYKWJdtq3PWPhUNlqehWyXpYDcI9Sg=="],
|
||||
|
||||
"@img/sharp-wasm32": ["@img/sharp-wasm32@0.34.4", "", { "dependencies": { "@emnapi/runtime": "1.6.0" }, "cpu": "none" }, "sha512-33QL6ZO/qpRyG7woB/HUALz28WnTMI2W1jgX3Nu2bypqLIKx/QKMILLJzJjI+SIbvXdG9fUnmrxR7vbi1sTBeA=="],
|
||||
|
||||
"@img/sharp-win32-arm64": ["@img/sharp-win32-arm64@0.34.4", "", { "os": "win32", "cpu": "arm64" }, "sha512-2Q250do/5WXTwxW3zjsEuMSv5sUU4Tq9VThWKlU2EYLm4MB7ZeMwF+SFJutldYODXF6jzc6YEOC+VfX0SZQPqA=="],
|
||||
|
||||
"@img/sharp-win32-ia32": ["@img/sharp-win32-ia32@0.34.4", "", { "os": "win32", "cpu": "ia32" }, "sha512-3ZeLue5V82dT92CNL6rsal6I2weKw1cYu+rGKm8fOCCtJTR2gYeUfY3FqUnIJsMUPIH68oS5jmZ0NiJ508YpEw=="],
|
||||
|
||||
"@img/sharp-win32-x64": ["@img/sharp-win32-x64@0.34.4", "", { "os": "win32", "cpu": "x64" }, "sha512-xIyj4wpYs8J18sVN3mSQjwrw7fKUqRw+Z5rnHNCy5fYTxigBz81u5mOMPmFumwjcn8+ld1ppptMBCLic1nz6ig=="],
|
||||
|
||||
"@jridgewell/gen-mapping": ["@jridgewell/gen-mapping@0.3.13", "", { "dependencies": { "@jridgewell/sourcemap-codec": "1.5.5", "@jridgewell/trace-mapping": "0.3.31" } }, "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA=="],
|
||||
|
||||
"@jridgewell/remapping": ["@jridgewell/remapping@2.3.5", "", { "dependencies": { "@jridgewell/gen-mapping": "0.3.13", "@jridgewell/trace-mapping": "0.3.31" } }, "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ=="],
|
||||
|
||||
"@jridgewell/resolve-uri": ["@jridgewell/resolve-uri@3.1.2", "", {}, "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw=="],
|
||||
|
||||
"@jridgewell/sourcemap-codec": ["@jridgewell/sourcemap-codec@1.5.5", "", {}, "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og=="],
|
||||
|
||||
"@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.31", "", { "dependencies": { "@jridgewell/resolve-uri": "3.1.2", "@jridgewell/sourcemap-codec": "1.5.5" } }, "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw=="],
|
||||
|
||||
"@next/env": ["@next/env@15.5.7", "", {}, "sha512-4h6Y2NyEkIEN7Z8YxkA27pq6zTkS09bUSYC0xjd0NpwFxjnIKeZEeH591o5WECSmjpUhLn3H2QLJcDye3Uzcvg=="],
|
||||
|
||||
"@next/swc-darwin-arm64": ["@next/swc-darwin-arm64@15.5.7", "", { "os": "darwin", "cpu": "arm64" }, "sha512-IZwtxCEpI91HVU/rAUOOobWSZv4P2DeTtNaCdHqLcTJU4wdNXgAySvKa/qJCgR5m6KI8UsKDXtO2B31jcaw1Yw=="],
|
||||
|
||||
"@next/swc-darwin-x64": ["@next/swc-darwin-x64@15.5.7", "", { "os": "darwin", "cpu": "x64" }, "sha512-UP6CaDBcqaCBuiq/gfCEJw7sPEoX1aIjZHnBWN9v9qYHQdMKvCKcAVs4OX1vIjeE+tC5EIuwDTVIoXpUes29lg=="],
|
||||
|
||||
"@next/swc-linux-arm64-gnu": ["@next/swc-linux-arm64-gnu@15.5.7", "", { "os": "linux", "cpu": "arm64" }, "sha512-NCslw3GrNIw7OgmRBxHtdWFQYhexoUCq+0oS2ccjyYLtcn1SzGzeM54jpTFonIMUjNbHmpKpziXnpxhSWLcmBA=="],
|
||||
|
||||
"@next/swc-linux-arm64-musl": ["@next/swc-linux-arm64-musl@15.5.7", "", { "os": "linux", "cpu": "arm64" }, "sha512-nfymt+SE5cvtTrG9u1wdoxBr9bVB7mtKTcj0ltRn6gkP/2Nu1zM5ei8rwP9qKQP0Y//umK+TtkKgNtfboBxRrw=="],
|
||||
|
||||
"@next/swc-linux-x64-gnu": ["@next/swc-linux-x64-gnu@15.5.7", "", { "os": "linux", "cpu": "x64" }, "sha512-hvXcZvCaaEbCZcVzcY7E1uXN9xWZfFvkNHwbe/n4OkRhFWrs1J1QV+4U1BN06tXLdaS4DazEGXwgqnu/VMcmqw=="],
|
||||
|
||||
"@next/swc-linux-x64-musl": ["@next/swc-linux-x64-musl@15.5.7", "", { "os": "linux", "cpu": "x64" }, "sha512-4IUO539b8FmF0odY6/SqANJdgwn1xs1GkPO5doZugwZ3ETF6JUdckk7RGmsfSf7ws8Qb2YB5It33mvNL/0acqA=="],
|
||||
|
||||
"@next/swc-win32-arm64-msvc": ["@next/swc-win32-arm64-msvc@15.5.7", "", { "os": "win32", "cpu": "arm64" }, "sha512-CpJVTkYI3ZajQkC5vajM7/ApKJUOlm6uP4BknM3XKvJ7VXAvCqSjSLmM0LKdYzn6nBJVSjdclx8nYJSa3xlTgQ=="],
|
||||
|
||||
"@next/swc-win32-x64-msvc": ["@next/swc-win32-x64-msvc@15.5.7", "", { "os": "win32", "cpu": "x64" }, "sha512-gMzgBX164I6DN+9/PGA+9dQiwmTkE4TloBNx8Kv9UiGARsr9Nba7IpcBRA1iTV9vwlYnrE3Uy6I7Aj6qLjQuqw=="],
|
||||
|
||||
"@panva/hkdf": ["@panva/hkdf@1.2.1", "", {}, "sha512-6oclG6Y3PiDFcoyk8srjLfVKyMfVCKJ27JwNPViuXziFpmdz+MZnZN/aKY0JGXgYuO/VghU0jcOAZgWXZ1Dmrw=="],
|
||||
|
||||
"@petamoriken/float16": ["@petamoriken/float16@3.9.3", "", {}, "sha512-8awtpHXCx/bNpFt4mt2xdkgtgVvKqty8VbjHI/WWWQuEw+KLzFot3f4+LkQY9YmOtq7A5GdOnqoIC8Pdygjk2g=="],
|
||||
|
||||
"@swc/helpers": ["@swc/helpers@0.5.15", "", { "dependencies": { "tslib": "2.8.1" } }, "sha512-JQ5TuMi45Owi4/BIMAJBoSQoOJu12oOk/gADqlcUL9JEdHB8vyjUSsxqeNXnmXHjYKMi2WcYtezGEEhqUI/E2g=="],
|
||||
|
||||
"@t3-oss/env-core": ["@t3-oss/env-core@0.12.0", "", { "optionalDependencies": { "typescript": "5.9.3", "zod": "3.25.76" } }, "sha512-lOPj8d9nJJTt81mMuN9GMk8x5veOt7q9m11OSnCBJhwp1QrL/qR+M8Y467ULBSm9SunosryWNbmQQbgoiMgcdw=="],
|
||||
|
||||
"@t3-oss/env-nextjs": ["@t3-oss/env-nextjs@0.12.0", "", { "dependencies": { "@t3-oss/env-core": "0.12.0" }, "optionalDependencies": { "typescript": "5.9.3", "zod": "3.25.76" } }, "sha512-rFnvYk1049RnNVUPvY8iQ55AuQh1Rr+qZzQBh3t++RttCGK4COpXGNxS4+45afuQq02lu+QAOy/5955aU8hRKw=="],
|
||||
|
||||
"@tailwindcss/node": ["@tailwindcss/node@4.1.16", "", { "dependencies": { "@jridgewell/remapping": "2.3.5", "enhanced-resolve": "5.18.3", "jiti": "2.6.1", "lightningcss": "1.30.2", "magic-string": "0.30.21", "source-map-js": "1.2.1", "tailwindcss": "4.1.16" } }, "sha512-BX5iaSsloNuvKNHRN3k2RcCuTEgASTo77mofW0vmeHkfrDWaoFAFvNHpEgtu0eqyypcyiBkDWzSMxJhp3AUVcw=="],
|
||||
|
||||
"@tailwindcss/oxide": ["@tailwindcss/oxide@4.1.16", "", { "optionalDependencies": { "@tailwindcss/oxide-android-arm64": "4.1.16", "@tailwindcss/oxide-darwin-arm64": "4.1.16", "@tailwindcss/oxide-darwin-x64": "4.1.16", "@tailwindcss/oxide-freebsd-x64": "4.1.16", "@tailwindcss/oxide-linux-arm-gnueabihf": "4.1.16", "@tailwindcss/oxide-linux-arm64-gnu": "4.1.16", "@tailwindcss/oxide-linux-arm64-musl": "4.1.16", "@tailwindcss/oxide-linux-x64-gnu": "4.1.16", "@tailwindcss/oxide-linux-x64-musl": "4.1.16", "@tailwindcss/oxide-wasm32-wasi": "4.1.16", "@tailwindcss/oxide-win32-arm64-msvc": "4.1.16", "@tailwindcss/oxide-win32-x64-msvc": "4.1.16" } }, "sha512-2OSv52FRuhdlgyOQqgtQHuCgXnS8nFSYRp2tJ+4WZXKgTxqPy7SMSls8c3mPT5pkZ17SBToGM5LHEJBO7miEdg=="],
|
||||
|
||||
"@tailwindcss/oxide-android-arm64": ["@tailwindcss/oxide-android-arm64@4.1.16", "", { "os": "android", "cpu": "arm64" }, "sha512-8+ctzkjHgwDJ5caq9IqRSgsP70xhdhJvm+oueS/yhD5ixLhqTw9fSL1OurzMUhBwE5zK26FXLCz2f/RtkISqHA=="],
|
||||
|
||||
"@tailwindcss/oxide-darwin-arm64": ["@tailwindcss/oxide-darwin-arm64@4.1.16", "", { "os": "darwin", "cpu": "arm64" }, "sha512-C3oZy5042v2FOALBZtY0JTDnGNdS6w7DxL/odvSny17ORUnaRKhyTse8xYi3yKGyfnTUOdavRCdmc8QqJYwFKA=="],
|
||||
|
||||
"@tailwindcss/oxide-darwin-x64": ["@tailwindcss/oxide-darwin-x64@4.1.16", "", { "os": "darwin", "cpu": "x64" }, "sha512-vjrl/1Ub9+JwU6BP0emgipGjowzYZMjbWCDqwA2Z4vCa+HBSpP4v6U2ddejcHsolsYxwL5r4bPNoamlV0xDdLg=="],
|
||||
|
||||
"@tailwindcss/oxide-freebsd-x64": ["@tailwindcss/oxide-freebsd-x64@4.1.16", "", { "os": "freebsd", "cpu": "x64" }, "sha512-TSMpPYpQLm+aR1wW5rKuUuEruc/oOX3C7H0BTnPDn7W/eMw8W+MRMpiypKMkXZfwH8wqPIRKppuZoedTtNj2tg=="],
|
||||
|
||||
"@tailwindcss/oxide-linux-arm-gnueabihf": ["@tailwindcss/oxide-linux-arm-gnueabihf@4.1.16", "", { "os": "linux", "cpu": "arm" }, "sha512-p0GGfRg/w0sdsFKBjMYvvKIiKy/LNWLWgV/plR4lUgrsxFAoQBFrXkZ4C0w8IOXfslB9vHK/JGASWD2IefIpvw=="],
|
||||
|
||||
"@tailwindcss/oxide-linux-arm64-gnu": ["@tailwindcss/oxide-linux-arm64-gnu@4.1.16", "", { "os": "linux", "cpu": "arm64" }, "sha512-DoixyMmTNO19rwRPdqviTrG1rYzpxgyYJl8RgQvdAQUzxC1ToLRqtNJpU/ATURSKgIg6uerPw2feW0aS8SNr/w=="],
|
||||
|
||||
"@tailwindcss/oxide-linux-arm64-musl": ["@tailwindcss/oxide-linux-arm64-musl@4.1.16", "", { "os": "linux", "cpu": "arm64" }, "sha512-H81UXMa9hJhWhaAUca6bU2wm5RRFpuHImrwXBUvPbYb+3jo32I9VIwpOX6hms0fPmA6f2pGVlybO6qU8pF4fzQ=="],
|
||||
|
||||
"@tailwindcss/oxide-linux-x64-gnu": ["@tailwindcss/oxide-linux-x64-gnu@4.1.16", "", { "os": "linux", "cpu": "x64" }, "sha512-ZGHQxDtFC2/ruo7t99Qo2TTIvOERULPl5l0K1g0oK6b5PGqjYMga+FcY1wIUnrUxY56h28FxybtDEla+ICOyew=="],
|
||||
|
||||
"@tailwindcss/oxide-linux-x64-musl": ["@tailwindcss/oxide-linux-x64-musl@4.1.16", "", { "os": "linux", "cpu": "x64" }, "sha512-Oi1tAaa0rcKf1Og9MzKeINZzMLPbhxvm7rno5/zuP1WYmpiG0bEHq4AcRUiG2165/WUzvxkW4XDYCscZWbTLZw=="],
|
||||
|
||||
"@tailwindcss/oxide-wasm32-wasi": ["@tailwindcss/oxide-wasm32-wasi@4.1.16", "", { "cpu": "none" }, "sha512-B01u/b8LteGRwucIBmCQ07FVXLzImWESAIMcUU6nvFt/tYsQ6IHz8DmZ5KtvmwxD+iTYBtM1xwoGXswnlu9v0Q=="],
|
||||
|
||||
"@tailwindcss/oxide-win32-arm64-msvc": ["@tailwindcss/oxide-win32-arm64-msvc@4.1.16", "", { "os": "win32", "cpu": "arm64" }, "sha512-zX+Q8sSkGj6HKRTMJXuPvOcP8XfYON24zJBRPlszcH1Np7xuHXhWn8qfFjIujVzvH3BHU+16jBXwgpl20i+v9A=="],
|
||||
|
||||
"@tailwindcss/oxide-win32-x64-msvc": ["@tailwindcss/oxide-win32-x64-msvc@4.1.16", "", { "os": "win32", "cpu": "x64" }, "sha512-m5dDFJUEejbFqP+UXVstd4W/wnxA4F61q8SoL+mqTypId2T2ZpuxosNSgowiCnLp2+Z+rivdU0AqpfgiD7yCBg=="],
|
||||
|
||||
"@tailwindcss/postcss": ["@tailwindcss/postcss@4.1.16", "", { "dependencies": { "@alloc/quick-lru": "5.2.0", "@tailwindcss/node": "4.1.16", "@tailwindcss/oxide": "4.1.16", "postcss": "8.5.6", "tailwindcss": "4.1.16" } }, "sha512-Qn3SFGPXYQMKR/UtqS+dqvPrzEeBZHrFA92maT4zijCVggdsXnDBMsPFJo1eArX3J+O+Gi+8pV4PkqjLCNBk3A=="],
|
||||
|
||||
"@tanstack/query-core": ["@tanstack/query-core@5.90.5", "", {}, "sha512-wLamYp7FaDq6ZnNehypKI5fNvxHPfTYylE0m/ZpuuzJfJqhR5Pxg9gvGBHZx4n7J+V5Rg5mZxHHTlv25Zt5u+w=="],
|
||||
|
||||
"@tanstack/react-query": ["@tanstack/react-query@5.90.5", "", { "dependencies": { "@tanstack/query-core": "5.90.5" }, "peerDependencies": { "react": "19.2.0" } }, "sha512-pN+8UWpxZkEJ/Rnnj2v2Sxpx1WFlaa9L6a4UO89p6tTQbeo+m0MS8oYDjbggrR8QcTyjKoYWKS3xJQGr3ExT8Q=="],
|
||||
|
||||
"@trpc/client": ["@trpc/client@11.7.1", "", { "peerDependencies": { "@trpc/server": "11.7.1", "typescript": "5.9.3" } }, "sha512-uOnAjElKI892/U6aQMcBHYs3x7mme3Cvv1F87ytBL56rBvs7+DyK7r43zgaXKf13+GtPEI6ex5xjVUfyDW8XcQ=="],
|
||||
|
||||
"@trpc/react-query": ["@trpc/react-query@11.7.1", "", { "peerDependencies": { "@tanstack/react-query": "5.90.5", "@trpc/client": "11.7.1", "@trpc/server": "11.7.1", "react": "19.2.0", "react-dom": "19.2.0", "typescript": "5.9.3" } }, "sha512-dEHDjIqSTzO8nLlCbtiFBMBwhbSkK1QP7aYVo3nP3sYBna0b+iCtrPXdxVPCSopr9/aIqDTEh+dMRZa7yBgjfQ=="],
|
||||
|
||||
"@trpc/server": ["@trpc/server@11.7.1", "", { "peerDependencies": { "typescript": "5.9.3" } }, "sha512-N3U8LNLIP4g9C7LJ/sLkjuPHwqlvE3bnspzC4DEFVdvx2+usbn70P80E3wj5cjOTLhmhRiwJCSXhlB+MHfGeCw=="],
|
||||
|
||||
"@types/cookie": ["@types/cookie@0.6.0", "", {}, "sha512-4Kh9a6B2bQciAhf7FSuMRRkUWecJgJu9nPnx3yzpsfXX/c50REIqpHY4C82bXP90qrLtXtkDxTZosYO3UpOwlA=="],
|
||||
|
||||
"@types/node": ["@types/node@20.19.24", "", { "dependencies": { "undici-types": "6.21.0" } }, "sha512-FE5u0ezmi6y9OZEzlJfg37mqqf6ZDSF2V/NLjUyGrR9uTZ7Sb9F7bLNZ03S4XVUNRWGA7Ck4c1kK+YnuWjl+DA=="],
|
||||
|
||||
"@types/react": ["@types/react@19.2.2", "", { "dependencies": { "csstype": "3.1.3" } }, "sha512-6mDvHUFSjyT2B2yeNx2nUgMxh9LtOWvkhIU3uePn2I2oyNymUAX1NIsdgviM4CH+JSrp2D2hsMvJOkxY+0wNRA=="],
|
||||
|
||||
"@types/react-dom": ["@types/react-dom@19.2.2", "", { "peerDependencies": { "@types/react": "19.2.2" } }, "sha512-9KQPoO6mZCi7jcIStSnlOWn2nEF3mNmyr3rIAsGnAbQKYbRLyqmeSc39EVgtxXVia+LMT8j3knZLAZAh+xLmrw=="],
|
||||
|
||||
"buffer-from": ["buffer-from@1.1.2", "", {}, "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ=="],
|
||||
|
||||
"caniuse-lite": ["caniuse-lite@1.0.30001752", "", {}, "sha512-vKUk7beoukxE47P5gcVNKkDRzXdVofotshHwfR9vmpeFKxmI5PBpgOMC18LUJUA/DvJ70Y7RveasIBraqsyO/g=="],
|
||||
|
||||
"client-only": ["client-only@0.0.1", "", {}, "sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA=="],
|
||||
|
||||
"cookie": ["cookie@0.7.1", "", {}, "sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w=="],
|
||||
|
||||
"copy-anything": ["copy-anything@4.0.5", "", { "dependencies": { "is-what": "5.5.0" } }, "sha512-7Vv6asjS4gMOuILabD3l739tsaxFQmC+a7pLZm02zyvs8p977bL3zEgq3yDk5rn9B0PbYgIv++jmHcuUab4RhA=="],
|
||||
|
||||
"csstype": ["csstype@3.1.3", "", {}, "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw=="],
|
||||
|
||||
"debug": ["debug@4.4.3", "", { "dependencies": { "ms": "2.1.3" } }, "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA=="],
|
||||
|
||||
"detect-libc": ["detect-libc@2.1.2", "", {}, "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ=="],
|
||||
|
||||
"drizzle-kit": ["drizzle-kit@0.30.6", "", { "dependencies": { "@drizzle-team/brocli": "0.10.2", "@esbuild-kit/esm-loader": "2.6.5", "esbuild": "0.19.12", "esbuild-register": "3.6.0", "gel": "2.1.1" }, "bin": { "drizzle-kit": "bin.cjs" } }, "sha512-U4wWit0fyZuGuP7iNmRleQyK2V8wCuv57vf5l3MnG4z4fzNTjY/U13M8owyQ5RavqvqxBifWORaR3wIUzlN64g=="],
|
||||
|
||||
"drizzle-orm": ["drizzle-orm@0.41.0", "", { "optionalDependencies": { "gel": "2.1.1", "postgres": "3.4.7" } }, "sha512-7A4ZxhHk9gdlXmTdPj/lREtP+3u8KvZ4yEN6MYVxBzZGex5Wtdc+CWSbu7btgF6TB0N+MNPrvW7RKBbxJchs/Q=="],
|
||||
|
||||
"enhanced-resolve": ["enhanced-resolve@5.18.3", "", { "dependencies": { "graceful-fs": "4.2.11", "tapable": "2.3.0" } }, "sha512-d4lC8xfavMeBjzGr2vECC3fsGXziXZQyJxD868h2M/mBI3PwAuODxAkLkq5HYuvrPYcUtiLzsTo8U3PgX3Ocww=="],
|
||||
|
||||
"env-paths": ["env-paths@3.0.0", "", {}, "sha512-dtJUTepzMW3Lm/NPxRf3wP4642UWhjL2sQxc+ym2YMj1m/H2zDNQOlezafzkHwn6sMstjHTwG6iQQsctDW/b1A=="],
|
||||
|
||||
"esbuild": ["esbuild@0.25.11", "", { "optionalDependencies": { "@esbuild/aix-ppc64": "0.25.11", "@esbuild/android-arm": "0.25.11", "@esbuild/android-arm64": "0.25.11", "@esbuild/android-x64": "0.25.11", "@esbuild/darwin-arm64": "0.25.11", "@esbuild/darwin-x64": "0.25.11", "@esbuild/freebsd-arm64": "0.25.11", "@esbuild/freebsd-x64": "0.25.11", "@esbuild/linux-arm": "0.25.11", "@esbuild/linux-arm64": "0.25.11", "@esbuild/linux-ia32": "0.25.11", "@esbuild/linux-loong64": "0.25.11", "@esbuild/linux-mips64el": "0.25.11", "@esbuild/linux-ppc64": "0.25.11", "@esbuild/linux-riscv64": "0.25.11", "@esbuild/linux-s390x": "0.25.11", "@esbuild/linux-x64": "0.25.11", "@esbuild/netbsd-arm64": "0.25.11", "@esbuild/netbsd-x64": "0.25.11", "@esbuild/openbsd-arm64": "0.25.11", "@esbuild/openbsd-x64": "0.25.11", "@esbuild/openharmony-arm64": "0.25.11", "@esbuild/sunos-x64": "0.25.11", "@esbuild/win32-arm64": "0.25.11", "@esbuild/win32-ia32": "0.25.11", "@esbuild/win32-x64": "0.25.11" }, "bin": { "esbuild": "bin/esbuild" } }, "sha512-KohQwyzrKTQmhXDW1PjCv3Tyspn9n5GcY2RTDqeORIdIJY8yKIF7sTSopFmn/wpMPW4rdPXI0UE5LJLuq3bx0Q=="],
|
||||
|
||||
"esbuild-register": ["esbuild-register@3.6.0", "", { "dependencies": { "debug": "4.4.3" }, "peerDependencies": { "esbuild": "0.19.12" } }, "sha512-H2/S7Pm8a9CL1uhp9OvjwrBh5Pvx0H8qVOxNu8Wed9Y7qv56MPtq+GGM8RJpq6glYJn9Wspr8uw7l55uyinNeg=="],
|
||||
|
||||
"gel": ["gel@2.1.1", "", { "dependencies": { "@petamoriken/float16": "3.9.3", "debug": "4.4.3", "env-paths": "3.0.0", "semver": "7.7.3", "shell-quote": "1.8.3", "which": "4.0.0" }, "bin": { "gel": "dist/cli.mjs" } }, "sha512-Newg9X7mRYskoBjSw70l1YnJ/ZGbq64VPyR821H5WVkTGpHG2O0mQILxCeUhxdYERLFY9B4tUyKLyf3uMTjtKw=="],
|
||||
|
||||
"get-tsconfig": ["get-tsconfig@4.13.0", "", { "dependencies": { "resolve-pkg-maps": "1.0.0" } }, "sha512-1VKTZJCwBrvbd+Wn3AOgQP/2Av+TfTCOlE4AcRJE72W1ksZXbAx8PPBR9RzgTeSPzlPMHrbANMH3LbltH73wxQ=="],
|
||||
|
||||
"graceful-fs": ["graceful-fs@4.2.11", "", {}, "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ=="],
|
||||
|
||||
"is-what": ["is-what@5.5.0", "", {}, "sha512-oG7cgbmg5kLYae2N5IVd3jm2s+vldjxJzK1pcu9LfpGuQ93MQSzo0okvRna+7y5ifrD+20FE8FvjusyGaz14fw=="],
|
||||
|
||||
"isexe": ["isexe@3.1.1", "", {}, "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ=="],
|
||||
|
||||
"jiti": ["jiti@2.6.1", "", { "bin": { "jiti": "lib/jiti-cli.mjs" } }, "sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ=="],
|
||||
|
||||
"jose": ["jose@6.1.0", "", {}, "sha512-TTQJyoEoKcC1lscpVDCSsVgYzUDg/0Bt3WE//WiTPK6uOCQC2KZS4MpugbMWt/zyjkopgZoXhZuCi00gLudfUA=="],
|
||||
|
||||
"lightningcss": ["lightningcss@1.30.2", "", { "dependencies": { "detect-libc": "2.1.2" }, "optionalDependencies": { "lightningcss-android-arm64": "1.30.2", "lightningcss-darwin-arm64": "1.30.2", "lightningcss-darwin-x64": "1.30.2", "lightningcss-freebsd-x64": "1.30.2", "lightningcss-linux-arm-gnueabihf": "1.30.2", "lightningcss-linux-arm64-gnu": "1.30.2", "lightningcss-linux-arm64-musl": "1.30.2", "lightningcss-linux-x64-gnu": "1.30.2", "lightningcss-linux-x64-musl": "1.30.2", "lightningcss-win32-arm64-msvc": "1.30.2", "lightningcss-win32-x64-msvc": "1.30.2" } }, "sha512-utfs7Pr5uJyyvDETitgsaqSyjCb2qNRAtuqUeWIAKztsOYdcACf2KtARYXg2pSvhkt+9NfoaNY7fxjl6nuMjIQ=="],
|
||||
|
||||
"lightningcss-android-arm64": ["lightningcss-android-arm64@1.30.2", "", { "os": "android", "cpu": "arm64" }, "sha512-BH9sEdOCahSgmkVhBLeU7Hc9DWeZ1Eb6wNS6Da8igvUwAe0sqROHddIlvU06q3WyXVEOYDZ6ykBZQnjTbmo4+A=="],
|
||||
|
||||
"lightningcss-darwin-arm64": ["lightningcss-darwin-arm64@1.30.2", "", { "os": "darwin", "cpu": "arm64" }, "sha512-ylTcDJBN3Hp21TdhRT5zBOIi73P6/W0qwvlFEk22fkdXchtNTOU4Qc37SkzV+EKYxLouZ6M4LG9NfZ1qkhhBWA=="],
|
||||
|
||||
"lightningcss-darwin-x64": ["lightningcss-darwin-x64@1.30.2", "", { "os": "darwin", "cpu": "x64" }, "sha512-oBZgKchomuDYxr7ilwLcyms6BCyLn0z8J0+ZZmfpjwg9fRVZIR5/GMXd7r9RH94iDhld3UmSjBM6nXWM2TfZTQ=="],
|
||||
|
||||
"lightningcss-freebsd-x64": ["lightningcss-freebsd-x64@1.30.2", "", { "os": "freebsd", "cpu": "x64" }, "sha512-c2bH6xTrf4BDpK8MoGG4Bd6zAMZDAXS569UxCAGcA7IKbHNMlhGQ89eRmvpIUGfKWNVdbhSbkQaWhEoMGmGslA=="],
|
||||
|
||||
"lightningcss-linux-arm-gnueabihf": ["lightningcss-linux-arm-gnueabihf@1.30.2", "", { "os": "linux", "cpu": "arm" }, "sha512-eVdpxh4wYcm0PofJIZVuYuLiqBIakQ9uFZmipf6LF/HRj5Bgm0eb3qL/mr1smyXIS1twwOxNWndd8z0E374hiA=="],
|
||||
|
||||
"lightningcss-linux-arm64-gnu": ["lightningcss-linux-arm64-gnu@1.30.2", "", { "os": "linux", "cpu": "arm64" }, "sha512-UK65WJAbwIJbiBFXpxrbTNArtfuznvxAJw4Q2ZGlU8kPeDIWEX1dg3rn2veBVUylA2Ezg89ktszWbaQnxD/e3A=="],
|
||||
|
||||
"lightningcss-linux-arm64-musl": ["lightningcss-linux-arm64-musl@1.30.2", "", { "os": "linux", "cpu": "arm64" }, "sha512-5Vh9dGeblpTxWHpOx8iauV02popZDsCYMPIgiuw97OJ5uaDsL86cnqSFs5LZkG3ghHoX5isLgWzMs+eD1YzrnA=="],
|
||||
|
||||
"lightningcss-linux-x64-gnu": ["lightningcss-linux-x64-gnu@1.30.2", "", { "os": "linux", "cpu": "x64" }, "sha512-Cfd46gdmj1vQ+lR6VRTTadNHu6ALuw2pKR9lYq4FnhvgBc4zWY1EtZcAc6EffShbb1MFrIPfLDXD6Xprbnni4w=="],
|
||||
|
||||
"lightningcss-linux-x64-musl": ["lightningcss-linux-x64-musl@1.30.2", "", { "os": "linux", "cpu": "x64" }, "sha512-XJaLUUFXb6/QG2lGIW6aIk6jKdtjtcffUT0NKvIqhSBY3hh9Ch+1LCeH80dR9q9LBjG3ewbDjnumefsLsP6aiA=="],
|
||||
|
||||
"lightningcss-win32-arm64-msvc": ["lightningcss-win32-arm64-msvc@1.30.2", "", { "os": "win32", "cpu": "arm64" }, "sha512-FZn+vaj7zLv//D/192WFFVA0RgHawIcHqLX9xuWiQt7P0PtdFEVaxgF9rjM/IRYHQXNnk61/H/gb2Ei+kUQ4xQ=="],
|
||||
|
||||
"lightningcss-win32-x64-msvc": ["lightningcss-win32-x64-msvc@1.30.2", "", { "os": "win32", "cpu": "x64" }, "sha512-5g1yc73p+iAkid5phb4oVFMB45417DkRevRbt/El/gKXJk4jid+vPFF/AXbxn05Aky8PapwzZrdJShv5C0avjw=="],
|
||||
|
||||
"magic-string": ["magic-string@0.30.21", "", { "dependencies": { "@jridgewell/sourcemap-codec": "1.5.5" } }, "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ=="],
|
||||
|
||||
"ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="],
|
||||
|
||||
"nanoid": ["nanoid@3.3.11", "", { "bin": { "nanoid": "bin/nanoid.cjs" } }, "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w=="],
|
||||
|
||||
"next": ["next@15.5.7", "", { "dependencies": { "@next/env": "15.5.7", "@swc/helpers": "0.5.15", "caniuse-lite": "^1.0.30001579", "postcss": "8.4.31", "styled-jsx": "5.1.6" }, "optionalDependencies": { "@next/swc-darwin-arm64": "15.5.7", "@next/swc-darwin-x64": "15.5.7", "@next/swc-linux-arm64-gnu": "15.5.7", "@next/swc-linux-arm64-musl": "15.5.7", "@next/swc-linux-x64-gnu": "15.5.7", "@next/swc-linux-x64-musl": "15.5.7", "@next/swc-win32-arm64-msvc": "15.5.7", "@next/swc-win32-x64-msvc": "15.5.7", "sharp": "^0.34.3" }, "peerDependencies": { "@opentelemetry/api": "^1.1.0", "@playwright/test": "^1.51.1", "babel-plugin-react-compiler": "*", "react": "^18.2.0 || 19.0.0-rc-de68d2f4-20241204 || ^19.0.0", "react-dom": "^18.2.0 || 19.0.0-rc-de68d2f4-20241204 || ^19.0.0", "sass": "^1.3.0" }, "optionalPeers": ["@opentelemetry/api", "@playwright/test", "babel-plugin-react-compiler", "sass"], "bin": { "next": "dist/bin/next" } }, "sha512-+t2/0jIJ48kUpGKkdlhgkv+zPTEOoXyr60qXe68eB/pl3CMJaLeIGjzp5D6Oqt25hCBiBTt8wEeeAzfJvUKnPQ=="],
|
||||
|
||||
"next-auth": ["next-auth@5.0.0-beta.25", "", { "dependencies": { "@auth/core": "0.37.2" }, "peerDependencies": { "next": "15.5.6", "react": "19.2.0" } }, "sha512-2dJJw1sHQl2qxCrRk+KTQbeH+izFbGFPuJj5eGgBZFYyiYYtvlrBeUw1E/OJJxTRjuxbSYGnCTkUIRsIIW0bog=="],
|
||||
|
||||
"oauth4webapi": ["oauth4webapi@3.8.2", "", {}, "sha512-FzZZ+bht5X0FKe7Mwz3DAVAmlH1BV5blSak/lHMBKz0/EBMhX6B10GlQYI51+oRp8ObJaX0g6pXrAxZh5s8rjw=="],
|
||||
|
||||
"picocolors": ["picocolors@1.1.1", "", {}, "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA=="],
|
||||
|
||||
"postcss": ["postcss@8.5.6", "", { "dependencies": { "nanoid": "3.3.11", "picocolors": "1.1.1", "source-map-js": "1.2.1" } }, "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg=="],
|
||||
|
||||
"postgres": ["postgres@3.4.7", "", {}, "sha512-Jtc2612XINuBjIl/QTWsV5UvE8UHuNblcO3vVADSrKsrc6RqGX6lOW1cEo3CM2v0XG4Nat8nI+YM7/f26VxXLw=="],
|
||||
|
||||
"preact": ["preact@10.24.3", "", {}, "sha512-Z2dPnBnMUfyQfSQ+GBdsGa16hz35YmLmtTLhM169uW944hYL6xzTYkJjC07j+Wosz733pMWx0fgON3JNw1jJQA=="],
|
||||
|
||||
"preact-render-to-string": ["preact-render-to-string@6.5.11", "", { "peerDependencies": { "preact": "10.24.3" } }, "sha512-ubnauqoGczeGISiOh6RjX0/cdaF8v/oDXIjO85XALCQjwQP+SB4RDXXtvZ6yTYSjG+PC1QRP2AhPgCEsM2EvUw=="],
|
||||
|
||||
"pretty-format": ["pretty-format@3.8.0", "", {}, "sha512-WuxUnVtlWL1OfZFQFuqvnvs6MiAGk9UNsBostyBOB0Is9wb5uRESevA6rnl/rkksXaGX3GzZhPup5d6Vp1nFew=="],
|
||||
|
||||
"react": ["react@19.2.0", "", {}, "sha512-tmbWg6W31tQLeB5cdIBOicJDJRR2KzXsV7uSK9iNfLWQ5bIZfxuPEHp7M8wiHyHnn0DD1i7w3Zmin0FtkrwoCQ=="],
|
||||
|
||||
"react-dom": ["react-dom@19.2.0", "", { "dependencies": { "scheduler": "0.27.0" }, "peerDependencies": { "react": "19.2.0" } }, "sha512-UlbRu4cAiGaIewkPyiRGJk0imDN2T3JjieT6spoL2UeSf5od4n5LB/mQ4ejmxhCFT1tYe8IvaFulzynWovsEFQ=="],
|
||||
|
||||
"resolve-pkg-maps": ["resolve-pkg-maps@1.0.0", "", {}, "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw=="],
|
||||
|
||||
"scheduler": ["scheduler@0.27.0", "", {}, "sha512-eNv+WrVbKu1f3vbYJT/xtiF5syA5HPIMtf9IgY/nKg0sWqzAUEvqY/xm7OcZc/qafLx/iO9FgOmeSAp4v5ti/Q=="],
|
||||
|
||||
"semver": ["semver@7.7.3", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q=="],
|
||||
|
||||
"server-only": ["server-only@0.0.1", "", {}, "sha512-qepMx2JxAa5jjfzxG79yPPq+8BuFToHd1hm7kI+Z4zAq1ftQiP7HcxMhDDItrbtwVeLg/cY2JnKnrcFkmiswNA=="],
|
||||
|
||||
"sharp": ["sharp@0.34.4", "", { "dependencies": { "@img/colour": "1.0.0", "detect-libc": "2.1.2", "semver": "7.7.3" }, "optionalDependencies": { "@img/sharp-darwin-arm64": "0.34.4", "@img/sharp-darwin-x64": "0.34.4", "@img/sharp-libvips-darwin-arm64": "1.2.3", "@img/sharp-libvips-darwin-x64": "1.2.3", "@img/sharp-libvips-linux-arm": "1.2.3", "@img/sharp-libvips-linux-arm64": "1.2.3", "@img/sharp-libvips-linux-ppc64": "1.2.3", "@img/sharp-libvips-linux-s390x": "1.2.3", "@img/sharp-libvips-linux-x64": "1.2.3", "@img/sharp-libvips-linuxmusl-arm64": "1.2.3", "@img/sharp-libvips-linuxmusl-x64": "1.2.3", "@img/sharp-linux-arm": "0.34.4", "@img/sharp-linux-arm64": "0.34.4", "@img/sharp-linux-ppc64": "0.34.4", "@img/sharp-linux-s390x": "0.34.4", "@img/sharp-linux-x64": "0.34.4", "@img/sharp-linuxmusl-arm64": "0.34.4", "@img/sharp-linuxmusl-x64": "0.34.4", "@img/sharp-wasm32": "0.34.4", "@img/sharp-win32-arm64": "0.34.4", "@img/sharp-win32-ia32": "0.34.4", "@img/sharp-win32-x64": "0.34.4" } }, "sha512-FUH39xp3SBPnxWvd5iib1X8XY7J0K0X7d93sie9CJg2PO8/7gmg89Nve6OjItK53/MlAushNNxteBYfM6DEuoA=="],
|
||||
|
||||
"shell-quote": ["shell-quote@1.8.3", "", {}, "sha512-ObmnIF4hXNg1BqhnHmgbDETF8dLPCggZWBjkQfhZpbszZnYur5DUljTcCHii5LC3J5E0yeO/1LIMyH+UvHQgyw=="],
|
||||
|
||||
"source-map": ["source-map@0.6.1", "", {}, "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g=="],
|
||||
|
||||
"source-map-js": ["source-map-js@1.2.1", "", {}, "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA=="],
|
||||
|
||||
"source-map-support": ["source-map-support@0.5.21", "", { "dependencies": { "buffer-from": "1.1.2", "source-map": "0.6.1" } }, "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w=="],
|
||||
|
||||
"styled-jsx": ["styled-jsx@5.1.6", "", { "dependencies": { "client-only": "0.0.1" }, "peerDependencies": { "react": "19.2.0" } }, "sha512-qSVyDTeMotdvQYoHWLNGwRFJHC+i+ZvdBRYosOFgC+Wg1vx4frN2/RG/NA7SYqqvKNLf39P2LSRA2pu6n0XYZA=="],
|
||||
|
||||
"superjson": ["superjson@2.2.5", "", { "dependencies": { "copy-anything": "4.0.5" } }, "sha512-zWPTX96LVsA/eVYnqOM2+ofcdPqdS1dAF1LN4TS2/MWuUpfitd9ctTa87wt4xrYnZnkLtS69xpBdSxVBP5Rm6w=="],
|
||||
|
||||
"tailwindcss": ["tailwindcss@4.1.16", "", {}, "sha512-pONL5awpaQX4LN5eiv7moSiSPd/DLDzKVRJz8Q9PgzmAdd1R4307GQS2ZpfiN7ZmekdQrfhZZiSE5jkLR4WNaA=="],
|
||||
|
||||
"tapable": ["tapable@2.3.0", "", {}, "sha512-g9ljZiwki/LfxmQADO3dEY1CbpmXT5Hm2fJ+QaGKwSXUylMybePR7/67YW7jOrrvjEgL1Fmz5kzyAjWVWLlucg=="],
|
||||
|
||||
"tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="],
|
||||
|
||||
"typescript": ["typescript@5.9.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw=="],
|
||||
|
||||
"undici-types": ["undici-types@6.21.0", "", {}, "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ=="],
|
||||
|
||||
"which": ["which@4.0.0", "", { "dependencies": { "isexe": "3.1.1" }, "bin": { "node-which": "bin/which.js" } }, "sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg=="],
|
||||
|
||||
"zod": ["zod@3.25.76", "", {}, "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ=="],
|
||||
|
||||
"@esbuild-kit/core-utils/esbuild": ["esbuild@0.18.20", "", { "optionalDependencies": { "@esbuild/android-arm": "0.18.20", "@esbuild/android-arm64": "0.18.20", "@esbuild/android-x64": "0.18.20", "@esbuild/darwin-arm64": "0.18.20", "@esbuild/darwin-x64": "0.18.20", "@esbuild/freebsd-arm64": "0.18.20", "@esbuild/freebsd-x64": "0.18.20", "@esbuild/linux-arm": "0.18.20", "@esbuild/linux-arm64": "0.18.20", "@esbuild/linux-ia32": "0.18.20", "@esbuild/linux-loong64": "0.18.20", "@esbuild/linux-mips64el": "0.18.20", "@esbuild/linux-ppc64": "0.18.20", "@esbuild/linux-riscv64": "0.18.20", "@esbuild/linux-s390x": "0.18.20", "@esbuild/linux-x64": "0.18.20", "@esbuild/netbsd-x64": "0.18.20", "@esbuild/openbsd-x64": "0.18.20", "@esbuild/sunos-x64": "0.18.20", "@esbuild/win32-arm64": "0.18.20", "@esbuild/win32-ia32": "0.18.20", "@esbuild/win32-x64": "0.18.20" }, "bin": { "esbuild": "bin/esbuild" } }, "sha512-ceqxoedUrcayh7Y7ZX6NdbbDzGROiyVBgC4PriJThBKSVPWnnFHZAkfI1lJT8QFkOwH4qOS2SJkS4wvpGl8BpA=="],
|
||||
|
||||
"drizzle-kit/esbuild": ["esbuild@0.19.12", "", { "optionalDependencies": { "@esbuild/aix-ppc64": "0.19.12", "@esbuild/android-arm": "0.19.12", "@esbuild/android-arm64": "0.19.12", "@esbuild/android-x64": "0.19.12", "@esbuild/darwin-arm64": "0.19.12", "@esbuild/darwin-x64": "0.19.12", "@esbuild/freebsd-arm64": "0.19.12", "@esbuild/freebsd-x64": "0.19.12", "@esbuild/linux-arm": "0.19.12", "@esbuild/linux-arm64": "0.19.12", "@esbuild/linux-ia32": "0.19.12", "@esbuild/linux-loong64": "0.19.12", "@esbuild/linux-mips64el": "0.19.12", "@esbuild/linux-ppc64": "0.19.12", "@esbuild/linux-riscv64": "0.19.12", "@esbuild/linux-s390x": "0.19.12", "@esbuild/linux-x64": "0.19.12", "@esbuild/netbsd-x64": "0.19.12", "@esbuild/openbsd-x64": "0.19.12", "@esbuild/sunos-x64": "0.19.12", "@esbuild/win32-arm64": "0.19.12", "@esbuild/win32-ia32": "0.19.12", "@esbuild/win32-x64": "0.19.12" }, "bin": { "esbuild": "bin/esbuild" } }, "sha512-aARqgq8roFBj054KvQr5f1sFu0D65G+miZRCuJyJ0G13Zwx7vRar5Zhn2tkQNzIXcBrNVsv/8stehpj+GAjgbg=="],
|
||||
|
||||
"next/postcss": ["postcss@8.4.31", "", { "dependencies": { "nanoid": "3.3.11", "picocolors": "1.1.1", "source-map-js": "1.2.1" } }, "sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ=="],
|
||||
|
||||
"next-auth/@auth/core": ["@auth/core@0.37.2", "", { "dependencies": { "@panva/hkdf": "1.2.1", "@types/cookie": "0.6.0", "cookie": "0.7.1", "jose": "5.10.0", "oauth4webapi": "3.8.2", "preact": "10.11.3", "preact-render-to-string": "5.2.3" } }, "sha512-kUvzyvkcd6h1vpeMAojK2y7+PAV5H+0Cc9+ZlKYDFhDY31AlvsB+GW5vNO4qE3Y07KeQgvNO9U0QUx/fN62kBw=="],
|
||||
|
||||
"@esbuild-kit/core-utils/esbuild/@esbuild/android-arm": ["@esbuild/android-arm@0.18.20", "", { "os": "android", "cpu": "arm" }, "sha512-fyi7TDI/ijKKNZTUJAQqiG5T7YjJXgnzkURqmGj13C6dCqckZBLdl4h7bkhHt/t0WP+zO9/zwroDvANaOqO5Sw=="],
|
||||
|
||||
"@esbuild-kit/core-utils/esbuild/@esbuild/android-arm64": ["@esbuild/android-arm64@0.18.20", "", { "os": "android", "cpu": "arm64" }, "sha512-Nz4rJcchGDtENV0eMKUNa6L12zz2zBDXuhj/Vjh18zGqB44Bi7MBMSXjgunJgjRhCmKOjnPuZp4Mb6OKqtMHLQ=="],
|
||||
|
||||
"@esbuild-kit/core-utils/esbuild/@esbuild/android-x64": ["@esbuild/android-x64@0.18.20", "", { "os": "android", "cpu": "x64" }, "sha512-8GDdlePJA8D6zlZYJV/jnrRAi6rOiNaCC/JclcXpB+KIuvfBN4owLtgzY2bsxnx666XjJx2kDPUmnTtR8qKQUg=="],
|
||||
|
||||
"@esbuild-kit/core-utils/esbuild/@esbuild/darwin-arm64": ["@esbuild/darwin-arm64@0.18.20", "", { "os": "darwin", "cpu": "arm64" }, "sha512-bxRHW5kHU38zS2lPTPOyuyTm+S+eobPUnTNkdJEfAddYgEcll4xkT8DB9d2008DtTbl7uJag2HuE5NZAZgnNEA=="],
|
||||
|
||||
"@esbuild-kit/core-utils/esbuild/@esbuild/darwin-x64": ["@esbuild/darwin-x64@0.18.20", "", { "os": "darwin", "cpu": "x64" }, "sha512-pc5gxlMDxzm513qPGbCbDukOdsGtKhfxD1zJKXjCCcU7ju50O7MeAZ8c4krSJcOIJGFR+qx21yMMVYwiQvyTyQ=="],
|
||||
|
||||
"@esbuild-kit/core-utils/esbuild/@esbuild/freebsd-arm64": ["@esbuild/freebsd-arm64@0.18.20", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-yqDQHy4QHevpMAaxhhIwYPMv1NECwOvIpGCZkECn8w2WFHXjEwrBn3CeNIYsibZ/iZEUemj++M26W3cNR5h+Tw=="],
|
||||
|
||||
"@esbuild-kit/core-utils/esbuild/@esbuild/freebsd-x64": ["@esbuild/freebsd-x64@0.18.20", "", { "os": "freebsd", "cpu": "x64" }, "sha512-tgWRPPuQsd3RmBZwarGVHZQvtzfEBOreNuxEMKFcd5DaDn2PbBxfwLcj4+aenoh7ctXcbXmOQIn8HI6mCSw5MQ=="],
|
||||
|
||||
"@esbuild-kit/core-utils/esbuild/@esbuild/linux-arm": ["@esbuild/linux-arm@0.18.20", "", { "os": "linux", "cpu": "arm" }, "sha512-/5bHkMWnq1EgKr1V+Ybz3s1hWXok7mDFUMQ4cG10AfW3wL02PSZi5kFpYKrptDsgb2WAJIvRcDm+qIvXf/apvg=="],
|
||||
|
||||
"@esbuild-kit/core-utils/esbuild/@esbuild/linux-arm64": ["@esbuild/linux-arm64@0.18.20", "", { "os": "linux", "cpu": "arm64" }, "sha512-2YbscF+UL7SQAVIpnWvYwM+3LskyDmPhe31pE7/aoTMFKKzIc9lLbyGUpmmb8a8AixOL61sQ/mFh3jEjHYFvdA=="],
|
||||
|
||||
"@esbuild-kit/core-utils/esbuild/@esbuild/linux-ia32": ["@esbuild/linux-ia32@0.18.20", "", { "os": "linux", "cpu": "ia32" }, "sha512-P4etWwq6IsReT0E1KHU40bOnzMHoH73aXp96Fs8TIT6z9Hu8G6+0SHSw9i2isWrD2nbx2qo5yUqACgdfVGx7TA=="],
|
||||
|
||||
"@esbuild-kit/core-utils/esbuild/@esbuild/linux-loong64": ["@esbuild/linux-loong64@0.18.20", "", { "os": "linux", "cpu": "none" }, "sha512-nXW8nqBTrOpDLPgPY9uV+/1DjxoQ7DoB2N8eocyq8I9XuqJ7BiAMDMf9n1xZM9TgW0J8zrquIb/A7s3BJv7rjg=="],
|
||||
|
||||
"@esbuild-kit/core-utils/esbuild/@esbuild/linux-mips64el": ["@esbuild/linux-mips64el@0.18.20", "", { "os": "linux", "cpu": "none" }, "sha512-d5NeaXZcHp8PzYy5VnXV3VSd2D328Zb+9dEq5HE6bw6+N86JVPExrA6O68OPwobntbNJ0pzCpUFZTo3w0GyetQ=="],
|
||||
|
||||
"@esbuild-kit/core-utils/esbuild/@esbuild/linux-ppc64": ["@esbuild/linux-ppc64@0.18.20", "", { "os": "linux", "cpu": "ppc64" }, "sha512-WHPyeScRNcmANnLQkq6AfyXRFr5D6N2sKgkFo2FqguP44Nw2eyDlbTdZwd9GYk98DZG9QItIiTlFLHJHjxP3FA=="],
|
||||
|
||||
"@esbuild-kit/core-utils/esbuild/@esbuild/linux-riscv64": ["@esbuild/linux-riscv64@0.18.20", "", { "os": "linux", "cpu": "none" }, "sha512-WSxo6h5ecI5XH34KC7w5veNnKkju3zBRLEQNY7mv5mtBmrP/MjNBCAlsM2u5hDBlS3NGcTQpoBvRzqBcRtpq1A=="],
|
||||
|
||||
"@esbuild-kit/core-utils/esbuild/@esbuild/linux-s390x": ["@esbuild/linux-s390x@0.18.20", "", { "os": "linux", "cpu": "s390x" }, "sha512-+8231GMs3mAEth6Ja1iK0a1sQ3ohfcpzpRLH8uuc5/KVDFneH6jtAJLFGafpzpMRO6DzJ6AvXKze9LfFMrIHVQ=="],
|
||||
|
||||
"@esbuild-kit/core-utils/esbuild/@esbuild/linux-x64": ["@esbuild/linux-x64@0.18.20", "", { "os": "linux", "cpu": "x64" }, "sha512-UYqiqemphJcNsFEskc73jQ7B9jgwjWrSayxawS6UVFZGWrAAtkzjxSqnoclCXxWtfwLdzU+vTpcNYhpn43uP1w=="],
|
||||
|
||||
"@esbuild-kit/core-utils/esbuild/@esbuild/netbsd-x64": ["@esbuild/netbsd-x64@0.18.20", "", { "os": "none", "cpu": "x64" }, "sha512-iO1c++VP6xUBUmltHZoMtCUdPlnPGdBom6IrO4gyKPFFVBKioIImVooR5I83nTew5UOYrk3gIJhbZh8X44y06A=="],
|
||||
|
||||
"@esbuild-kit/core-utils/esbuild/@esbuild/openbsd-x64": ["@esbuild/openbsd-x64@0.18.20", "", { "os": "openbsd", "cpu": "x64" }, "sha512-e5e4YSsuQfX4cxcygw/UCPIEP6wbIL+se3sxPdCiMbFLBWu0eiZOJ7WoD+ptCLrmjZBK1Wk7I6D/I3NglUGOxg=="],
|
||||
|
||||
"@esbuild-kit/core-utils/esbuild/@esbuild/sunos-x64": ["@esbuild/sunos-x64@0.18.20", "", { "os": "sunos", "cpu": "x64" }, "sha512-kDbFRFp0YpTQVVrqUd5FTYmWo45zGaXe0X8E1G/LKFC0v8x0vWrhOWSLITcCn63lmZIxfOMXtCfti/RxN/0wnQ=="],
|
||||
|
||||
"@esbuild-kit/core-utils/esbuild/@esbuild/win32-arm64": ["@esbuild/win32-arm64@0.18.20", "", { "os": "win32", "cpu": "arm64" }, "sha512-ddYFR6ItYgoaq4v4JmQQaAI5s7npztfV4Ag6NrhiaW0RrnOXqBkgwZLofVTlq1daVTQNhtI5oieTvkRPfZrePg=="],
|
||||
|
||||
"@esbuild-kit/core-utils/esbuild/@esbuild/win32-ia32": ["@esbuild/win32-ia32@0.18.20", "", { "os": "win32", "cpu": "ia32" }, "sha512-Wv7QBi3ID/rROT08SABTS7eV4hX26sVduqDOTe1MvGMjNd3EjOz4b7zeexIR62GTIEKrfJXKL9LFxTYgkyeu7g=="],
|
||||
|
||||
"@esbuild-kit/core-utils/esbuild/@esbuild/win32-x64": ["@esbuild/win32-x64@0.18.20", "", { "os": "win32", "cpu": "x64" }, "sha512-kTdfRcSiDfQca/y9QIkng02avJ+NCaQvrMejlsB3RRv5sE9rRoeBPISaZpKxHELzRxZyLvNts1P27W3wV+8geQ=="],
|
||||
|
||||
"drizzle-kit/esbuild/@esbuild/aix-ppc64": ["@esbuild/aix-ppc64@0.19.12", "", { "os": "aix", "cpu": "ppc64" }, "sha512-bmoCYyWdEL3wDQIVbcyzRyeKLgk2WtWLTWz1ZIAZF/EGbNOwSA6ew3PftJ1PqMiOOGu0OyFMzG53L0zqIpPeNA=="],
|
||||
|
||||
"drizzle-kit/esbuild/@esbuild/android-arm": ["@esbuild/android-arm@0.19.12", "", { "os": "android", "cpu": "arm" }, "sha512-qg/Lj1mu3CdQlDEEiWrlC4eaPZ1KztwGJ9B6J+/6G+/4ewxJg7gqj8eVYWvao1bXrqGiW2rsBZFSX3q2lcW05w=="],
|
||||
|
||||
"drizzle-kit/esbuild/@esbuild/android-arm64": ["@esbuild/android-arm64@0.19.12", "", { "os": "android", "cpu": "arm64" }, "sha512-P0UVNGIienjZv3f5zq0DP3Nt2IE/3plFzuaS96vihvD0Hd6H/q4WXUGpCxD/E8YrSXfNyRPbpTq+T8ZQioSuPA=="],
|
||||
|
||||
"drizzle-kit/esbuild/@esbuild/android-x64": ["@esbuild/android-x64@0.19.12", "", { "os": "android", "cpu": "x64" }, "sha512-3k7ZoUW6Q6YqhdhIaq/WZ7HwBpnFBlW905Fa4s4qWJyiNOgT1dOqDiVAQFwBH7gBRZr17gLrlFCRzF6jFh7Kew=="],
|
||||
|
||||
"drizzle-kit/esbuild/@esbuild/darwin-arm64": ["@esbuild/darwin-arm64@0.19.12", "", { "os": "darwin", "cpu": "arm64" }, "sha512-B6IeSgZgtEzGC42jsI+YYu9Z3HKRxp8ZT3cqhvliEHovq8HSX2YX8lNocDn79gCKJXOSaEot9MVYky7AKjCs8g=="],
|
||||
|
||||
"drizzle-kit/esbuild/@esbuild/darwin-x64": ["@esbuild/darwin-x64@0.19.12", "", { "os": "darwin", "cpu": "x64" }, "sha512-hKoVkKzFiToTgn+41qGhsUJXFlIjxI/jSYeZf3ugemDYZldIXIxhvwN6erJGlX4t5h417iFuheZ7l+YVn05N3A=="],
|
||||
|
||||
"drizzle-kit/esbuild/@esbuild/freebsd-arm64": ["@esbuild/freebsd-arm64@0.19.12", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-4aRvFIXmwAcDBw9AueDQ2YnGmz5L6obe5kmPT8Vd+/+x/JMVKCgdcRwH6APrbpNXsPz+K653Qg8HB/oXvXVukA=="],
|
||||
|
||||
"drizzle-kit/esbuild/@esbuild/freebsd-x64": ["@esbuild/freebsd-x64@0.19.12", "", { "os": "freebsd", "cpu": "x64" }, "sha512-EYoXZ4d8xtBoVN7CEwWY2IN4ho76xjYXqSXMNccFSx2lgqOG/1TBPW0yPx1bJZk94qu3tX0fycJeeQsKovA8gg=="],
|
||||
|
||||
"drizzle-kit/esbuild/@esbuild/linux-arm": ["@esbuild/linux-arm@0.19.12", "", { "os": "linux", "cpu": "arm" }, "sha512-J5jPms//KhSNv+LO1S1TX1UWp1ucM6N6XuL6ITdKWElCu8wXP72l9MM0zDTzzeikVyqFE6U8YAV9/tFyj0ti+w=="],
|
||||
|
||||
"drizzle-kit/esbuild/@esbuild/linux-arm64": ["@esbuild/linux-arm64@0.19.12", "", { "os": "linux", "cpu": "arm64" }, "sha512-EoTjyYyLuVPfdPLsGVVVC8a0p1BFFvtpQDB/YLEhaXyf/5bczaGeN15QkR+O4S5LeJ92Tqotve7i1jn35qwvdA=="],
|
||||
|
||||
"drizzle-kit/esbuild/@esbuild/linux-ia32": ["@esbuild/linux-ia32@0.19.12", "", { "os": "linux", "cpu": "ia32" }, "sha512-Thsa42rrP1+UIGaWz47uydHSBOgTUnwBwNq59khgIwktK6x60Hivfbux9iNR0eHCHzOLjLMLfUMLCypBkZXMHA=="],
|
||||
|
||||
"drizzle-kit/esbuild/@esbuild/linux-loong64": ["@esbuild/linux-loong64@0.19.12", "", { "os": "linux", "cpu": "none" }, "sha512-LiXdXA0s3IqRRjm6rV6XaWATScKAXjI4R4LoDlvO7+yQqFdlr1Bax62sRwkVvRIrwXxvtYEHHI4dm50jAXkuAA=="],
|
||||
|
||||
"drizzle-kit/esbuild/@esbuild/linux-mips64el": ["@esbuild/linux-mips64el@0.19.12", "", { "os": "linux", "cpu": "none" }, "sha512-fEnAuj5VGTanfJ07ff0gOA6IPsvrVHLVb6Lyd1g2/ed67oU1eFzL0r9WL7ZzscD+/N6i3dWumGE1Un4f7Amf+w=="],
|
||||
|
||||
"drizzle-kit/esbuild/@esbuild/linux-ppc64": ["@esbuild/linux-ppc64@0.19.12", "", { "os": "linux", "cpu": "ppc64" }, "sha512-nYJA2/QPimDQOh1rKWedNOe3Gfc8PabU7HT3iXWtNUbRzXS9+vgB0Fjaqr//XNbd82mCxHzik2qotuI89cfixg=="],
|
||||
|
||||
"drizzle-kit/esbuild/@esbuild/linux-riscv64": ["@esbuild/linux-riscv64@0.19.12", "", { "os": "linux", "cpu": "none" }, "sha512-2MueBrlPQCw5dVJJpQdUYgeqIzDQgw3QtiAHUC4RBz9FXPrskyyU3VI1hw7C0BSKB9OduwSJ79FTCqtGMWqJHg=="],
|
||||
|
||||
"drizzle-kit/esbuild/@esbuild/linux-s390x": ["@esbuild/linux-s390x@0.19.12", "", { "os": "linux", "cpu": "s390x" }, "sha512-+Pil1Nv3Umes4m3AZKqA2anfhJiVmNCYkPchwFJNEJN5QxmTs1uzyy4TvmDrCRNT2ApwSari7ZIgrPeUx4UZDg=="],
|
||||
|
||||
"drizzle-kit/esbuild/@esbuild/linux-x64": ["@esbuild/linux-x64@0.19.12", "", { "os": "linux", "cpu": "x64" }, "sha512-B71g1QpxfwBvNrfyJdVDexenDIt1CiDN1TIXLbhOw0KhJzE78KIFGX6OJ9MrtC0oOqMWf+0xop4qEU8JrJTwCg=="],
|
||||
|
||||
"drizzle-kit/esbuild/@esbuild/netbsd-x64": ["@esbuild/netbsd-x64@0.19.12", "", { "os": "none", "cpu": "x64" }, "sha512-3ltjQ7n1owJgFbuC61Oj++XhtzmymoCihNFgT84UAmJnxJfm4sYCiSLTXZtE00VWYpPMYc+ZQmB6xbSdVh0JWA=="],
|
||||
|
||||
"drizzle-kit/esbuild/@esbuild/openbsd-x64": ["@esbuild/openbsd-x64@0.19.12", "", { "os": "openbsd", "cpu": "x64" }, "sha512-RbrfTB9SWsr0kWmb9srfF+L933uMDdu9BIzdA7os2t0TXhCRjrQyCeOt6wVxr79CKD4c+p+YhCj31HBkYcXebw=="],
|
||||
|
||||
"drizzle-kit/esbuild/@esbuild/sunos-x64": ["@esbuild/sunos-x64@0.19.12", "", { "os": "sunos", "cpu": "x64" }, "sha512-HKjJwRrW8uWtCQnQOz9qcU3mUZhTUQvi56Q8DPTLLB+DawoiQdjsYq+j+D3s9I8VFtDr+F9CjgXKKC4ss89IeA=="],
|
||||
|
||||
"drizzle-kit/esbuild/@esbuild/win32-arm64": ["@esbuild/win32-arm64@0.19.12", "", { "os": "win32", "cpu": "arm64" }, "sha512-URgtR1dJnmGvX864pn1B2YUYNzjmXkuJOIqG2HdU62MVS4EHpU2946OZoTMnRUHklGtJdJZ33QfzdjGACXhn1A=="],
|
||||
|
||||
"drizzle-kit/esbuild/@esbuild/win32-ia32": ["@esbuild/win32-ia32@0.19.12", "", { "os": "win32", "cpu": "ia32" }, "sha512-+ZOE6pUkMOJfmxmBZElNOx72NKpIa/HFOMGzu8fqzQJ5kgf6aTGrcJaFsNiVMH4JKpMipyK+7k0n2UXN7a8YKQ=="],
|
||||
|
||||
"drizzle-kit/esbuild/@esbuild/win32-x64": ["@esbuild/win32-x64@0.19.12", "", { "os": "win32", "cpu": "x64" }, "sha512-T1QyPSDCyMXaO3pzBkF96E8xMkiRYbUEZADd29SyPGabqxMViNoii+NcK7eWJAEoU6RZyEm5lVSIjTmcdoB9HA=="],
|
||||
|
||||
"next-auth/@auth/core/jose": ["jose@5.10.0", "", {}, "sha512-s+3Al/p9g32Iq+oqXxkW//7jk2Vig6FF1CFqzVXoTUXt2qz89YWbL+OwS17NFYEvxC35n0FKeGO2LGYSxeM2Gg=="],
|
||||
|
||||
"next-auth/@auth/core/preact": ["preact@10.11.3", "", {}, "sha512-eY93IVpod/zG3uMF22Unl8h9KkrcKIRs2EGar8hwLZZDU1lkjph303V9HZBwufh2s736U6VXuhD109LYqPoffg=="],
|
||||
|
||||
"next-auth/@auth/core/preact-render-to-string": ["preact-render-to-string@5.2.3", "", { "dependencies": { "pretty-format": "3.8.0" }, "peerDependencies": { "preact": "10.11.3" } }, "sha512-aPDxUn5o3GhWdtJtW0svRC2SS/l8D9MAgo2+AWml+BhDImb27ALf04Q2d+AHqUUOc6RdSXFIBVa2gxzgMKgtZA=="],
|
||||
}
|
||||
}
|
||||
5 bench/install/next-env.d.ts (vendored)
@@ -1,5 +0,0 @@
/// <reference types="next" />
/// <reference types="next/image-types/global" />

// NOTE: This file should not be edited
// see https://nextjs.org/docs/basic-features/typescript for more information.
@@ -1,10 +0,0 @@
/**
 * Run `build` or `dev` with `SKIP_ENV_VALIDATION` to skip env validation. This is especially useful
 * for Docker builds.
 */
import "./src/env.js";

/** @type {import("next").NextConfig} */
const config = {};

export default config;
@@ -1,52 +1,31 @@
|
||||
{
|
||||
"name": "installbench",
|
||||
"version": "0.1.0",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"sideEffects": false,
|
||||
"scripts": {
|
||||
"build": "next build",
|
||||
"check": "biome check .",
|
||||
"check:unsafe": "biome check --write --unsafe .",
|
||||
"check:write": "biome check --write .",
|
||||
"db:generate": "drizzle-kit generate",
|
||||
"db:migrate": "drizzle-kit migrate",
|
||||
"db:push": "drizzle-kit push",
|
||||
"db:studio": "drizzle-kit studio",
|
||||
"dev": "next dev --turbo",
|
||||
"preview": "next build && next start",
|
||||
"start": "next start",
|
||||
"typecheck": "tsc --noEmit"
|
||||
"build": "remix build",
|
||||
"dev": "remix dev",
|
||||
"start": "remix-serve build",
|
||||
"typecheck": "tsc",
|
||||
"clean": "rm -rf node_modules",
|
||||
"bench": "hyperfine --prepare 'rm -rf node_modules' --warmup 1 --runs 3 'bun install' 'pnpm install' 'yarn' 'npm install'"
|
||||
},
|
||||
"dependencies": {
|
||||
"@auth/drizzle-adapter": "^1.7.2",
|
||||
"@t3-oss/env-nextjs": "^0.12.0",
|
||||
"@tanstack/react-query": "^5.69.0",
|
||||
"@trpc/client": "^11.0.0",
|
||||
"@trpc/react-query": "^11.0.0",
|
||||
"@trpc/server": "^11.0.0",
|
||||
"drizzle-orm": "^0.41.0",
|
||||
"esbuild": "^0.25.11",
|
||||
"next": "15.5.7",
|
||||
"next-auth": "5.0.0-beta.25",
|
||||
"postgres": "^3.4.4",
|
||||
"react": "^19.0.0",
|
||||
"react-dom": "^19.0.0",
|
||||
"server-only": "^0.0.1",
|
||||
"superjson": "^2.2.1",
|
||||
"zod": "^3.24.2"
|
||||
"@remix-run/node": "^1.15.0",
|
||||
"@remix-run/react": "^1.15.0",
|
||||
"@remix-run/serve": "^1.15.0",
|
||||
"isbot": "^3.6.5",
|
||||
"react": "^18.2.0",
|
||||
"react-dom": "^18.2.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@biomejs/biome": "1.9.4",
|
||||
"@tailwindcss/postcss": "^4.0.15",
|
||||
"@types/node": "^20.14.10",
|
||||
"@types/react": "^19.0.0",
|
||||
"@types/react-dom": "^19.0.0",
|
||||
"drizzle-kit": "^0.30.5",
|
||||
"postcss": "^8.5.3",
|
||||
"tailwindcss": "^4.0.15",
|
||||
"typescript": "^5.8.2"
|
||||
"@remix-run/dev": "^1.15.0",
|
||||
"@remix-run/eslint-config": "^1.15.0",
|
||||
"@types/react": "^18.0.25",
|
||||
"@types/react-dom": "^18.0.8",
|
||||
"eslint": "^8.27.0",
|
||||
"typescript": "^4.8.4"
|
||||
},
|
||||
"ct3aMetadata": {
|
||||
"initVersion": "7.39.3"
|
||||
"engines": {
|
||||
"node": ">=14"
|
||||
}
|
||||
}
|
||||
|
||||
BIN bench/install/public/favicon.ico (new binary file, 17 KiB; not shown)
14 bench/install/remix.config.js (new file)
@@ -0,0 +1,14 @@
/** @type {import('@remix-run/dev').AppConfig} */
module.exports = {
  ignoredRouteFiles: ["**/.*"],
  // appDirectory: "app",
  // assetsBuildDirectory: "public/build",
  // serverBuildPath: "build/index.js",
  // publicPath: "/build/",
  future: {
    v2_errorBoundary: true,
    v2_meta: true,
    v2_normalizeFormMethod: true,
    v2_routeConvention: true,
  },
};
2 bench/install/remix.env.d.ts (vendored, new file)
@@ -0,0 +1,2 @@
/// <reference types="@remix-run/dev" />
/// <reference types="@remix-run/node" />
22 bench/install/tsconfig.json (new file)
@@ -0,0 +1,22 @@
{
  "include": ["remix.env.d.ts", "**/*.ts", "**/*.tsx"],
  "compilerOptions": {
    "lib": ["DOM", "DOM.Iterable", "ES2019"],
    "isolatedModules": true,
    "esModuleInterop": true,
    "jsx": "react-jsx",
    "moduleResolution": "node",
    "resolveJsonModule": true,
    "target": "ES2019",
    "strict": true,
    "allowJs": true,
    "forceConsistentCasingInFileNames": true,
    "baseUrl": ".",
    "paths": {
      "~/*": ["./app/*"]
    },

    // Remix takes care of building everything in `remix build`.
    "noEmit": true
  }
}
@@ -19,7 +19,6 @@
    "react-dom": "^18.3.1",
    "string-width": "7.1.0",
    "strip-ansi": "^7.1.0",
    "tar": "^7.4.3",
    "tinycolor2": "^1.6.0",
    "zx": "^7.2.3"
  },
@@ -1,6 +1,6 @@
const isBun = typeof globalThis?.Bun?.sql !== "undefined";
import postgres from "postgres";
const sql = isBun ? Bun.sql : postgres();
const sql = isBun ? Bun.sql : postgres;

// Create the table if it doesn't exist
await sql`
@@ -4,16 +4,20 @@
|
||||
"": {
|
||||
"name": "react-hello-world",
|
||||
"dependencies": {
|
||||
"react": "^19.2.0",
|
||||
"react-dom": "^19.2.0",
|
||||
"react": "next",
|
||||
"react-dom": "next",
|
||||
},
|
||||
},
|
||||
},
|
||||
"packages": {
|
||||
"react": ["react@19.2.0", "", {}, "sha512-tmbWg6W31tQLeB5cdIBOicJDJRR2KzXsV7uSK9iNfLWQ5bIZfxuPEHp7M8wiHyHnn0DD1i7w3Zmin0FtkrwoCQ=="],
|
||||
"js-tokens": ["js-tokens@4.0.0", "", {}, "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ=="],
|
||||
|
||||
"react-dom": ["react-dom@19.2.0", "", { "dependencies": { "scheduler": "^0.27.0" }, "peerDependencies": { "react": "^19.2.0" } }, "sha512-UlbRu4cAiGaIewkPyiRGJk0imDN2T3JjieT6spoL2UeSf5od4n5LB/mQ4ejmxhCFT1tYe8IvaFulzynWovsEFQ=="],
|
||||
"loose-envify": ["loose-envify@1.4.0", "", { "dependencies": { "js-tokens": "^3.0.0 || ^4.0.0" }, "bin": { "loose-envify": "cli.js" } }, "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q=="],
|
||||
|
||||
"scheduler": ["scheduler@0.27.0", "", {}, "sha512-eNv+WrVbKu1f3vbYJT/xtiF5syA5HPIMtf9IgY/nKg0sWqzAUEvqY/xm7OcZc/qafLx/iO9FgOmeSAp4v5ti/Q=="],
|
||||
"react": ["react@18.3.0-next-b72ed698f-20230303", "", { "dependencies": { "loose-envify": "^1.1.0" } }, "sha512-l6RbwXa9Peerh9pQEq62DDypxSQfavbybY0wV1vwZ63X0P5VaaEesZAz1KPpnVvXjTtQaOMQsIPvnQwmaVqzTQ=="],
|
||||
|
||||
"react-dom": ["react-dom@18.3.0-next-b72ed698f-20230303", "", { "dependencies": { "loose-envify": "^1.1.0", "scheduler": "0.24.0-next-b72ed698f-20230303" }, "peerDependencies": { "react": "18.3.0-next-b72ed698f-20230303" } }, "sha512-0Gh/gmTT6H8KxswIQB/8shdTTfs6QIu86nNqZf3Y0RBqIwgTVxRaQVz14/Fw4/Nt81nK/Jt6KT4bx3yvOxZDGQ=="],
|
||||
|
||||
"scheduler": ["scheduler@0.24.0-next-b72ed698f-20230303", "", { "dependencies": { "loose-envify": "^1.1.0" } }, "sha512-ct4DMMFbc2kFxCdvbG+i/Jn1S1oqrIFSn2VX/mam+Ya0iuNy+lb8rgT7A+YBUqrQNDaNEqABYI2sOQgqoRxp7w=="],
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,14 +4,13 @@
|
||||
"description": "",
|
||||
"main": "react-hello-world.node.js",
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1",
|
||||
"build:workerd": "bun build react-hello-world.workerd.jsx --outfile=react-hello-world.workerd.js --format=esm --production && (echo '// MessageChannel polyfill for workerd'; echo 'if (typeof MessageChannel === \"undefined\") {'; echo ' globalThis.MessageChannel = class MessageChannel {'; echo ' constructor() {'; echo ' this.port1 = { onmessage: null, postMessage: () => {} };'; echo ' this.port2 = {'; echo ' postMessage: (msg) => {'; echo ' if (this.port1.onmessage) {'; echo ' queueMicrotask(() => this.port1.onmessage({ data: msg }));'; echo ' }'; echo ' }'; echo ' };'; echo ' }'; echo ' };'; echo '}'; cat react-hello-world.workerd.js) > temp.js && mv temp.js react-hello-world.workerd.js"
|
||||
"test": "echo \"Error: no test specified\" && exit 1"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "Colin McDonnell",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"react": "^19.2.0",
|
||||
"react-dom": "^19.2.0"
|
||||
"react": "next",
|
||||
"react-dom": "next"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,23 +0,0 @@
using Workerd = import "/workerd/workerd.capnp";

const config :Workerd.Config = (
  services = [
    (name = "main", worker = .mainWorker),
  ],

  sockets = [
    ( name = "http",
      address = "*:3001",
      http = (),
      service = "main"
    ),
  ]
);

const mainWorker :Workerd.Worker = (
  modules = [
    (name = "worker", esModule = embed "react-hello-world.workerd.js"),
  ],
  compatibilityDate = "2025-01-01",
  compatibilityFlags = ["nodejs_compat_v2"],
);
File diff suppressed because one or more lines are too long
@@ -1,40 +0,0 @@
// Cloudflare Workers version with export default fetch
// Run with: workerd serve react-hello-world.workerd.config.capnp

// Polyfill MessageChannel for workerd
if (typeof MessageChannel === 'undefined') {
  globalThis.MessageChannel = class MessageChannel {
    constructor() {
      this.port1 = { onmessage: null, postMessage: () => {} };
      this.port2 = {
        postMessage: (msg) => {
          if (this.port1.onmessage) {
            queueMicrotask(() => this.port1.onmessage({ data: msg }));
          }
        }
      };
    }
  };
}

import React from "react";
import { renderToReadableStream } from "react-dom/server";

const headers = {
  "Content-Type": "text/html",
};

const App = () => (
  <html>
    <body>
      <h1>Hello World</h1>
      <p>This is an example.</p>
    </body>
  </html>
);

export default {
  async fetch(request) {
    return new Response(await renderToReadableStream(<App />), { headers });
  },
};
@@ -13,4 +13,7 @@ export function run(opts = {}) {
}

export const bench = Mitata.bench;
export const group = Mitata.group;

export function group(_name, fn) {
  return Mitata.group(fn);
}
@@ -1,477 +0,0 @@
|
||||
import { mkdirSync, mkdtempSync, rmSync, writeFileSync } from "node:fs";
|
||||
import { tmpdir } from "node:os";
|
||||
import { join } from "node:path";
|
||||
import { Pack, Unpack } from "tar";
|
||||
import { bench, group, run } from "../runner.mjs";
|
||||
|
||||
// Check if Bun.Archive is available
|
||||
const hasBunArchive = typeof Bun !== "undefined" && typeof Bun.Archive !== "undefined";
|
||||
|
||||
// Test data sizes
|
||||
const smallContent = "Hello, World!";
|
||||
const mediumContent = Buffer.alloc(10 * 1024, "x").toString(); // 10KB
|
||||
const largeContent = Buffer.alloc(100 * 1024, "x").toString(); // 100KB
|
||||
|
||||
// Create test files for node-tar (it reads from filesystem)
|
||||
const setupDir = mkdtempSync(join(tmpdir(), "archive-bench-setup-"));
|
||||
|
||||
function setupNodeTarFiles(prefix, files) {
|
||||
const dir = join(setupDir, prefix);
|
||||
mkdirSync(dir, { recursive: true });
|
||||
for (const [name, content] of Object.entries(files)) {
|
||||
const filePath = join(dir, name);
|
||||
const fileDir = join(filePath, "..");
|
||||
mkdirSync(fileDir, { recursive: true });
|
||||
writeFileSync(filePath, content);
|
||||
}
|
||||
return dir;
|
||||
}
|
||||
|
||||
// Setup directories for different test cases
|
||||
const smallFilesDir = setupNodeTarFiles("small", {
|
||||
"file1.txt": smallContent,
|
||||
"file2.txt": smallContent,
|
||||
"file3.txt": smallContent,
|
||||
});
|
||||
|
||||
const mediumFilesDir = setupNodeTarFiles("medium", {
|
||||
"file1.txt": mediumContent,
|
||||
"file2.txt": mediumContent,
|
||||
"file3.txt": mediumContent,
|
||||
});
|
||||
|
||||
const largeFilesDir = setupNodeTarFiles("large", {
|
||||
"file1.txt": largeContent,
|
||||
"file2.txt": largeContent,
|
||||
"file3.txt": largeContent,
|
||||
});
|
||||
|
||||
const manyFilesEntries = {};
|
||||
for (let i = 0; i < 100; i++) {
|
||||
manyFilesEntries[`file${i}.txt`] = smallContent;
|
||||
}
|
||||
const manyFilesDir = setupNodeTarFiles("many", manyFilesEntries);
|
||||
|
||||
// Pre-create archives for extraction benchmarks
|
||||
let smallTarGzBuffer, mediumTarGzBuffer, largeTarGzBuffer, manyFilesTarGzBuffer;
|
||||
let smallTarBuffer, mediumTarBuffer, largeTarBuffer, manyFilesTarBuffer;
|
||||
let smallBunArchiveGz, mediumBunArchiveGz, largeBunArchiveGz, manyFilesBunArchiveGz;
|
||||
let smallBunArchive, mediumBunArchive, largeBunArchive, manyFilesBunArchive;
|
||||
|
||||
// Create tar buffer using node-tar (with optional gzip)
|
||||
async function createNodeTarBuffer(cwd, files, gzip = false) {
|
||||
return new Promise(resolve => {
|
||||
const pack = new Pack({ cwd, gzip });
|
||||
const bufs = [];
|
||||
pack.on("data", chunk => bufs.push(chunk));
|
||||
pack.on("end", () => resolve(Buffer.concat(bufs)));
|
||||
for (const file of files) {
|
||||
pack.add(file);
|
||||
}
|
||||
pack.end();
|
||||
});
|
||||
}
|
||||
|
||||
// Extract tar buffer using node-tar
|
||||
async function extractNodeTarBuffer(buffer, cwd) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const unpack = new Unpack({ cwd });
|
||||
unpack.on("end", resolve);
|
||||
unpack.on("error", reject);
|
||||
unpack.end(buffer);
|
||||
});
|
||||
}
|
||||
|
||||
// Initialize gzipped archives
|
||||
smallTarGzBuffer = await createNodeTarBuffer(smallFilesDir, ["file1.txt", "file2.txt", "file3.txt"], true);
|
||||
mediumTarGzBuffer = await createNodeTarBuffer(mediumFilesDir, ["file1.txt", "file2.txt", "file3.txt"], true);
|
||||
largeTarGzBuffer = await createNodeTarBuffer(largeFilesDir, ["file1.txt", "file2.txt", "file3.txt"], true);
|
||||
manyFilesTarGzBuffer = await createNodeTarBuffer(manyFilesDir, Object.keys(manyFilesEntries), true);
|
||||
|
||||
// Initialize uncompressed archives
|
||||
smallTarBuffer = await createNodeTarBuffer(smallFilesDir, ["file1.txt", "file2.txt", "file3.txt"], false);
|
||||
mediumTarBuffer = await createNodeTarBuffer(mediumFilesDir, ["file1.txt", "file2.txt", "file3.txt"], false);
|
||||
largeTarBuffer = await createNodeTarBuffer(largeFilesDir, ["file1.txt", "file2.txt", "file3.txt"], false);
|
||||
manyFilesTarBuffer = await createNodeTarBuffer(manyFilesDir, Object.keys(manyFilesEntries), false);
|
||||
|
||||
const smallFiles = { "file1.txt": smallContent, "file2.txt": smallContent, "file3.txt": smallContent };
|
||||
const mediumFiles = { "file1.txt": mediumContent, "file2.txt": mediumContent, "file3.txt": mediumContent };
|
||||
const largeFiles = { "file1.txt": largeContent, "file2.txt": largeContent, "file3.txt": largeContent };
|
||||
|
||||
if (hasBunArchive) {
|
||||
smallBunArchiveGz = await Bun.Archive.from(smallFiles).bytes("gzip");
|
||||
mediumBunArchiveGz = await Bun.Archive.from(mediumFiles).bytes("gzip");
|
||||
largeBunArchiveGz = await Bun.Archive.from(largeFiles).bytes("gzip");
|
||||
manyFilesBunArchiveGz = await Bun.Archive.from(manyFilesEntries).bytes("gzip");
|
||||
|
||||
smallBunArchive = await Bun.Archive.from(smallFiles).bytes();
|
||||
mediumBunArchive = await Bun.Archive.from(mediumFiles).bytes();
|
||||
largeBunArchive = await Bun.Archive.from(largeFiles).bytes();
|
||||
manyFilesBunArchive = await Bun.Archive.from(manyFilesEntries).bytes();
|
||||
}
|
||||
|
||||
// Create reusable extraction directories (overwriting is fine)
|
||||
const extractDirNodeTar = mkdtempSync(join(tmpdir(), "archive-bench-extract-node-"));
|
||||
const extractDirBun = mkdtempSync(join(tmpdir(), "archive-bench-extract-bun-"));
|
||||
const writeDirNodeTar = mkdtempSync(join(tmpdir(), "archive-bench-write-node-"));
|
||||
const writeDirBun = mkdtempSync(join(tmpdir(), "archive-bench-write-bun-"));
|
||||
|
||||
// ============================================================================
|
||||
// Create .tar (uncompressed) benchmarks
|
||||
// ============================================================================
|
||||
|
||||
group("create .tar (3 small files)", () => {
|
||||
bench("node-tar", async () => {
|
||||
await createNodeTarBuffer(smallFilesDir, ["file1.txt", "file2.txt", "file3.txt"], false);
|
||||
});
|
||||
|
||||
if (hasBunArchive) {
|
||||
bench("Bun.Archive", async () => {
|
||||
await Bun.Archive.from(smallFiles).bytes();
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
group("create .tar (3 x 100KB files)", () => {
|
||||
bench("node-tar", async () => {
|
||||
await createNodeTarBuffer(largeFilesDir, ["file1.txt", "file2.txt", "file3.txt"], false);
|
||||
});
|
||||
|
||||
if (hasBunArchive) {
|
||||
bench("Bun.Archive", async () => {
|
||||
await Bun.Archive.from(largeFiles).bytes();
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
group("create .tar (100 small files)", () => {
|
||||
bench("node-tar", async () => {
|
||||
await createNodeTarBuffer(manyFilesDir, Object.keys(manyFilesEntries), false);
|
||||
});
|
||||
|
||||
if (hasBunArchive) {
|
||||
bench("Bun.Archive", async () => {
|
||||
await Bun.Archive.from(manyFilesEntries).bytes();
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// ============================================================================
|
||||
// Create .tar.gz (compressed) benchmarks
|
||||
// ============================================================================
|
||||
|
||||
group("create .tar.gz (3 small files)", () => {
|
||||
bench("node-tar", async () => {
|
||||
await createNodeTarBuffer(smallFilesDir, ["file1.txt", "file2.txt", "file3.txt"], true);
|
||||
});
|
||||
|
||||
if (hasBunArchive) {
|
||||
bench("Bun.Archive", async () => {
|
||||
await Bun.Archive.from(smallFiles).bytes("gzip");
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
group("create .tar.gz (3 x 100KB files)", () => {
|
||||
bench("node-tar", async () => {
|
||||
await createNodeTarBuffer(largeFilesDir, ["file1.txt", "file2.txt", "file3.txt"], true);
|
||||
});
|
||||
|
||||
if (hasBunArchive) {
|
||||
bench("Bun.Archive", async () => {
|
||||
await Bun.Archive.from(largeFiles).bytes("gzip");
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
group("create .tar.gz (100 small files)", () => {
|
||||
bench("node-tar", async () => {
|
||||
await createNodeTarBuffer(manyFilesDir, Object.keys(manyFilesEntries), true);
|
||||
});
|
||||
|
||||
if (hasBunArchive) {
|
||||
bench("Bun.Archive", async () => {
|
||||
await Bun.Archive.from(manyFilesEntries).bytes("gzip");
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// ============================================================================
|
||||
// Extract .tar (uncompressed) benchmarks
|
||||
// ============================================================================
|
||||
|
||||
group("extract .tar (3 small files)", () => {
|
||||
bench("node-tar", async () => {
|
||||
await extractNodeTarBuffer(smallTarBuffer, extractDirNodeTar);
|
||||
});
|
||||
|
||||
if (hasBunArchive) {
|
||||
bench("Bun.Archive", async () => {
|
||||
await Bun.Archive.from(smallBunArchive).extract(extractDirBun);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
group("extract .tar (3 x 100KB files)", () => {
|
||||
bench("node-tar", async () => {
|
||||
await extractNodeTarBuffer(largeTarBuffer, extractDirNodeTar);
|
||||
});
|
||||
|
||||
if (hasBunArchive) {
|
||||
bench("Bun.Archive", async () => {
|
||||
await Bun.Archive.from(largeBunArchive).extract(extractDirBun);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
group("extract .tar (100 small files)", () => {
|
||||
bench("node-tar", async () => {
|
||||
await extractNodeTarBuffer(manyFilesTarBuffer, extractDirNodeTar);
|
||||
});
|
||||
|
||||
if (hasBunArchive) {
|
||||
bench("Bun.Archive", async () => {
|
||||
await Bun.Archive.from(manyFilesBunArchive).extract(extractDirBun);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// ============================================================================
|
||||
// Extract .tar.gz (compressed) benchmarks
|
||||
// ============================================================================
|
||||
|
||||
group("extract .tar.gz (3 small files)", () => {
|
||||
bench("node-tar", async () => {
|
||||
await extractNodeTarBuffer(smallTarGzBuffer, extractDirNodeTar);
|
||||
});
|
||||
|
||||
if (hasBunArchive) {
|
||||
bench("Bun.Archive", async () => {
|
||||
await Bun.Archive.from(smallBunArchiveGz).extract(extractDirBun);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
group("extract .tar.gz (3 x 100KB files)", () => {
|
||||
bench("node-tar", async () => {
|
||||
await extractNodeTarBuffer(largeTarGzBuffer, extractDirNodeTar);
|
||||
});
|
||||
|
||||
if (hasBunArchive) {
|
||||
bench("Bun.Archive", async () => {
|
||||
await Bun.Archive.from(largeBunArchiveGz).extract(extractDirBun);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
group("extract .tar.gz (100 small files)", () => {
|
||||
bench("node-tar", async () => {
|
||||
await extractNodeTarBuffer(manyFilesTarGzBuffer, extractDirNodeTar);
|
||||
});
|
||||
|
||||
if (hasBunArchive) {
|
||||
bench("Bun.Archive", async () => {
|
||||
await Bun.Archive.from(manyFilesBunArchiveGz).extract(extractDirBun);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// ============================================================================
|
||||
// Write .tar to disk benchmarks
|
||||
// ============================================================================
|
||||
|
||||
let writeCounter = 0;
|
||||
|
||||
group("write .tar to disk (3 small files)", () => {
|
||||
bench("node-tar + writeFileSync", async () => {
|
||||
const buffer = await createNodeTarBuffer(smallFilesDir, ["file1.txt", "file2.txt", "file3.txt"], false);
|
||||
writeFileSync(join(writeDirNodeTar, `archive-${writeCounter++}.tar`), buffer);
|
||||
});
|
||||
|
||||
if (hasBunArchive) {
|
||||
bench("Bun.Archive.write", async () => {
|
||||
await Bun.Archive.write(join(writeDirBun, `archive-${writeCounter++}.tar`), smallFiles);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
group("write .tar to disk (3 x 100KB files)", () => {
|
||||
bench("node-tar + writeFileSync", async () => {
|
||||
const buffer = await createNodeTarBuffer(largeFilesDir, ["file1.txt", "file2.txt", "file3.txt"], false);
|
||||
writeFileSync(join(writeDirNodeTar, `archive-${writeCounter++}.tar`), buffer);
|
||||
});
|
||||
|
||||
if (hasBunArchive) {
|
||||
bench("Bun.Archive.write", async () => {
|
||||
await Bun.Archive.write(join(writeDirBun, `archive-${writeCounter++}.tar`), largeFiles);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
group("write .tar to disk (100 small files)", () => {
|
||||
bench("node-tar + writeFileSync", async () => {
|
||||
const buffer = await createNodeTarBuffer(manyFilesDir, Object.keys(manyFilesEntries), false);
|
||||
writeFileSync(join(writeDirNodeTar, `archive-${writeCounter++}.tar`), buffer);
|
||||
});
|
||||
|
||||
if (hasBunArchive) {
|
||||
bench("Bun.Archive.write", async () => {
|
||||
await Bun.Archive.write(join(writeDirBun, `archive-${writeCounter++}.tar`), manyFilesEntries);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// ============================================================================
|
||||
// Write .tar.gz to disk benchmarks
|
||||
// ============================================================================
|
||||
|
||||
group("write .tar.gz to disk (3 small files)", () => {
|
||||
bench("node-tar + writeFileSync", async () => {
|
||||
const buffer = await createNodeTarBuffer(smallFilesDir, ["file1.txt", "file2.txt", "file3.txt"], true);
|
||||
writeFileSync(join(writeDirNodeTar, `archive-${writeCounter++}.tar.gz`), buffer);
|
||||
});
|
||||
|
||||
if (hasBunArchive) {
|
||||
bench("Bun.Archive.write", async () => {
|
||||
await Bun.Archive.write(join(writeDirBun, `archive-${writeCounter++}.tar.gz`), smallFiles, "gzip");
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
group("write .tar.gz to disk (3 x 100KB files)", () => {
|
||||
bench("node-tar + writeFileSync", async () => {
|
||||
const buffer = await createNodeTarBuffer(largeFilesDir, ["file1.txt", "file2.txt", "file3.txt"], true);
|
||||
writeFileSync(join(writeDirNodeTar, `archive-${writeCounter++}.tar.gz`), buffer);
|
||||
});
|
||||
|
||||
if (hasBunArchive) {
|
||||
bench("Bun.Archive.write", async () => {
|
||||
await Bun.Archive.write(join(writeDirBun, `archive-${writeCounter++}.tar.gz`), largeFiles, "gzip");
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
group("write .tar.gz to disk (100 small files)", () => {
|
||||
bench("node-tar + writeFileSync", async () => {
|
||||
const buffer = await createNodeTarBuffer(manyFilesDir, Object.keys(manyFilesEntries), true);
|
||||
writeFileSync(join(writeDirNodeTar, `archive-${writeCounter++}.tar.gz`), buffer);
|
||||
});
|
||||
|
||||
if (hasBunArchive) {
|
||||
bench("Bun.Archive.write", async () => {
|
||||
await Bun.Archive.write(join(writeDirBun, `archive-${writeCounter++}.tar.gz`), manyFilesEntries, "gzip");
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// ============================================================================
|
||||
// Get files array from archive (files() method) benchmarks
|
||||
// ============================================================================
|
||||
|
||||
// Helper to get files array from node-tar (reads all entries into memory)
|
||||
async function getFilesArrayNodeTar(buffer) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const files = new Map();
|
||||
let pending = 0;
|
||||
let closed = false;
|
||||
|
||||
const maybeResolve = () => {
|
||||
if (closed && pending === 0) {
|
||||
resolve(files);
|
||||
}
|
||||
};
|
||||
|
||||
const unpack = new Unpack({
|
||||
onReadEntry: entry => {
|
||||
if (entry.type === "File") {
|
||||
pending++;
|
||||
const chunks = [];
|
||||
entry.on("data", chunk => chunks.push(chunk));
|
||||
entry.on("end", () => {
|
||||
const content = Buffer.concat(chunks);
|
||||
// Create a File-like object similar to Bun.Archive.files()
|
||||
files.set(entry.path, new Blob([content]));
|
||||
pending--;
|
||||
maybeResolve();
|
||||
});
|
||||
}
|
||||
entry.resume(); // Drain the entry
|
||||
},
|
||||
});
|
||||
unpack.on("close", () => {
|
||||
closed = true;
|
||||
maybeResolve();
|
||||
});
|
||||
unpack.on("error", reject);
|
||||
unpack.end(buffer);
|
||||
});
|
||||
}
|
||||
|
||||
group("files() - get all files as Map (3 small files)", () => {
|
||||
bench("node-tar", async () => {
|
||||
await getFilesArrayNodeTar(smallTarBuffer);
|
||||
});
|
||||
|
||||
if (hasBunArchive) {
|
||||
bench("Bun.Archive.files()", async () => {
|
||||
await Bun.Archive.from(smallBunArchive).files();
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
group("files() - get all files as Map (3 x 100KB files)", () => {
|
||||
bench("node-tar", async () => {
|
||||
await getFilesArrayNodeTar(largeTarBuffer);
|
||||
});
|
||||
|
||||
if (hasBunArchive) {
|
||||
bench("Bun.Archive.files()", async () => {
|
||||
await Bun.Archive.from(largeBunArchive).files();
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
group("files() - get all files as Map (100 small files)", () => {
|
||||
bench("node-tar", async () => {
|
||||
await getFilesArrayNodeTar(manyFilesTarBuffer);
|
||||
});
|
||||
|
||||
if (hasBunArchive) {
|
||||
bench("Bun.Archive.files()", async () => {
|
||||
await Bun.Archive.from(manyFilesBunArchive).files();
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
group("files() - get all files as Map from .tar.gz (3 small files)", () => {
|
||||
bench("node-tar", async () => {
|
||||
await getFilesArrayNodeTar(smallTarGzBuffer);
|
||||
});
|
||||
|
||||
if (hasBunArchive) {
|
||||
bench("Bun.Archive.files()", async () => {
|
||||
await Bun.Archive.from(smallBunArchiveGz).files();
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
group("files() - get all files as Map from .tar.gz (100 small files)", () => {
|
||||
bench("node-tar", async () => {
|
||||
await getFilesArrayNodeTar(manyFilesTarGzBuffer);
|
||||
});
|
||||
|
||||
if (hasBunArchive) {
|
||||
bench("Bun.Archive.files()", async () => {
|
||||
await Bun.Archive.from(manyFilesBunArchiveGz).files();
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
await run();
|
||||
|
||||
// Cleanup
|
||||
rmSync(setupDir, { recursive: true, force: true });
|
||||
rmSync(extractDirNodeTar, { recursive: true, force: true });
|
||||
rmSync(extractDirBun, { recursive: true, force: true });
|
||||
rmSync(writeDirNodeTar, { recursive: true, force: true });
|
||||
rmSync(writeDirBun, { recursive: true, force: true });
|
||||
@@ -1,335 +0,0 @@
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
let sink;
|
||||
|
||||
// Integers
|
||||
bench("int: Array.of(1,2,3,4,5)", () => {
|
||||
sink = Array.of(1, 2, 3, 4, 5);
|
||||
});
|
||||
|
||||
bench("int: Array.of(100 elements)", () => {
|
||||
sink = Array.of(
|
||||
0,
|
||||
1,
|
||||
2,
|
||||
3,
|
||||
4,
|
||||
5,
|
||||
6,
|
||||
7,
|
||||
8,
|
||||
9,
|
||||
10,
|
||||
11,
|
||||
12,
|
||||
13,
|
||||
14,
|
||||
15,
|
||||
16,
|
||||
17,
|
||||
18,
|
||||
19,
|
||||
20,
|
||||
21,
|
||||
22,
|
||||
23,
|
||||
24,
|
||||
25,
|
||||
26,
|
||||
27,
|
||||
28,
|
||||
29,
|
||||
30,
|
||||
31,
|
||||
32,
|
||||
33,
|
||||
34,
|
||||
35,
|
||||
36,
|
||||
37,
|
||||
38,
|
||||
39,
|
||||
40,
|
||||
41,
|
||||
42,
|
||||
43,
|
||||
44,
|
||||
45,
|
||||
46,
|
||||
47,
|
||||
48,
|
||||
49,
|
||||
50,
|
||||
51,
|
||||
52,
|
||||
53,
|
||||
54,
|
||||
55,
|
||||
56,
|
||||
57,
|
||||
58,
|
||||
59,
|
||||
60,
|
||||
61,
|
||||
62,
|
||||
63,
|
||||
64,
|
||||
65,
|
||||
66,
|
||||
67,
|
||||
68,
|
||||
69,
|
||||
70,
|
||||
71,
|
||||
72,
|
||||
73,
|
||||
74,
|
||||
75,
|
||||
76,
|
||||
77,
|
||||
78,
|
||||
79,
|
||||
80,
|
||||
81,
|
||||
82,
|
||||
83,
|
||||
84,
|
||||
85,
|
||||
86,
|
||||
87,
|
||||
88,
|
||||
89,
|
||||
90,
|
||||
91,
|
||||
92,
|
||||
93,
|
||||
94,
|
||||
95,
|
||||
96,
|
||||
97,
|
||||
98,
|
||||
99,
|
||||
);
|
||||
});
|
||||
|
||||
// Doubles
|
||||
bench("double: Array.of(1.1,2.2,3.3,4.4,5.5)", () => {
|
||||
sink = Array.of(1.1, 2.2, 3.3, 4.4, 5.5);
|
||||
});
|
||||
|
||||
bench("double: Array.of(100 elements)", () => {
|
||||
sink = Array.of(
|
||||
0.1,
|
||||
1.1,
|
||||
2.1,
|
||||
3.1,
|
||||
4.1,
|
||||
5.1,
|
||||
6.1,
|
||||
7.1,
|
||||
8.1,
|
||||
9.1,
|
||||
10.1,
|
||||
11.1,
|
||||
12.1,
|
||||
13.1,
|
||||
14.1,
|
||||
15.1,
|
||||
16.1,
|
||||
17.1,
|
||||
18.1,
|
||||
19.1,
|
||||
20.1,
|
||||
21.1,
|
||||
22.1,
|
||||
23.1,
|
||||
24.1,
|
||||
25.1,
|
||||
26.1,
|
||||
27.1,
|
||||
28.1,
|
||||
29.1,
|
||||
30.1,
|
||||
31.1,
|
||||
32.1,
|
||||
33.1,
|
||||
34.1,
|
||||
35.1,
|
||||
36.1,
|
||||
37.1,
|
||||
38.1,
|
||||
39.1,
|
||||
40.1,
|
||||
41.1,
|
||||
42.1,
|
||||
43.1,
|
||||
44.1,
|
||||
45.1,
|
||||
46.1,
|
||||
47.1,
|
||||
48.1,
|
||||
49.1,
|
||||
50.1,
|
||||
51.1,
|
||||
52.1,
|
||||
53.1,
|
||||
54.1,
|
||||
55.1,
|
||||
56.1,
|
||||
57.1,
|
||||
58.1,
|
||||
59.1,
|
||||
60.1,
|
||||
61.1,
|
||||
62.1,
|
||||
63.1,
|
||||
64.1,
|
||||
65.1,
|
||||
66.1,
|
||||
67.1,
|
||||
68.1,
|
||||
69.1,
|
||||
70.1,
|
||||
71.1,
|
||||
72.1,
|
||||
73.1,
|
||||
74.1,
|
||||
75.1,
|
||||
76.1,
|
||||
77.1,
|
||||
78.1,
|
||||
79.1,
|
||||
80.1,
|
||||
81.1,
|
||||
82.1,
|
||||
83.1,
|
||||
84.1,
|
||||
85.1,
|
||||
86.1,
|
||||
87.1,
|
||||
88.1,
|
||||
89.1,
|
||||
90.1,
|
||||
91.1,
|
||||
92.1,
|
||||
93.1,
|
||||
94.1,
|
||||
95.1,
|
||||
96.1,
|
||||
97.1,
|
||||
98.1,
|
||||
99.1,
|
||||
);
|
||||
});
|
||||
|
||||
// Objects
|
||||
bench("object: Array.of(obj x5)", () => {
|
||||
sink = Array.of({ a: 1 }, { a: 2 }, { a: 3 }, { a: 4 }, { a: 5 });
|
||||
});
|
||||
|
||||
bench("object: Array.of(100 elements)", () => {
|
||||
sink = Array.of(
|
||||
{ a: 0 },
|
||||
{ a: 1 },
|
||||
{ a: 2 },
|
||||
{ a: 3 },
|
||||
{ a: 4 },
|
||||
{ a: 5 },
|
||||
{ a: 6 },
|
||||
{ a: 7 },
|
||||
{ a: 8 },
|
||||
{ a: 9 },
|
||||
{ a: 10 },
|
||||
{ a: 11 },
|
||||
{ a: 12 },
|
||||
{ a: 13 },
|
||||
{ a: 14 },
|
||||
{ a: 15 },
|
||||
{ a: 16 },
|
||||
{ a: 17 },
|
||||
{ a: 18 },
|
||||
{ a: 19 },
|
||||
{ a: 20 },
|
||||
{ a: 21 },
|
||||
{ a: 22 },
|
||||
{ a: 23 },
|
||||
{ a: 24 },
|
||||
{ a: 25 },
|
||||
{ a: 26 },
|
||||
{ a: 27 },
|
||||
{ a: 28 },
|
||||
{ a: 29 },
|
||||
{ a: 30 },
|
||||
{ a: 31 },
|
||||
{ a: 32 },
|
||||
{ a: 33 },
|
||||
{ a: 34 },
|
||||
{ a: 35 },
|
||||
{ a: 36 },
|
||||
{ a: 37 },
|
||||
{ a: 38 },
|
||||
{ a: 39 },
|
||||
{ a: 40 },
|
||||
{ a: 41 },
|
||||
{ a: 42 },
|
||||
{ a: 43 },
|
||||
{ a: 44 },
|
||||
{ a: 45 },
|
||||
{ a: 46 },
|
||||
{ a: 47 },
|
||||
{ a: 48 },
|
||||
{ a: 49 },
|
||||
{ a: 50 },
|
||||
{ a: 51 },
|
||||
{ a: 52 },
|
||||
{ a: 53 },
|
||||
{ a: 54 },
|
||||
{ a: 55 },
|
||||
{ a: 56 },
|
||||
{ a: 57 },
|
||||
{ a: 58 },
|
||||
{ a: 59 },
|
||||
{ a: 60 },
|
||||
{ a: 61 },
|
||||
{ a: 62 },
|
||||
{ a: 63 },
|
||||
{ a: 64 },
|
||||
{ a: 65 },
|
||||
{ a: 66 },
|
||||
{ a: 67 },
|
||||
{ a: 68 },
|
||||
{ a: 69 },
|
||||
{ a: 70 },
|
||||
{ a: 71 },
|
||||
{ a: 72 },
|
||||
{ a: 73 },
|
||||
{ a: 74 },
|
||||
{ a: 75 },
|
||||
{ a: 76 },
|
||||
{ a: 77 },
|
||||
{ a: 78 },
|
||||
{ a: 79 },
|
||||
{ a: 80 },
|
||||
{ a: 81 },
|
||||
{ a: 82 },
|
||||
{ a: 83 },
|
||||
{ a: 84 },
|
||||
{ a: 85 },
|
||||
{ a: 86 },
|
||||
{ a: 87 },
|
||||
{ a: 88 },
|
||||
{ a: 89 },
|
||||
{ a: 90 },
|
||||
{ a: 91 },
|
||||
{ a: 92 },
|
||||
{ a: 93 },
|
||||
{ a: 94 },
|
||||
{ a: 95 },
|
||||
{ a: 96 },
|
||||
{ a: 97 },
|
||||
{ a: 98 },
|
||||
{ a: 99 },
|
||||
);
|
||||
});
|
||||
|
||||
await run();
|
||||
@@ -1,156 +0,0 @@
import { bench, group, run } from "../runner.mjs";

const runAll = !process.argv.includes("--simple");

const small = new Uint8Array(1024);
const medium = new Uint8Array(1024 * 100);
const large = new Uint8Array(1024 * 1024);

for (let i = 0; i < large.length; i++) {
  const value = Math.floor(Math.sin(i / 100) * 128 + 128);
  if (i < small.length) small[i] = value;
  if (i < medium.length) medium[i] = value;
  large[i] = value;
}

const format = new Intl.NumberFormat("en-US", { notation: "compact", unit: "byte" });

async function compress(data, format) {
  const cs = new CompressionStream(format);
  const writer = cs.writable.getWriter();
  const reader = cs.readable.getReader();

  writer.write(data);
  writer.close();

  const chunks = [];
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    chunks.push(value);
  }

  const result = new Uint8Array(chunks.reduce((acc, chunk) => acc + chunk.length, 0));
  let offset = 0;
  for (const chunk of chunks) {
    result.set(chunk, offset);
    offset += chunk.length;
  }
  return result;
}

async function decompress(data, format) {
  const ds = new DecompressionStream(format);
  const writer = ds.writable.getWriter();
  const reader = ds.readable.getReader();

  writer.write(data);
  writer.close();

  const chunks = [];
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    chunks.push(value);
  }

  const result = new Uint8Array(chunks.reduce((acc, chunk) => acc + chunk.length, 0));
  let offset = 0;
  for (const chunk of chunks) {
    result.set(chunk, offset);
    offset += chunk.length;
  }
  return result;
}

async function roundTrip(data, format) {
  const compressed = await compress(data, format);
  return await decompress(compressed, format);
}

const formats = ["deflate", "gzip", "deflate-raw"];
if (runAll) formats.push("brotli", "zstd");

// Small data benchmarks (1KB)
group(`CompressionStream ${format.format(small.length)}`, () => {
  for (const fmt of formats) {
    try {
      new CompressionStream(fmt);
      bench(fmt, async () => await compress(small, fmt));
    } catch (e) {
      // Skip unsupported formats
    }
  }
});

// Medium data benchmarks (100KB)
group(`CompressionStream ${format.format(medium.length)}`, () => {
  for (const fmt of formats) {
    try {
      new CompressionStream(fmt);
      bench(fmt, async () => await compress(medium, fmt));
    } catch (e) {}
  }
});

// Large data benchmarks (1MB)
group(`CompressionStream ${format.format(large.length)}`, () => {
  for (const fmt of formats) {
    try {
      new CompressionStream(fmt);
      bench(fmt, async () => await compress(large, fmt));
    } catch (e) {
      // Skip unsupported formats
    }
  }
});

const compressedData = {};
for (const fmt of formats) {
  try {
    compressedData[fmt] = {
      small: await compress(small, fmt),
      medium: await compress(medium, fmt),
      large: await compress(large, fmt),
    };
  } catch (e) {
    // Skip unsupported formats
  }
}

group(`DecompressionStream ${format.format(small.length)}`, () => {
  for (const fmt of formats) {
    if (compressedData[fmt]) {
      bench(fmt, async () => await decompress(compressedData[fmt].small, fmt));
    }
  }
});

group(`DecompressionStream ${format.format(medium.length)}`, () => {
  for (const fmt of formats) {
    if (compressedData[fmt]) {
      bench(fmt, async () => await decompress(compressedData[fmt].medium, fmt));
    }
  }
});

group(`DecompressionStream ${format.format(large.length)}`, () => {
  for (const fmt of formats) {
    if (compressedData[fmt]) {
      bench(fmt, async () => await decompress(compressedData[fmt].large, fmt));
    }
  }
});

group(`roundtrip ${format.format(large.length)}`, () => {
  for (const fmt of formats) {
    try {
      new CompressionStream(fmt);
      bench(fmt, async () => await roundTrip(large, fmt));
    } catch (e) {
      // Skip unsupported formats
    }
  }
});

await run();
@@ -1,4 +0,0 @@
// Child process for IPC benchmarks - echoes messages back to parent
process.on("message", message => {
  process.send(message);
});
@@ -1,45 +0,0 @@
import { fork } from "node:child_process";
import path from "node:path";
import { fileURLToPath } from "node:url";
import { bench, run } from "../runner.mjs";

const __dirname = path.dirname(fileURLToPath(import.meta.url));
const childPath = path.join(__dirname, "ipc-json-child.mjs");

const smallMessage = { type: "ping", id: 1 };
const largeString = Buffer.alloc(10 * 1024 * 1024, "A").toString();
const largeMessage = { type: "ping", id: 1, data: largeString };

async function runBenchmark(message, count) {
  let received = 0;
  const { promise, resolve } = Promise.withResolvers();

  const child = fork(childPath, [], {
    stdio: ["ignore", "ignore", "ignore", "ipc"],
    serialization: "json",
  });

  child.on("message", () => {
    received++;
    if (received >= count) {
      resolve();
    }
  });

  for (let i = 0; i < count; i++) {
    child.send(message);
  }

  await promise;
  child.kill();
}

bench("ipc json - small messages (1000 roundtrips)", async () => {
  await runBenchmark(smallMessage, 1000);
});

bench("ipc json - 10MB messages (10 roundtrips)", async () => {
  await runBenchmark(largeMessage, 10);
});

await run();
@@ -1,57 +0,0 @@
import { bench, run } from "../runner.mjs";

const obj = { a: 1, b: 2, c: 3 };
const objDeep = { a: 1, b: 2, c: 3, d: 4, e: 5, f: 6, g: 7, h: 8 };
const sym = Symbol("test");
const objWithSymbol = { [sym]: 1, a: 2 };

const objs = [
  { f: 50 },
  { f: 50, g: 70 },
  { g: 50, f: 70 },
  { h: 50, f: 70 },
  { z: 50, f: 70 },
  { k: 50, f: 70 },
];

bench("Object.hasOwn - hit", () => {
  return Object.hasOwn(obj, "a");
});

bench("Object.hasOwn - miss", () => {
  return Object.hasOwn(obj, "z");
});

bench("Object.hasOwn - symbol hit", () => {
  return Object.hasOwn(objWithSymbol, sym);
});

bench("Object.hasOwn - symbol miss", () => {
  return Object.hasOwn(objWithSymbol, Symbol("other"));
});

bench("Object.hasOwn - multiple shapes", () => {
  let result = true;
  for (let i = 0; i < objs.length; i++) {
    result = Object.hasOwn(objs[i], "f") && result;
  }
  return result;
});

bench("Object.prototype.hasOwnProperty - hit", () => {
  return obj.hasOwnProperty("a");
});

bench("Object.prototype.hasOwnProperty - miss", () => {
  return obj.hasOwnProperty("z");
});

bench("in operator - hit", () => {
  return "a" in obj;
});

bench("in operator - miss", () => {
  return "z" in obj;
});

await run();
@@ -1,7 +0,0 @@
import { bench, run } from "../runner.mjs";

bench("Promise.race([p1, p2])", async function () {
  return await Promise.race([Promise.resolve(1), Promise.resolve(2)]);
});

await run();
@@ -11,10 +11,10 @@ const builtin = ${JSON.stringify(builtin)};
const now = performance.now();
require(builtin);
const end = performance.now();
process.stdout.write(JSON.stringify({ builtin, time: end - now }) + "\\n");
process.stdout.write(JSON.stringify({builtin, time: end - now}) + "\\n");
`,
);
spawnSync(process.execPath, [path], {
const result = spawnSync(typeof Bun !== "undefined" ? "bun" : "node", [path], {
stdio: ["inherit", "inherit", "inherit"],
env: {
...process.env,
@@ -112,40 +112,12 @@ const obj = {
},
};

const smallObj = { id: 1, name: "test" };

const arrayObj = {
items: Array.from({ length: 100 }, (_, i) => ({ id: i, value: `item-${i}` })),
};

bench("Response.json(obj)", () => {
bench("Response.json(obj)", async () => {
return Response.json(obj);
});

bench("new Response(JSON.stringify(obj))", () => {
return new Response(JSON.stringify(obj), {
headers: { "Content-Type": "application/json" },
});
});

bench("Response.json(smallObj)", () => {
return Response.json(smallObj);
});

bench("new Response(JSON.stringify(smallObj))", () => {
return new Response(JSON.stringify(smallObj), {
headers: { "Content-Type": "application/json" },
});
});

bench("Response.json(arrayObj)", () => {
return Response.json(arrayObj);
});

bench("new Response(JSON.stringify(arrayObj))", () => {
return new Response(JSON.stringify(arrayObj), {
headers: { "Content-Type": "application/json" },
});
bench("Response.json(obj).json()", async () => {
return await Response.json(obj).json();
});

await run();
@@ -1,34 +0,0 @@
import { bench, run } from "../runner.mjs";

const shortStr = "The quick brown fox jumps over the lazy dog";
const longStr = shortStr.repeat(100);

bench("String.includes - short, hit (middle)", () => {
  return shortStr.includes("jumps");
});

bench("String.includes - short, hit (start)", () => {
  return shortStr.includes("The");
});

bench("String.includes - short, hit (end)", () => {
  return shortStr.includes("dog");
});

bench("String.includes - short, miss", () => {
  return shortStr.includes("cat");
});

bench("String.includes - long, hit (middle)", () => {
  return longStr.includes("jumps");
});

bench("String.includes - long, miss", () => {
  return longStr.includes("cat");
});

bench("String.includes - with position", () => {
  return shortStr.includes("fox", 10);
});

await run();
@@ -1,48 +0,0 @@
import { bench, group, run } from "../runner.mjs";

const patterns = [
  { name: "string pattern", input: "https://(sub.)?example(.com/)foo" },
  { name: "hostname IDN", input: { hostname: "xn--caf-dma.com" } },
  {
    name: "pathname + search + hash + baseURL",
    input: {
      pathname: "/foo",
      search: "bar",
      hash: "baz",
      baseURL: "https://example.com:8080",
    },
  },
  { name: "pathname with regex", input: { pathname: "/([[a-z]--a])" } },
  { name: "named groups", input: { pathname: "/users/:id/posts/:postId" } },
  { name: "wildcard", input: { pathname: "/files/*" } },
];

const testURL = "https://sub.example.com/foo";

group("URLPattern parse (constructor)", () => {
  for (const { name, input } of patterns) {
    bench(name, () => {
      return new URLPattern(input);
    });
  }
});

group("URLPattern.test()", () => {
  for (const { name, input } of patterns) {
    const pattern = new URLPattern(input);
    bench(name, () => {
      return pattern.test(testURL);
    });
  }
});

group("URLPattern.exec()", () => {
  for (const { name, input } of patterns) {
    const pattern = new URLPattern(input);
    bench(name, () => {
      return pattern.exec(testURL);
    });
  }
});

await run();
93
build.zig
@@ -18,6 +18,22 @@ const OperatingSystem = @import("src/env.zig").OperatingSystem;
|
||||
|
||||
const pathRel = fs.path.relative;
|
||||
|
||||
/// When updating this, make sure to adjust SetupZig.cmake
|
||||
const recommended_zig_version = "0.14.0";
|
||||
|
||||
// comptime {
|
||||
// if (!std.mem.eql(u8, builtin.zig_version_string, recommended_zig_version)) {
|
||||
// @compileError(
|
||||
// "" ++
|
||||
// "Bun requires Zig version " ++ recommended_zig_version ++ ", but you have " ++
|
||||
// builtin.zig_version_string ++ ". This is automatically configured via Bun's " ++
|
||||
// "CMake setup. You likely meant to run `bun run build`. If you are trying to " ++
|
||||
// "upgrade the Zig compiler, edit ZIG_COMMIT in cmake/tools/SetupZig.cmake or " ++
|
||||
// "comment this error out.",
|
||||
// );
|
||||
// }
|
||||
// }
|
||||
|
||||
const zero_sha = "0000000000000000000000000000000000000000";
|
||||
|
||||
const BunBuildOptions = struct {
|
||||
@@ -32,9 +48,6 @@ const BunBuildOptions = struct {
|
||||
/// enable debug logs in release builds
|
||||
enable_logs: bool = false,
|
||||
enable_asan: bool,
|
||||
enable_fuzzilli: bool,
|
||||
enable_valgrind: bool,
|
||||
use_mimalloc: bool,
|
||||
tracy_callstack_depth: u16,
|
||||
reported_nodejs_version: Version,
|
||||
/// To make iterating on some '@embedFile's faster, we load them at runtime
|
||||
@@ -54,7 +67,6 @@ const BunBuildOptions = struct {
|
||||
|
||||
cached_options_module: ?*Module = null,
|
||||
windows_shim: ?WindowsShim = null,
|
||||
llvm_codegen_threads: ?u32 = null,
|
||||
|
||||
pub fn isBaseline(this: *const BunBuildOptions) bool {
|
||||
return this.arch.isX86() and
|
||||
@@ -82,10 +94,7 @@ const BunBuildOptions = struct {
|
||||
opts.addOption(bool, "baseline", this.isBaseline());
|
||||
opts.addOption(bool, "enable_logs", this.enable_logs);
|
||||
opts.addOption(bool, "enable_asan", this.enable_asan);
|
||||
opts.addOption(bool, "enable_fuzzilli", this.enable_fuzzilli);
|
||||
opts.addOption(bool, "enable_valgrind", this.enable_valgrind);
|
||||
opts.addOption(bool, "use_mimalloc", this.use_mimalloc);
|
||||
opts.addOption([]const u8, "reported_nodejs_version", b.fmt("{f}", .{this.reported_nodejs_version}));
|
||||
opts.addOption([]const u8, "reported_nodejs_version", b.fmt("{}", .{this.reported_nodejs_version}));
|
||||
opts.addOption(bool, "zig_self_hosted_backend", this.no_llvm);
|
||||
opts.addOption(bool, "override_no_export_cpp_apis", this.override_no_export_cpp_apis);
|
||||
|
||||
@@ -120,8 +129,8 @@ pub fn getOSVersionMin(os: OperatingSystem) ?Target.Query.OsVersion {
|
||||
|
||||
pub fn getOSGlibCVersion(os: OperatingSystem) ?Version {
|
||||
return switch (os) {
|
||||
// Compiling with a newer glibc than this will break certain cloud environments. See symbols.test.ts.
|
||||
.linux => .{ .major = 2, .minor = 26, .patch = 0 },
|
||||
// Compiling with a newer glibc than this will break certain cloud environments.
|
||||
.linux => .{ .major = 2, .minor = 27, .patch = 0 },
|
||||
|
||||
else => null,
|
||||
};
|
||||
@@ -204,21 +213,26 @@ pub fn build(b: *Build) !void {
|
||||
var build_options = BunBuildOptions{
|
||||
.target = target,
|
||||
.optimize = optimize,
|
||||
|
||||
.os = os,
|
||||
.arch = arch,
|
||||
|
||||
.codegen_path = codegen_path,
|
||||
.codegen_embed = codegen_embed,
|
||||
.no_llvm = no_llvm,
|
||||
.override_no_export_cpp_apis = override_no_export_cpp_apis,
|
||||
|
||||
.version = try Version.parse(bun_version),
|
||||
.canary_revision = canary: {
|
||||
const rev = b.option(u32, "canary", "Treat this as a canary build") orelse 0;
|
||||
break :canary if (rev == 0) null else rev;
|
||||
},
|
||||
|
||||
.reported_nodejs_version = try Version.parse(
|
||||
b.option([]const u8, "reported_nodejs_version", "Reported Node.js version") orelse
|
||||
"0.0.0-unset",
|
||||
),
|
||||
|
||||
.sha = sha: {
|
||||
const sha_buildoption = b.option([]const u8, "sha", "Force the git sha");
|
||||
const sha_github = b.graph.env_map.get("GITHUB_SHA");
|
||||
@@ -254,13 +268,10 @@ pub fn build(b: *Build) !void {
|
||||
|
||||
break :sha sha;
|
||||
},
|
||||
|
||||
.tracy_callstack_depth = b.option(u16, "tracy_callstack_depth", "") orelse 10,
|
||||
.enable_logs = b.option(bool, "enable_logs", "Enable logs in release") orelse false,
|
||||
.enable_asan = b.option(bool, "enable_asan", "Enable asan") orelse false,
|
||||
.enable_fuzzilli = b.option(bool, "enable_fuzzilli", "Enable fuzzilli instrumentation") orelse false,
|
||||
.enable_valgrind = b.option(bool, "enable_valgrind", "Enable valgrind") orelse false,
|
||||
.use_mimalloc = b.option(bool, "use_mimalloc", "Use mimalloc as default allocator") orelse false,
|
||||
.llvm_codegen_threads = b.option(u32, "llvm_codegen_threads", "Number of threads to use for LLVM codegen") orelse 1,
|
||||
};
|
||||
|
||||
// zig build obj
|
||||
@@ -277,16 +288,14 @@ pub fn build(b: *Build) !void {
|
||||
var o = build_options;
|
||||
var unit_tests = b.addTest(.{
|
||||
.name = "bun-test",
|
||||
.optimize = build_options.optimize,
|
||||
.root_source_file = b.path("src/unit_test.zig"),
|
||||
.test_runner = .{ .path = b.path("src/main_test.zig"), .mode = .simple },
|
||||
.root_module = b.createModule(.{
|
||||
.optimize = build_options.optimize,
|
||||
.root_source_file = b.path("src/unit_test.zig"),
|
||||
.target = build_options.target,
|
||||
.omit_frame_pointer = false,
|
||||
.strip = false,
|
||||
}),
|
||||
.target = build_options.target,
|
||||
.use_llvm = !build_options.no_llvm,
|
||||
.use_lld = if (build_options.os == .mac) false else !build_options.no_llvm,
|
||||
.omit_frame_pointer = false,
|
||||
.strip = false,
|
||||
});
|
||||
configureObj(b, &o, unit_tests);
|
||||
// Setting `linker_allow_shlib_undefined` causes the linker to ignore
|
||||
@@ -320,7 +329,6 @@ pub fn build(b: *Build) !void {
|
||||
var step = b.step("check", "Check for semantic analysis errors");
|
||||
var bun_check_obj = addBunObject(b, &build_options);
|
||||
bun_check_obj.generated_bin = null;
|
||||
// bun_check_obj.use_llvm = false;
|
||||
step.dependOn(&bun_check_obj.step);
|
||||
|
||||
// The default install step will run zig build check. This is so ZLS
|
||||
@@ -492,9 +500,6 @@ fn addMultiCheck(
|
||||
.codegen_path = root_build_options.codegen_path,
|
||||
.no_llvm = root_build_options.no_llvm,
|
||||
.enable_asan = root_build_options.enable_asan,
|
||||
.enable_valgrind = root_build_options.enable_valgrind,
|
||||
.enable_fuzzilli = root_build_options.enable_fuzzilli,
|
||||
.use_mimalloc = root_build_options.use_mimalloc,
|
||||
.override_no_export_cpp_apis = root_build_options.override_no_export_cpp_apis,
|
||||
};
|
||||
|
||||
@@ -582,15 +587,9 @@ pub fn addBunObject(b: *Build, opts: *BunBuildOptions) *Compile {
|
||||
.root_module = root,
|
||||
});
|
||||
configureObj(b, opts, obj);
|
||||
if (enableFastBuild(b)) obj.root_module.strip = true;
|
||||
return obj;
|
||||
}
|
||||
|
||||
fn enableFastBuild(b: *Build) bool {
|
||||
const val = b.graph.env_map.get("BUN_BUILD_FAST") orelse return false;
|
||||
return std.mem.eql(u8, val, "1");
|
||||
}
|
||||
|
||||
fn configureObj(b: *Build, opts: *BunBuildOptions, obj: *Compile) void {
|
||||
// Flags on root module get used for the compilation
|
||||
obj.root_module.omit_frame_pointer = false;
|
||||
@@ -600,29 +599,14 @@ fn configureObj(b: *Build, opts: *BunBuildOptions, obj: *Compile) void {
|
||||
|
||||
// Object options
|
||||
obj.use_llvm = !opts.no_llvm;
|
||||
obj.use_lld = if (opts.os == .mac or opts.os == .linux) false else !opts.no_llvm;
|
||||
|
||||
if (opts.optimize == .Debug) {
|
||||
if (@hasField(std.meta.Child(@TypeOf(obj)), "llvm_codegen_threads"))
|
||||
obj.llvm_codegen_threads = opts.llvm_codegen_threads orelse 0;
|
||||
}
|
||||
|
||||
obj.no_link_obj = opts.os != .windows and !opts.no_llvm;
|
||||
|
||||
|
||||
if (opts.enable_asan and !enableFastBuild(b)) {
|
||||
obj.use_lld = if (opts.os == .mac) false else !opts.no_llvm;
|
||||
if (opts.enable_asan) {
|
||||
if (@hasField(Build.Module, "sanitize_address")) {
|
||||
if (opts.enable_fuzzilli) {
|
||||
obj.sanitize_coverage_trace_pc_guard = true;
|
||||
}
|
||||
obj.root_module.sanitize_address = true;
|
||||
} else {
|
||||
const fail_step = b.addFail("asan is not supported on this platform");
|
||||
obj.step.dependOn(&fail_step.step);
|
||||
}
|
||||
} else if (opts.enable_fuzzilli) {
|
||||
const fail_step = b.addFail("fuzzilli requires asan");
|
||||
obj.step.dependOn(&fail_step.step);
|
||||
}
|
||||
obj.bundle_compiler_rt = false;
|
||||
obj.bundle_ubsan_rt = false;
|
||||
@@ -646,7 +630,7 @@ fn configureObj(b: *Build, opts: *BunBuildOptions, obj: *Compile) void {
|
||||
obj.link_function_sections = true;
|
||||
obj.link_data_sections = true;
|
||||
|
||||
if (opts.optimize == .Debug and opts.enable_valgrind) {
|
||||
if (opts.optimize == .Debug) {
|
||||
obj.root_module.valgrind = true;
|
||||
}
|
||||
}
|
||||
@@ -722,7 +706,6 @@ fn addInternalImports(b: *Build, mod: *Module, opts: *BunBuildOptions) void {
|
||||
// Generated code exposed as individual modules.
|
||||
inline for (.{
|
||||
.{ .file = "ZigGeneratedClasses.zig", .import = "ZigGeneratedClasses" },
|
||||
.{ .file = "bindgen_generated.zig", .import = "bindgen_generated" },
|
||||
.{ .file = "ResolvedSourceTag.zig", .import = "ResolvedSourceTag" },
|
||||
.{ .file = "ErrorCode.zig", .import = "ErrorCode" },
|
||||
.{ .file = "runtime.out.js", .enable = opts.shouldEmbedCode() },
|
||||
@@ -756,7 +739,6 @@ fn addInternalImports(b: *Build, mod: *Module, opts: *BunBuildOptions) void {
|
||||
.{ .file = "node-fallbacks/url.js", .enable = opts.shouldEmbedCode() },
|
||||
.{ .file = "node-fallbacks/util.js", .enable = opts.shouldEmbedCode() },
|
||||
.{ .file = "node-fallbacks/zlib.js", .enable = opts.shouldEmbedCode() },
|
||||
.{ .file = "eval/feedback.ts", .enable = opts.shouldEmbedCode() },
|
||||
}) |entry| {
|
||||
if (!@hasField(@TypeOf(entry), "enable") or entry.enable) {
|
||||
const path = b.pathJoin(&.{ opts.codegen_path, entry.file });
|
||||
@@ -777,13 +759,6 @@ fn addInternalImports(b: *Build, mod: *Module, opts: *BunBuildOptions) void {
|
||||
mod.addImport("cpp", cppImport);
|
||||
cppImport.addImport("bun", mod);
|
||||
}
|
||||
{
|
||||
const ciInfoImport = b.createModule(.{
|
||||
.root_source_file = (std.Build.LazyPath{ .cwd_relative = opts.codegen_path }).path(b, "ci_info.zig"),
|
||||
});
|
||||
mod.addImport("ci_info", ciInfoImport);
|
||||
ciInfoImport.addImport("bun", mod);
|
||||
}
|
||||
inline for (.{
|
||||
.{ .import = "completions-bash", .file = b.path("completions/bun.bash") },
|
||||
.{ .import = "completions-zsh", .file = b.path("completions/bun.zsh") },
|
||||
@@ -809,7 +784,7 @@ fn addInternalImports(b: *Build, mod: *Module, opts: *BunBuildOptions) void {
|
||||
fn propagateImports(source_mod: *Module) !void {
|
||||
var seen = std.AutoHashMap(*Module, void).init(source_mod.owner.graph.arena);
|
||||
defer seen.deinit();
|
||||
var queue = std.array_list.Managed(*Module).init(source_mod.owner.graph.arena);
|
||||
var queue = std.ArrayList(*Module).init(source_mod.owner.graph.arena);
|
||||
defer queue.deinit();
|
||||
try queue.appendSlice(source_mod.import_table.values());
|
||||
while (queue.pop()) |mod| {
|
||||
|
||||
64
bun.lock
@@ -1,6 +1,5 @@
|
||||
{
|
||||
"lockfileVersion": 1,
|
||||
"configVersion": 1,
|
||||
"workspaces": {
|
||||
"": {
|
||||
"name": "bun",
|
||||
@@ -9,14 +8,14 @@
|
||||
"@lezer/cpp": "^1.1.3",
|
||||
"@types/bun": "workspace:*",
|
||||
"bun-tracestrings": "github:oven-sh/bun.report#912ca63e26c51429d3e6799aa2a6ab079b188fd8",
|
||||
"esbuild": "^0.21.5",
|
||||
"mitata": "^0.1.14",
|
||||
"esbuild": "^0.21.4",
|
||||
"mitata": "^0.1.11",
|
||||
"peechy": "0.4.34",
|
||||
"prettier": "^3.6.2",
|
||||
"prettier-plugin-organize-imports": "^4.3.0",
|
||||
"prettier": "^3.5.3",
|
||||
"prettier-plugin-organize-imports": "^4.0.0",
|
||||
"react": "^18.3.1",
|
||||
"react-dom": "^18.3.1",
|
||||
"source-map-js": "^1.2.1",
|
||||
"source-map-js": "^1.2.0",
|
||||
"typescript": "5.9.2",
|
||||
},
|
||||
},
|
||||
@@ -32,11 +31,16 @@
|
||||
"dependencies": {
|
||||
"@types/node": "*",
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/react": "^19",
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@types/react": "^19",
|
||||
},
|
||||
},
|
||||
},
|
||||
"overrides": {
|
||||
"@types/bun": "workspace:packages/@types/bun",
|
||||
"@types/node": "25.0.0",
|
||||
"bun-types": "workspace:packages/bun-types",
|
||||
},
|
||||
"packages": {
|
||||
@@ -86,13 +90,13 @@
|
||||
|
||||
"@esbuild/win32-x64": ["@esbuild/win32-x64@0.21.5", "", { "os": "win32", "cpu": "x64" }, "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw=="],
|
||||
|
||||
"@lezer/common": ["@lezer/common@1.3.0", "", {}, "sha512-L9X8uHCYU310o99L3/MpJKYxPzXPOS7S0NmBaM7UO/x2Kb2WbmMLSkfvdr1KxRIFYOpbY0Jhn7CfLSUDzL8arQ=="],
|
||||
"@lezer/common": ["@lezer/common@1.2.3", "", {}, "sha512-w7ojc8ejBqr2REPsWxJjrMFsA/ysDCFICn8zEOR9mrqzOu2amhITYuLD8ag6XZf0CFXDrhKqw7+tW8cX66NaDA=="],
|
||||
|
||||
"@lezer/cpp": ["@lezer/cpp@1.1.3", "", { "dependencies": { "@lezer/common": "^1.2.0", "@lezer/highlight": "^1.0.0", "@lezer/lr": "^1.0.0" } }, "sha512-ykYvuFQKGsRi6IcE+/hCSGUhb/I4WPjd3ELhEblm2wS2cOznDFzO+ubK2c+ioysOnlZ3EduV+MVQFCPzAIoY3w=="],
|
||||
|
||||
"@lezer/highlight": ["@lezer/highlight@1.2.3", "", { "dependencies": { "@lezer/common": "^1.3.0" } }, "sha512-qXdH7UqTvGfdVBINrgKhDsVTJTxactNNxLk7+UMwZhU13lMHaOBlJe9Vqp907ya56Y3+ed2tlqzys7jDkTmW0g=="],
|
||||
"@lezer/highlight": ["@lezer/highlight@1.2.1", "", { "dependencies": { "@lezer/common": "^1.0.0" } }, "sha512-Z5duk4RN/3zuVO7Jq0pGLJ3qynpxUVsh7IbUbGj88+uV2ApSAn6kWg2au3iJb+0Zi7kKtqffIESgNcRXWZWmSA=="],
|
||||
|
||||
"@lezer/lr": ["@lezer/lr@1.4.3", "", { "dependencies": { "@lezer/common": "^1.0.0" } }, "sha512-yenN5SqAxAPv/qMnpWW0AT7l+SxVrgG+u0tNsRQWqbrz66HIl8DnEbBObvy21J5K7+I1v7gsAnlE2VQ5yYVSeA=="],
|
||||
"@lezer/lr": ["@lezer/lr@1.4.2", "", { "dependencies": { "@lezer/common": "^1.0.0" } }, "sha512-pu0K1jCIdnQ12aWNaAVU5bzi7Bd1w54J3ECgANPmYLtQKP0HBj2cE/5coBD66MT10xbtIuUr7tg0Shbsvk0mDA=="],
|
||||
|
||||
"@octokit/app": ["@octokit/app@14.1.0", "", { "dependencies": { "@octokit/auth-app": "^6.0.0", "@octokit/auth-unauthenticated": "^5.0.0", "@octokit/core": "^5.0.0", "@octokit/oauth-app": "^6.0.0", "@octokit/plugin-paginate-rest": "^9.0.0", "@octokit/types": "^12.0.0", "@octokit/webhooks": "^12.0.4" } }, "sha512-g3uEsGOQCBl1+W1rgfwoRFUIR6PtvB2T1E4RpygeUU5LrLvlOqcxrt5lfykIeRpUPpupreGJUYl70fqMDXdTpw=="],
|
||||
|
||||
@@ -146,7 +150,7 @@
|
||||
|
||||
"@sentry/types": ["@sentry/types@7.120.4", "", {}, "sha512-cUq2hSSe6/qrU6oZsEP4InMI5VVdD86aypE+ENrQ6eZEVLTCYm1w6XhW1NvIu3UuWh7gZec4a9J7AFpYxki88Q=="],
|
||||
|
||||
"@types/aws-lambda": ["@types/aws-lambda@8.10.159", "", {}, "sha512-SAP22WSGNN12OQ8PlCzGzRCZ7QDCwI85dQZbmpz7+mAk+L7j+wI7qnvmdKh+o7A5LaOp6QnOZ2NJphAZQTTHQg=="],
|
||||
"@types/aws-lambda": ["@types/aws-lambda@8.10.152", "", {}, "sha512-soT/c2gYBnT5ygwiHPmd9a1bftj462NWVk2tKCc1PYHSIacB2UwbTS2zYG4jzag1mRDuzg/OjtxQjQ2NKRB6Rw=="],
|
||||
|
||||
"@types/btoa-lite": ["@types/btoa-lite@1.0.2", "", {}, "sha512-ZYbcE2x7yrvNFJiU7xJGrpF/ihpkM7zKgw8bha3LNJSesvTtUNxbpzaT7WXBIryf6jovisrxTBvymxMeLLj1Mg=="],
|
||||
|
||||
@@ -156,7 +160,9 @@
|
||||
|
||||
"@types/ms": ["@types/ms@2.1.0", "", {}, "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA=="],
|
||||
|
||||
"@types/node": ["@types/node@25.0.0", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-rl78HwuZlaDIUSeUKkmogkhebA+8K1Hy7tddZuJ3D0xV8pZSfsYGTsliGUol1JPzu9EKnTxPC4L1fiWouStRew=="],
|
||||
"@types/node": ["@types/node@24.2.1", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="],
|
||||
|
||||
"@types/react": ["@types/react@19.1.10", "", { "dependencies": { "csstype": "^3.0.2" } }, "sha512-EhBeSYX0Y6ye8pNebpKrwFJq7BoQ8J5SO6NlvNwwHjSj6adXJViPQrKlsyPw7hLBLvckEMO1yxeGdR82YBBlDg=="],
|
||||
|
||||
"aggregate-error": ["aggregate-error@3.1.0", "", { "dependencies": { "clean-stack": "^2.0.0", "indent-string": "^4.0.0" } }, "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA=="],
|
||||
|
||||
@@ -186,9 +192,11 @@
|
||||
|
||||
"constant-case": ["constant-case@3.0.4", "", { "dependencies": { "no-case": "^3.0.4", "tslib": "^2.0.3", "upper-case": "^2.0.2" } }, "sha512-I2hSBi7Vvs7BEuJDr5dDHfzb/Ruj3FyvFyh7KLilAjNQw3Be+xgqUBA2W6scVEcL0hL1dwPRtIqEPVUCKkSsyQ=="],
|
||||
|
||||
"csstype": ["csstype@3.1.3", "", {}, "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw=="],
|
||||
|
||||
"deprecation": ["deprecation@2.3.1", "", {}, "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ=="],
|
||||
|
||||
"detect-libc": ["detect-libc@2.1.2", "", {}, "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ=="],
|
||||
"detect-libc": ["detect-libc@2.0.4", "", {}, "sha512-3UDv+G9CsCKO1WKMGw9fwq/SWJYbI0c5Y7LU1AXYoDdbhE2AHQ6N6Nb34sG8Fj7T5APy8qXDCKuuIHd1BR0tVA=="],
|
||||
|
||||
"dot-case": ["dot-case@3.0.4", "", { "dependencies": { "no-case": "^3.0.4", "tslib": "^2.0.3" } }, "sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w=="],
|
||||
|
||||
@@ -212,29 +220,27 @@
|
||||
|
||||
"jws": ["jws@3.2.2", "", { "dependencies": { "jwa": "^1.4.1", "safe-buffer": "^5.0.1" } }, "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA=="],
|
||||
|
||||
"lightningcss": ["lightningcss@1.30.2", "", { "dependencies": { "detect-libc": "^2.0.3" }, "optionalDependencies": { "lightningcss-android-arm64": "1.30.2", "lightningcss-darwin-arm64": "1.30.2", "lightningcss-darwin-x64": "1.30.2", "lightningcss-freebsd-x64": "1.30.2", "lightningcss-linux-arm-gnueabihf": "1.30.2", "lightningcss-linux-arm64-gnu": "1.30.2", "lightningcss-linux-arm64-musl": "1.30.2", "lightningcss-linux-x64-gnu": "1.30.2", "lightningcss-linux-x64-musl": "1.30.2", "lightningcss-win32-arm64-msvc": "1.30.2", "lightningcss-win32-x64-msvc": "1.30.2" } }, "sha512-utfs7Pr5uJyyvDETitgsaqSyjCb2qNRAtuqUeWIAKztsOYdcACf2KtARYXg2pSvhkt+9NfoaNY7fxjl6nuMjIQ=="],
|
||||
"lightningcss": ["lightningcss@1.30.1", "", { "dependencies": { "detect-libc": "^2.0.3" }, "optionalDependencies": { "lightningcss-darwin-arm64": "1.30.1", "lightningcss-darwin-x64": "1.30.1", "lightningcss-freebsd-x64": "1.30.1", "lightningcss-linux-arm-gnueabihf": "1.30.1", "lightningcss-linux-arm64-gnu": "1.30.1", "lightningcss-linux-arm64-musl": "1.30.1", "lightningcss-linux-x64-gnu": "1.30.1", "lightningcss-linux-x64-musl": "1.30.1", "lightningcss-win32-arm64-msvc": "1.30.1", "lightningcss-win32-x64-msvc": "1.30.1" } }, "sha512-xi6IyHML+c9+Q3W0S4fCQJOym42pyurFiJUHEcEyHS0CeKzia4yZDEsLlqOFykxOdHpNy0NmvVO31vcSqAxJCg=="],
|
||||
|
||||
"lightningcss-android-arm64": ["lightningcss-android-arm64@1.30.2", "", { "os": "android", "cpu": "arm64" }, "sha512-BH9sEdOCahSgmkVhBLeU7Hc9DWeZ1Eb6wNS6Da8igvUwAe0sqROHddIlvU06q3WyXVEOYDZ6ykBZQnjTbmo4+A=="],
|
||||
"lightningcss-darwin-arm64": ["lightningcss-darwin-arm64@1.30.1", "", { "os": "darwin", "cpu": "arm64" }, "sha512-c8JK7hyE65X1MHMN+Viq9n11RRC7hgin3HhYKhrMyaXflk5GVplZ60IxyoVtzILeKr+xAJwg6zK6sjTBJ0FKYQ=="],
|
||||
|
||||
"lightningcss-darwin-arm64": ["lightningcss-darwin-arm64@1.30.2", "", { "os": "darwin", "cpu": "arm64" }, "sha512-ylTcDJBN3Hp21TdhRT5zBOIi73P6/W0qwvlFEk22fkdXchtNTOU4Qc37SkzV+EKYxLouZ6M4LG9NfZ1qkhhBWA=="],
|
||||
"lightningcss-darwin-x64": ["lightningcss-darwin-x64@1.30.1", "", { "os": "darwin", "cpu": "x64" }, "sha512-k1EvjakfumAQoTfcXUcHQZhSpLlkAuEkdMBsI/ivWw9hL+7FtilQc0Cy3hrx0AAQrVtQAbMI7YjCgYgvn37PzA=="],
|
||||
|
||||
"lightningcss-darwin-x64": ["lightningcss-darwin-x64@1.30.2", "", { "os": "darwin", "cpu": "x64" }, "sha512-oBZgKchomuDYxr7ilwLcyms6BCyLn0z8J0+ZZmfpjwg9fRVZIR5/GMXd7r9RH94iDhld3UmSjBM6nXWM2TfZTQ=="],
|
||||
"lightningcss-freebsd-x64": ["lightningcss-freebsd-x64@1.30.1", "", { "os": "freebsd", "cpu": "x64" }, "sha512-kmW6UGCGg2PcyUE59K5r0kWfKPAVy4SltVeut+umLCFoJ53RdCUWxcRDzO1eTaxf/7Q2H7LTquFHPL5R+Gjyig=="],
|
||||
|
||||
"lightningcss-freebsd-x64": ["lightningcss-freebsd-x64@1.30.2", "", { "os": "freebsd", "cpu": "x64" }, "sha512-c2bH6xTrf4BDpK8MoGG4Bd6zAMZDAXS569UxCAGcA7IKbHNMlhGQ89eRmvpIUGfKWNVdbhSbkQaWhEoMGmGslA=="],
|
||||
"lightningcss-linux-arm-gnueabihf": ["lightningcss-linux-arm-gnueabihf@1.30.1", "", { "os": "linux", "cpu": "arm" }, "sha512-MjxUShl1v8pit+6D/zSPq9S9dQ2NPFSQwGvxBCYaBYLPlCWuPh9/t1MRS8iUaR8i+a6w7aps+B4N0S1TYP/R+Q=="],
|
||||
|
||||
"lightningcss-linux-arm-gnueabihf": ["lightningcss-linux-arm-gnueabihf@1.30.2", "", { "os": "linux", "cpu": "arm" }, "sha512-eVdpxh4wYcm0PofJIZVuYuLiqBIakQ9uFZmipf6LF/HRj5Bgm0eb3qL/mr1smyXIS1twwOxNWndd8z0E374hiA=="],
|
||||
"lightningcss-linux-arm64-gnu": ["lightningcss-linux-arm64-gnu@1.30.1", "", { "os": "linux", "cpu": "arm64" }, "sha512-gB72maP8rmrKsnKYy8XUuXi/4OctJiuQjcuqWNlJQ6jZiWqtPvqFziskH3hnajfvKB27ynbVCucKSm2rkQp4Bw=="],
|
||||
|
||||
"lightningcss-linux-arm64-gnu": ["lightningcss-linux-arm64-gnu@1.30.2", "", { "os": "linux", "cpu": "arm64" }, "sha512-UK65WJAbwIJbiBFXpxrbTNArtfuznvxAJw4Q2ZGlU8kPeDIWEX1dg3rn2veBVUylA2Ezg89ktszWbaQnxD/e3A=="],
|
||||
"lightningcss-linux-arm64-musl": ["lightningcss-linux-arm64-musl@1.30.1", "", { "os": "linux", "cpu": "arm64" }, "sha512-jmUQVx4331m6LIX+0wUhBbmMX7TCfjF5FoOH6SD1CttzuYlGNVpA7QnrmLxrsub43ClTINfGSYyHe2HWeLl5CQ=="],
|
||||
|
||||
"lightningcss-linux-arm64-musl": ["lightningcss-linux-arm64-musl@1.30.2", "", { "os": "linux", "cpu": "arm64" }, "sha512-5Vh9dGeblpTxWHpOx8iauV02popZDsCYMPIgiuw97OJ5uaDsL86cnqSFs5LZkG3ghHoX5isLgWzMs+eD1YzrnA=="],
|
||||
"lightningcss-linux-x64-gnu": ["lightningcss-linux-x64-gnu@1.30.1", "", { "os": "linux", "cpu": "x64" }, "sha512-piWx3z4wN8J8z3+O5kO74+yr6ze/dKmPnI7vLqfSqI8bccaTGY5xiSGVIJBDd5K5BHlvVLpUB3S2YCfelyJ1bw=="],
|
||||
|
||||
"lightningcss-linux-x64-gnu": ["lightningcss-linux-x64-gnu@1.30.2", "", { "os": "linux", "cpu": "x64" }, "sha512-Cfd46gdmj1vQ+lR6VRTTadNHu6ALuw2pKR9lYq4FnhvgBc4zWY1EtZcAc6EffShbb1MFrIPfLDXD6Xprbnni4w=="],
|
||||
"lightningcss-linux-x64-musl": ["lightningcss-linux-x64-musl@1.30.1", "", { "os": "linux", "cpu": "x64" }, "sha512-rRomAK7eIkL+tHY0YPxbc5Dra2gXlI63HL+v1Pdi1a3sC+tJTcFrHX+E86sulgAXeI7rSzDYhPSeHHjqFhqfeQ=="],
|
||||
|
||||
"lightningcss-linux-x64-musl": ["lightningcss-linux-x64-musl@1.30.2", "", { "os": "linux", "cpu": "x64" }, "sha512-XJaLUUFXb6/QG2lGIW6aIk6jKdtjtcffUT0NKvIqhSBY3hh9Ch+1LCeH80dR9q9LBjG3ewbDjnumefsLsP6aiA=="],
|
||||
"lightningcss-win32-arm64-msvc": ["lightningcss-win32-arm64-msvc@1.30.1", "", { "os": "win32", "cpu": "arm64" }, "sha512-mSL4rqPi4iXq5YVqzSsJgMVFENoa4nGTT/GjO2c0Yl9OuQfPsIfncvLrEW6RbbB24WtZ3xP/2CCmI3tNkNV4oA=="],
|
||||
|
||||
"lightningcss-win32-arm64-msvc": ["lightningcss-win32-arm64-msvc@1.30.2", "", { "os": "win32", "cpu": "arm64" }, "sha512-FZn+vaj7zLv//D/192WFFVA0RgHawIcHqLX9xuWiQt7P0PtdFEVaxgF9rjM/IRYHQXNnk61/H/gb2Ei+kUQ4xQ=="],
|
||||
|
||||
"lightningcss-win32-x64-msvc": ["lightningcss-win32-x64-msvc@1.30.2", "", { "os": "win32", "cpu": "x64" }, "sha512-5g1yc73p+iAkid5phb4oVFMB45417DkRevRbt/El/gKXJk4jid+vPFF/AXbxn05Aky8PapwzZrdJShv5C0avjw=="],
|
||||
"lightningcss-win32-x64-msvc": ["lightningcss-win32-x64-msvc@1.30.1", "", { "os": "win32", "cpu": "x64" }, "sha512-PVqXh48wh4T53F/1CCu8PIPCxLzWyCnn/9T5W1Jpmdy5h9Cwd+0YQS6/LwhHXSafuc61/xg9Lv5OrCby6a++jg=="],
|
||||
|
||||
"lodash.includes": ["lodash.includes@4.3.0", "", {}, "sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w=="],
|
||||
|
||||
@@ -278,7 +284,7 @@
|
||||
|
||||
"prettier": ["prettier@3.6.2", "", { "bin": { "prettier": "bin/prettier.cjs" } }, "sha512-I7AIg5boAr5R0FFtJ6rCfD+LFsWHp81dolrFD8S79U9tb8Az2nGrJncnMSnys+bpQJfRUzqs9hnA81OAA3hCuQ=="],
|
||||
|
||||
"prettier-plugin-organize-imports": ["prettier-plugin-organize-imports@4.3.0", "", { "peerDependencies": { "prettier": ">=2.0", "typescript": ">=2.9", "vue-tsc": "^2.1.0 || 3" }, "optionalPeers": ["vue-tsc"] }, "sha512-FxFz0qFhyBsGdIsb697f/EkvHzi5SZOhWAjxcx2dLt+Q532bAlhswcXGYB1yzjZ69kW8UoadFBw7TyNwlq96Iw=="],
|
||||
"prettier-plugin-organize-imports": ["prettier-plugin-organize-imports@4.2.0", "", { "peerDependencies": { "prettier": ">=2.0", "typescript": ">=2.9", "vue-tsc": "^2.1.0 || 3" }, "optionalPeers": ["vue-tsc"] }, "sha512-Zdy27UhlmyvATZi67BTnLcKTo8fm6Oik59Sz6H64PgZJVs6NJpPD1mT240mmJn62c98/QaL+r3kx9Q3gRpDajg=="],
|
||||
|
||||
"react": ["react@18.3.1", "", { "dependencies": { "loose-envify": "^1.1.0" } }, "sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ=="],
|
||||
|
||||
@@ -290,7 +296,7 @@
|
||||
|
||||
"scheduler": ["scheduler@0.23.2", "", { "dependencies": { "loose-envify": "^1.1.0" } }, "sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ=="],
|
||||
|
||||
"semver": ["semver@7.7.3", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q=="],
|
||||
"semver": ["semver@7.7.2", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA=="],
|
||||
|
||||
"sentence-case": ["sentence-case@3.0.4", "", { "dependencies": { "no-case": "^3.0.4", "tslib": "^2.0.3", "upper-case-first": "^2.0.2" } }, "sha512-8LS0JInaQMCRoQ7YUytAo/xUu5W2XnQxV2HI/6uM6U7CITS1RqPElr30V6uIqyMKM9lJGRVFy5/4CuzcixNYSg=="],
|
||||
|
||||
@@ -306,7 +312,7 @@
|
||||
|
||||
"uglify-js": ["uglify-js@3.19.3", "", { "bin": { "uglifyjs": "bin/uglifyjs" } }, "sha512-v3Xu+yuwBXisp6QYTcH4UbH+xYJXqnq2m/LtQVWKWzYc1iehYnLixoQDN9FH6/j9/oybfd6W9Ghwkl8+UMKTKQ=="],
|
||||
|
||||
"undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="],
|
||||
"undici-types": ["undici-types@7.10.0", "", {}, "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag=="],
|
||||
|
||||
"universal-github-app-jwt": ["universal-github-app-jwt@1.2.0", "", { "dependencies": { "@types/jsonwebtoken": "^9.0.0", "jsonwebtoken": "^9.0.2" } }, "sha512-dncpMpnsKBk0eetwfN8D8OUHGfiDhhJ+mtsbMl+7PfW7mYjiH8LIcqRmYMtzYLgSh47HjfdBtrBwIQ/gizKR3g=="],
|
||||
|
||||
|
||||
@@ -10,4 +10,3 @@ preload = "./test/preload.ts"

[install]
linker = "isolated"
minimumReleaseAge = 259200 # three days
@@ -51,23 +51,6 @@ if(ENABLE_ASAN)
|
||||
)
|
||||
endif()
|
||||
|
||||
if(ENABLE_FUZZILLI)
|
||||
register_compiler_flags(
|
||||
DESCRIPTION "Enable coverage instrumentation for fuzzing"
|
||||
-fsanitize-coverage=trace-pc-guard
|
||||
)
|
||||
|
||||
register_linker_flags(
|
||||
DESCRIPTION "Link coverage instrumentation"
|
||||
-fsanitize-coverage=trace-pc-guard
|
||||
)
|
||||
|
||||
register_compiler_flags(
|
||||
DESCRIPTION "Enable fuzzilli-specific code"
|
||||
-DFUZZILLI_ENABLED
|
||||
)
|
||||
endif()
|
||||
|
||||
# --- Optimization level ---
|
||||
if(DEBUG)
|
||||
register_compiler_flags(
|
||||
@@ -103,20 +86,11 @@ elseif(APPLE)
|
||||
endif()
|
||||
|
||||
if(UNIX)
|
||||
# Nix LLVM doesn't support zstd compression, use zlib instead
|
||||
if(DEFINED ENV{NIX_CC})
|
||||
register_compiler_flags(
|
||||
DESCRIPTION "Enable debug symbols (zlib-compressed for Nix)"
|
||||
-g3 -gz=zlib ${DEBUG}
|
||||
-g1 ${RELEASE}
|
||||
)
|
||||
else()
|
||||
register_compiler_flags(
|
||||
DESCRIPTION "Enable debug symbols (zstd-compressed)"
|
||||
-g3 -gz=zstd ${DEBUG}
|
||||
-g1 ${RELEASE}
|
||||
)
|
||||
endif()
|
||||
register_compiler_flags(
|
||||
DESCRIPTION "Enable debug symbols"
|
||||
-g3 -gz=zstd ${DEBUG}
|
||||
-g1 ${RELEASE}
|
||||
)
|
||||
|
||||
register_compiler_flags(
|
||||
DESCRIPTION "Optimize debug symbols for LLDB"
|
||||
@@ -232,6 +206,43 @@ if(ENABLE_ASSERTIONS)
|
||||
DESCRIPTION "Do not eliminate null-pointer checks"
|
||||
-fno-delete-null-pointer-checks
|
||||
)
|
||||
|
||||
register_compiler_definitions(
|
||||
DESCRIPTION "Enable libc++ assertions"
|
||||
_LIBCPP_ENABLE_ASSERTIONS=1
|
||||
_LIBCPP_HARDENING_MODE=_LIBCPP_HARDENING_MODE_EXTENSIVE ${RELEASE}
|
||||
_LIBCPP_HARDENING_MODE=_LIBCPP_HARDENING_MODE_DEBUG ${DEBUG}
|
||||
)
|
||||
|
||||
register_compiler_definitions(
|
||||
DESCRIPTION "Enable fortified sources"
|
||||
_FORTIFY_SOURCE=3
|
||||
)
|
||||
|
||||
if(LINUX)
|
||||
register_compiler_definitions(
|
||||
DESCRIPTION "Enable glibc++ assertions"
|
||||
_GLIBCXX_ASSERTIONS=1
|
||||
)
|
||||
endif()
|
||||
else()
|
||||
register_compiler_definitions(
|
||||
DESCRIPTION "Disable debug assertions"
|
||||
NDEBUG=1
|
||||
)
|
||||
|
||||
register_compiler_definitions(
|
||||
DESCRIPTION "Disable libc++ assertions"
|
||||
_LIBCPP_ENABLE_ASSERTIONS=0
|
||||
_LIBCPP_HARDENING_MODE=_LIBCPP_HARDENING_MODE_NONE
|
||||
)
|
||||
|
||||
if(LINUX)
|
||||
register_compiler_definitions(
|
||||
DESCRIPTION "Disable glibc++ assertions"
|
||||
_GLIBCXX_ASSERTIONS=0
|
||||
)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
# --- Diagnostics ---
|
||||
@@ -282,6 +293,14 @@ if(UNIX AND CI)
|
||||
)
|
||||
endif()
|
||||
|
||||
# --- Features ---
|
||||
|
||||
# Valgrind cannot handle SSE4.2 instructions
|
||||
# This is needed for picohttpparser
|
||||
if(ENABLE_VALGRIND AND ARCH STREQUAL "x64")
|
||||
register_compiler_definitions(__SSE4_2__=0)
|
||||
endif()
|
||||
|
||||
# --- Other ---
|
||||
|
||||
# Workaround for CMake and clang-cl bug.
|
||||
|
||||
@@ -125,8 +125,7 @@ setx(CWD ${CMAKE_SOURCE_DIR})
|
||||
setx(BUILD_PATH ${CMAKE_BINARY_DIR})
|
||||
|
||||
optionx(CACHE_PATH FILEPATH "The path to the cache directory" DEFAULT ${BUILD_PATH}/cache)
|
||||
optionx(CACHE_STRATEGY "auto|distributed|local|none" "The strategy to use for caching" DEFAULT
|
||||
"auto")
|
||||
optionx(CACHE_STRATEGY "read-write|read-only|write-only|none" "The strategy to use for caching" DEFAULT "read-write")
|
||||
|
||||
optionx(CI BOOL "If CI is enabled" DEFAULT OFF)
|
||||
optionx(ENABLE_ANALYSIS BOOL "If static analysis targets should be enabled" DEFAULT OFF)
|
||||
@@ -137,44 +136,21 @@ else()
|
||||
set(WARNING WARNING)
|
||||
endif()
|
||||
|
||||
# TODO: This causes flaky zig builds in CI, so temporarily disable it.
|
||||
# if(CI)
|
||||
# set(DEFAULT_VENDOR_PATH ${CACHE_PATH}/vendor)
|
||||
# else()
|
||||
# set(DEFAULT_VENDOR_PATH ${CWD}/vendor)
|
||||
# endif()
|
||||
|
||||
optionx(VENDOR_PATH FILEPATH "The path to the vendor directory" DEFAULT ${CWD}/vendor)
|
||||
optionx(TMP_PATH FILEPATH "The path to the temporary directory" DEFAULT ${BUILD_PATH}/tmp)
|
||||
|
||||
# --- Helper functions ---
|
||||
|
||||
# list_filter_out_regex()
|
||||
#
|
||||
# Description:
|
||||
# Filters out elements from a list that match a regex pattern.
|
||||
#
|
||||
# Arguments:
|
||||
# list - The list of strings to traverse
|
||||
# pattern - The regex pattern to filter out
|
||||
# touched - A variable to set if any items were removed
|
||||
function(list_filter_out_regex list pattern touched)
|
||||
set(result_list "${${list}}")
|
||||
set(keep_list)
|
||||
set(was_modified OFF)
|
||||
|
||||
foreach(line IN LISTS result_list)
|
||||
if(line MATCHES "${pattern}")
|
||||
set(was_modified ON)
|
||||
else()
|
||||
list(APPEND keep_list ${line})
|
||||
endif()
|
||||
endforeach()
|
||||
|
||||
set(${list} "${keep_list}" PARENT_SCOPE)
|
||||
set(${touched} ${was_modified} PARENT_SCOPE)
|
||||
endfunction()
|
||||
|
||||
# setenv()
|
||||
# Description:
|
||||
# Sets an environment variable during the build step, and writes it to a .env file.
|
||||
#
|
||||
# See Also:
|
||||
# unsetenv()
|
||||
#
|
||||
# Arguments:
|
||||
# variable string - The variable to set
|
||||
# value string - The value to set the variable to
|
||||
@@ -187,7 +163,13 @@ function(setenv variable value)
|
||||
|
||||
if(EXISTS ${ENV_PATH})
|
||||
file(STRINGS ${ENV_PATH} ENV_FILE ENCODING UTF-8)
|
||||
list_filter_out_regex(ENV_FILE "^${variable}=" ENV_MODIFIED)
|
||||
|
||||
foreach(line ${ENV_FILE})
|
||||
if(line MATCHES "^${variable}=")
|
||||
list(REMOVE_ITEM ENV_FILE ${line})
|
||||
set(ENV_MODIFIED ON)
|
||||
endif()
|
||||
endforeach()
|
||||
|
||||
if(ENV_MODIFIED)
|
||||
list(APPEND ENV_FILE "${variable}=${value}")
|
||||
@@ -203,28 +185,6 @@ function(setenv variable value)
|
||||
message(STATUS "Set ENV ${variable}: ${value}")
|
||||
endfunction()
|
||||
|
||||
# See setenv()
|
||||
# Description:
|
||||
# Exact opposite of setenv().
|
||||
# Arguments:
|
||||
# variable string - The variable to unset.
|
||||
# See Also:
|
||||
# setenv()
|
||||
function(unsetenv variable)
|
||||
set(ENV_PATH ${BUILD_PATH}/.env)
|
||||
if(NOT EXISTS ${ENV_PATH})
|
||||
return()
|
||||
endif()
|
||||
|
||||
file(STRINGS ${ENV_PATH} ENV_FILE ENCODING UTF-8)
|
||||
list_filter_out_regex(ENV_FILE "^${variable}=" ENV_MODIFIED)
|
||||
|
||||
if(ENV_MODIFIED)
|
||||
list(JOIN ENV_FILE "\n" ENV_FILE)
|
||||
file(WRITE ${ENV_PATH} ${ENV_FILE})
|
||||
endif()
|
||||
endfunction()
|
||||
|
||||
# satisfies_range()
|
||||
# Description:
|
||||
# Check if a version satisfies a version range or list of ranges
|
||||
@@ -357,7 +317,7 @@ function(find_command)
|
||||
${FIND_VALIDATOR}
|
||||
)
|
||||
|
||||
if(FIND_REQUIRED AND ${FIND_VARIABLE} MATCHES "NOTFOUND")
|
||||
if(NOT FIND_REQUIRED STREQUAL "OFF" AND ${FIND_VARIABLE} MATCHES "NOTFOUND")
|
||||
set(error "Command not found: \"${FIND_NAME}\"")
|
||||
|
||||
if(FIND_VERSION)
|
||||
@@ -957,6 +917,10 @@ function(register_compiler_flags)
|
||||
endforeach()
|
||||
endfunction()
|
||||
|
||||
function(register_compiler_definitions)
|
||||
|
||||
endfunction()
|
||||
|
||||
# register_linker_flags()
|
||||
# Description:
|
||||
# Registers a linker flag, similar to `add_link_options()`.
|
||||
|
||||
@@ -60,10 +60,10 @@ endif()
|
||||
# Windows Code Signing Option
|
||||
if(WIN32)
|
||||
optionx(ENABLE_WINDOWS_CODESIGNING BOOL "Enable Windows code signing with DigiCert KeyLocker" DEFAULT OFF)
|
||||
|
||||
|
||||
if(ENABLE_WINDOWS_CODESIGNING)
|
||||
message(STATUS "Windows code signing: ENABLED")
|
||||
|
||||
|
||||
# Check for required environment variables
|
||||
if(NOT DEFINED ENV{SM_API_KEY})
|
||||
message(WARNING "SM_API_KEY not set - code signing may fail")
|
||||
@@ -114,10 +114,8 @@ endif()
|
||||
|
||||
if(DEBUG AND ((APPLE AND ARCH STREQUAL "aarch64") OR LINUX))
|
||||
set(DEFAULT_ASAN ON)
|
||||
set(DEFAULT_VALGRIND OFF)
|
||||
else()
|
||||
set(DEFAULT_ASAN OFF)
|
||||
set(DEFAULT_VALGRIND OFF)
|
||||
endif()
|
||||
|
||||
optionx(ENABLE_ASAN BOOL "If ASAN support should be enabled" DEFAULT ${DEFAULT_ASAN})
|
||||
@@ -127,8 +125,6 @@ if (NOT ENABLE_ASAN)
|
||||
set(ENABLE_ZIG_ASAN OFF)
|
||||
endif()
|
||||
|
||||
optionx(ENABLE_FUZZILLI BOOL "If fuzzilli support should be enabled" DEFAULT OFF)
|
||||
|
||||
if(RELEASE AND LINUX AND CI AND NOT ENABLE_ASSERTIONS AND NOT ENABLE_ASAN)
|
||||
set(DEFAULT_LTO ON)
|
||||
else()
|
||||
@@ -142,6 +138,11 @@ if(ENABLE_ASAN AND ENABLE_LTO)
|
||||
setx(ENABLE_LTO OFF)
|
||||
endif()
|
||||
|
||||
if(USE_VALGRIND AND NOT USE_BASELINE)
|
||||
message(WARNING "If valgrind is enabled, baseline must also be enabled")
|
||||
setx(USE_BASELINE ON)
|
||||
endif()
|
||||
|
||||
if(BUILDKITE_COMMIT)
|
||||
set(DEFAULT_REVISION ${BUILDKITE_COMMIT})
|
||||
else()
|
||||
@@ -199,9 +200,4 @@ optionx(USE_WEBKIT_ICU BOOL "Use the ICU libraries from WebKit" DEFAULT ${DEFAUL
|
||||
|
||||
optionx(ERROR_LIMIT STRING "Maximum number of errors to show when compiling C++ code" DEFAULT "100")
|
||||
|
||||
# This is not an `option` because setting this variable to OFF is experimental
|
||||
# and unsupported. This replaces the `use_mimalloc` variable previously in
|
||||
# bun.zig, and enables C++ code to also be aware of the option.
|
||||
set(USE_MIMALLOC_AS_DEFAULT_ALLOCATOR ON)
|
||||
|
||||
list(APPEND CMAKE_ARGS -DCMAKE_EXPORT_COMPILE_COMMANDS=ON)
|
||||
|
||||
@@ -13,10 +13,7 @@
|
||||
},
|
||||
{
|
||||
"output": "JavaScriptSources.txt",
|
||||
"paths": [
|
||||
"src/js/**/*.{js,ts}",
|
||||
"src/install/PackageManager/scanner-entry.ts"
|
||||
]
|
||||
"paths": ["src/js/**/*.{js,ts}"]
|
||||
},
|
||||
{
|
||||
"output": "JavaScriptCodegenSources.txt",
|
||||
@@ -31,14 +28,6 @@
|
||||
"output": "BindgenSources.txt",
|
||||
"paths": ["src/**/*.bind.ts"]
|
||||
},
|
||||
{
|
||||
"output": "BindgenV2Sources.txt",
|
||||
"paths": ["src/**/*.bindv2.ts"]
|
||||
},
|
||||
{
|
||||
"output": "BindgenV2InternalSources.txt",
|
||||
"paths": ["src/codegen/bindgenv2/**/*.ts"]
|
||||
},
|
||||
{
|
||||
"output": "ZigSources.txt",
|
||||
"paths": ["src/**/*.zig"]
|
||||
|
||||
@@ -34,6 +34,26 @@ register_command(
|
||||
ALWAYS_RUN
|
||||
)
|
||||
|
||||
if(GIT_CHANGED_SOURCES)
|
||||
set(CLANG_FORMAT_CHANGED_SOURCES)
|
||||
foreach(source ${CLANG_FORMAT_SOURCES})
|
||||
list(FIND GIT_CHANGED_SOURCES ${source} index)
|
||||
if(NOT ${index} EQUAL -1)
|
||||
list(APPEND CLANG_FORMAT_CHANGED_SOURCES ${source})
|
||||
endif()
|
||||
endforeach()
|
||||
endif()
|
||||
|
||||
if(CLANG_FORMAT_CHANGED_SOURCES)
|
||||
set(CLANG_FORMAT_DIFF_COMMAND ${CLANG_FORMAT_PROGRAM}
|
||||
-i # edits files in-place
|
||||
--verbose
|
||||
${CLANG_FORMAT_CHANGED_SOURCES}
|
||||
)
|
||||
else()
|
||||
set(CLANG_FORMAT_DIFF_COMMAND ${CMAKE_COMMAND} -E echo "No changed files for clang-format")
|
||||
endif()
|
||||
|
||||
register_command(
|
||||
TARGET
|
||||
clang-format-diff
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
set(CLANG_TIDY_SOURCES ${BUN_C_SOURCES} ${BUN_CXX_SOURCES})
|
||||
|
||||
set(CLANG_TIDY_COMMAND ${CLANG_TIDY_PROGRAM}
|
||||
-p ${BUILD_PATH}
|
||||
-p ${BUILD_PATH}
|
||||
--config-file=${CWD}/.clang-tidy
|
||||
)
|
||||
|
||||
@@ -40,6 +40,27 @@ register_command(
|
||||
ALWAYS_RUN
|
||||
)
|
||||
|
||||
if(GIT_CHANGED_SOURCES)
|
||||
set(CLANG_TIDY_CHANGED_SOURCES)
|
||||
foreach(source ${CLANG_TIDY_SOURCES})
|
||||
list(FIND GIT_CHANGED_SOURCES ${source} index)
|
||||
if(NOT ${index} EQUAL -1)
|
||||
list(APPEND CLANG_TIDY_CHANGED_SOURCES ${source})
|
||||
endif()
|
||||
endforeach()
|
||||
endif()
|
||||
|
||||
if(CLANG_TIDY_CHANGED_SOURCES)
|
||||
set(CLANG_TIDY_DIFF_COMMAND ${CLANG_TIDY_PROGRAM}
|
||||
${CLANG_TIDY_CHANGED_SOURCES}
|
||||
--fix
|
||||
--fix-errors
|
||||
--fix-notes
|
||||
)
|
||||
else()
|
||||
set(CLANG_TIDY_DIFF_COMMAND ${CMAKE_COMMAND} -E echo "No changed files for clang-tidy")
|
||||
endif()
|
||||
|
||||
register_command(
|
||||
TARGET
|
||||
clang-tidy-diff
|
||||
|
||||
33
cmake/analysis/RunCppCheck.cmake
Normal file
@@ -0,0 +1,33 @@
|
||||
# https://cppcheck.sourceforge.io/
|
||||
|
||||
find_command(
|
||||
VARIABLE
|
||||
CPPCHECK_EXECUTABLE
|
||||
COMMAND
|
||||
cppcheck
|
||||
REQUIRED
|
||||
OFF
|
||||
)
|
||||
|
||||
set(CPPCHECK_COMMAND ${CPPCHECK_EXECUTABLE}
|
||||
--cppcheck-build-dir=${BUILD_PATH}/cppcheck
|
||||
--project=${BUILD_PATH}/compile_commands.json
|
||||
--clang=${CMAKE_CXX_COMPILER}
|
||||
--std=c++${CMAKE_CXX_STANDARD}
|
||||
--report-progress
|
||||
--showtime=summary
|
||||
)
|
||||
|
||||
register_command(
|
||||
TARGET
|
||||
cppcheck
|
||||
COMMENT
|
||||
"Running cppcheck"
|
||||
COMMAND
|
||||
${CMAKE_COMMAND} -E make_directory cppcheck
|
||||
&& ${CPPCHECK_COMMAND}
|
||||
CWD
|
||||
${BUILD_PATH}
|
||||
TARGETS
|
||||
${bun}
|
||||
)
|
||||
22
cmake/analysis/RunCppLint.cmake
Normal file
@@ -0,0 +1,22 @@
|
||||
find_command(
|
||||
VARIABLE
|
||||
CPPLINT_PROGRAM
|
||||
COMMAND
|
||||
cpplint
|
||||
REQUIRED
|
||||
OFF
|
||||
)
|
||||
|
||||
register_command(
|
||||
TARGET
|
||||
cpplint
|
||||
COMMENT
|
||||
"Running cpplint"
|
||||
COMMAND
|
||||
${CPPLINT_PROGRAM}
|
||||
${BUN_CPP_SOURCES}
|
||||
CWD
|
||||
${BUILD_PATH}
|
||||
TARGETS
|
||||
${bun}
|
||||
)
|
||||
67
cmake/analysis/RunIWYU.cmake
Normal file
@@ -0,0 +1,67 @@
|
||||
# IWYU = "Include What You Use"
|
||||
# https://include-what-you-use.org/
|
||||
|
||||
setx(IWYU_SOURCE_PATH ${CACHE_PATH}/iwyu-${LLVM_VERSION})
|
||||
setx(IWYU_BUILD_PATH ${IWYU_SOURCE_PATH}/build)
|
||||
setx(IWYU_PROGRAM ${IWYU_BUILD_PATH}/bin/include-what-you-use)
|
||||
|
||||
register_repository(
|
||||
NAME
|
||||
iwyu
|
||||
REPOSITORY
|
||||
include-what-you-use/include-what-you-use
|
||||
BRANCH
|
||||
clang_${LLVM_VERSION}
|
||||
PATH
|
||||
${IWYU_SOURCE_PATH}
|
||||
)
|
||||
|
||||
register_command(
|
||||
TARGET
|
||||
build-iwyu
|
||||
COMMENT
|
||||
"Building iwyu"
|
||||
COMMAND
|
||||
${CMAKE_COMMAND}
|
||||
-B${IWYU_BUILD_PATH}
|
||||
-G${CMAKE_GENERATOR}
|
||||
-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
|
||||
-DCMAKE_CXX_COMPILER_LAUNCHER=${CMAKE_CXX_COMPILER_LAUNCHER}
|
||||
-DIWYU_LLVM_ROOT_PATH=${LLVM_PREFIX}
|
||||
&& ${CMAKE_COMMAND}
|
||||
--build ${IWYU_BUILD_PATH}
|
||||
CWD
|
||||
${IWYU_SOURCE_PATH}
|
||||
TARGETS
|
||||
clone-iwyu
|
||||
)
|
||||
|
||||
find_command(
|
||||
VARIABLE
|
||||
PYTHON_EXECUTABLE
|
||||
COMMAND
|
||||
python3
|
||||
python
|
||||
VERSION
|
||||
>=3.0.0
|
||||
REQUIRED
|
||||
OFF
|
||||
)
|
||||
|
||||
register_command(
|
||||
TARGET
|
||||
iwyu
|
||||
COMMENT
|
||||
"Running iwyu"
|
||||
COMMAND
|
||||
${CMAKE_COMMAND}
|
||||
-E env IWYU_BINARY=${IWYU_PROGRAM}
|
||||
${PYTHON_EXECUTABLE}
|
||||
${IWYU_SOURCE_PATH}/iwyu_tool.py
|
||||
-p ${BUILD_PATH}
|
||||
CWD
|
||||
${BUILD_PATH}
|
||||
TARGETS
|
||||
build-iwyu
|
||||
${bun}
|
||||
)
|
||||
@@ -92,6 +92,26 @@ register_command(
|
||||
ALWAYS_RUN
|
||||
)
|
||||
|
||||
if(GIT_CHANGED_SOURCES)
|
||||
set(PRETTIER_CHANGED_SOURCES)
|
||||
foreach(source ${PRETTIER_SOURCES})
|
||||
list(FIND GIT_CHANGED_SOURCES ${source} index)
|
||||
if(NOT ${index} EQUAL -1)
|
||||
list(APPEND PRETTIER_CHANGED_SOURCES ${source})
|
||||
endif()
|
||||
endforeach()
|
||||
endif()
|
||||
|
||||
if(PRETTIER_CHANGED_SOURCES)
|
||||
set(PRETTIER_DIFF_COMMAND ${PRETTIER_COMMAND}
|
||||
--write
|
||||
--plugin=prettier-plugin-organize-imports
|
||||
${PRETTIER_CHANGED_SOURCES}
|
||||
)
|
||||
else()
|
||||
set(PRETTIER_DIFF_COMMAND ${CMAKE_COMMAND} -E echo "No changed files for prettier")
|
||||
endif()
|
||||
|
||||
register_command(
|
||||
TARGET
|
||||
prettier-diff
|
||||
|
||||
Some files were not shown because too many files have changed in this diff