mirror of https://github.com/oven-sh/bun
synced 2026-02-05 08:28:55 +00:00

Compare commits: claude/nod ... dylan/_wri (3 commits)

| Author | SHA1 | Date |
|---|---|---|
| | db04688674 | |
| | 5b70d1f767 | |
| | b18cc5c2c7 | |
78  .agent/agent.mjs  Normal file
@@ -0,0 +1,78 @@
import { spawnSync } from "node:child_process";
import { readFileSync, existsSync } from "node:fs";
import { parseArgs } from "node:util";

const { positionals, values } = parseArgs({
  allowPositionals: true,
  options: {
    help: {
      type: "boolean",
      short: "h",
      default: false,
    },
    interactive: {
      type: "boolean",
      short: "i",
      default: false,
    },
  },
});

if (values.help || positionals.length === 0) {
  console.log("Usage: node agent.mjs <prompt_name> [extra_args...]");
  console.log("Example: node agent.mjs triage fix bug in authentication");
  console.log("Options:");
  console.log(" -h, --help Show this help message");
  console.log(" -i, --interactive Run in interactive mode");
  process.exit(0);
}

const promptName = positionals[0].toUpperCase();
const promptFile = `.agent/${promptName}.md`;
const extraArgs = positionals.slice(1);

if (!existsSync(promptFile)) {
  console.error(`Error: Prompt file "${promptFile}" not found`);
  console.error(`Available prompts should be named like: .agent/triage.md, .agent/debug.md, etc.`);
  process.exit(1);
}

try {
  let prompt = readFileSync(promptFile, "utf-8");

  const githubEnvs = Object.entries(process.env)
    .filter(([key]) => key.startsWith("GITHUB_"))
    .sort(([a], [b]) => a.localeCompare(b));

  if (githubEnvs.length > 0) {
    const githubContext = `## GitHub Environment\n\n${githubEnvs
      .map(([key, value]) => `**${key}**: \`${value}\``)
      .join("\n")}\n\n---\n\n`;
    prompt = githubContext + prompt;
  }

  if (extraArgs.length > 0) {
    const extraArgsContext = `\n\n## Additional Arguments\n\n${extraArgs.join(" ")}\n\n---\n\n`;
    prompt = prompt + extraArgsContext;
  }

  const claudeArgs = [prompt, "--allowedTools=Edit,Write,Replace,Search", "--output-format=json"];
  if (!values.interactive) {
    claudeArgs.unshift("--print");
  }

  const { status, error } = spawnSync("claude", claudeArgs, {
    stdio: "inherit",
    encoding: "utf-8",
  });

  if (error) {
    console.error("Error running claude:", error);
    process.exit(1);
  }

  process.exit(status || 0);
} catch (error) {
  console.error(`Error reading prompt file "${promptFile}":`, error);
  process.exit(1);
}
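For context, the script above is invoked with a prompt name plus optional free-form arguments, as its own usage text describes. A minimal sketch of an invocation follows; the `triage` prompt and the extra text are illustrative only (note the script upper-cases the name, so `triage` resolves to `.agent/TRIAGE.md`):

```bash
# Non-interactive (default): prepends GITHUB_* env context, appends the extra
# arguments, and runs `claude --print` with the assembled prompt.
node .agent/agent.mjs triage "websocket test is flaky on Windows"

# Keep the claude session interactive instead of --print output:
node .agent/agent.mjs --interactive triage
```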
@@ -127,11 +127,8 @@ const testPlatforms = [
  { os: "linux", arch: "x64", distro: "debian", release: "12", tier: "latest" },
  { os: "linux", arch: "x64", baseline: true, distro: "debian", release: "12", tier: "latest" },
  { os: "linux", arch: "x64", profile: "asan", distro: "debian", release: "12", tier: "latest" },
  { os: "linux", arch: "aarch64", distro: "ubuntu", release: "25.04", tier: "latest" },
  { os: "linux", arch: "aarch64", distro: "ubuntu", release: "24.04", tier: "latest" },
  { os: "linux", arch: "x64", distro: "ubuntu", release: "25.04", tier: "latest" },
  { os: "linux", arch: "x64", distro: "ubuntu", release: "24.04", tier: "latest" },
  { os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "25.04", tier: "latest" },
  { os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "24.04", tier: "latest" },
  { os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.21", tier: "latest" },
  { os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.21", tier: "latest" },
@@ -569,7 +566,7 @@ function getTestBunStep(platform, options, testOptions = {}) {
    retry: getRetry(),
    cancel_on_build_failing: isMergeQueue(),
    parallelism: unifiedTests ? undefined : os === "darwin" ? 2 : 10,
    timeout_in_minutes: profile === "asan" || os === "windows" ? 45 : 30,
    timeout_in_minutes: profile === "asan" ? 45 : 30,
    command:
      os === "windows"
        ? `node .\\scripts\\runner.node.mjs ${args.join(" ")}`

@@ -360,8 +360,9 @@ JSC_DEFINE_HOST_FUNCTION(x509CertificateConstructorConstruct, (JSGlobalObject *

        auto* functionGlobalObject = defaultGlobalObject(getFunctionRealm(globalObject, newTarget.getObject()));
        RETURN_IF_EXCEPTION(scope, {});
        structure = InternalFunction::createSubclassStructure(globalObject, newTarget.getObject(), functionGlobalObject->NodeVMScriptStructure());
        RETURN_IF_EXCEPTION(scope, {});
        structure = InternalFunction::createSubclassStructure(
            globalObject, newTarget.getObject(), functionGlobalObject->NodeVMScriptStructure());
        scope.release();
    }

    return JSValue::encode(createX509Certificate(vm, globalObject, structure, arg));
47  .github/pull_request_template.md  vendored
@@ -1,3 +1,50 @@
### What does this PR do?

<!-- **Please explain what your changes do**, example: -->

<!--

This adds a new flag --bail to bun test. When set, it will stop running tests after the first failure. This is useful for CI environments where you want to fail fast.

-->

- [ ] Documentation or TypeScript types (it's okay to leave the rest blank in this case)
- [ ] Code changes

### How did you verify your code works?

<!-- **For code changes, please include automated tests**. Feel free to uncomment the line below -->

<!-- I wrote automated tests -->

<!-- If JavaScript/TypeScript modules or builtins changed:

- [ ] I included a test for the new code, or existing tests cover it
- [ ] I ran my tests locally and they pass (`bun-debug test test-file-name.test`)

-->

<!-- If Zig files changed:

- [ ] I checked the lifetime of memory allocated to verify it's (1) freed and (2) only freed when it should be
- [ ] I included a test for the new code, or an existing test covers it
- [ ] JSValue used outside of the stack is either wrapped in a JSC.Strong or is JSValueProtect'ed
- [ ] I wrote TypeScript/JavaScript tests and they pass locally (`bun-debug test test-file-name.test`)
-->

<!-- If new methods, getters, or setters were added to a publicly exposed class:

- [ ] I added TypeScript types for the new methods, getters, or setters
-->

<!-- If dependencies in tests changed:

- [ ] I made sure that specific versions of dependencies are used instead of ranged or tagged versions
-->

<!-- If a new builtin ESM/CJS module was added:

- [ ] I updated Aliases in `module_loader.zig` to include the new module
- [ ] I added a test that imports the module
- [ ] I added a test that require() the module
-->
33  .github/workflows/format.yml  vendored
@@ -1,30 +1,28 @@
name: autofix.ci
name: format

permissions:
  contents: read
  contents: write

on:
  workflow_call:
  workflow_dispatch:
  pull_request:
  merge_group:
  push:
    branches: ["main"]

env:
  BUN_VERSION: "1.2.11"
  LLVM_VERSION: "19.1.7"
  LLVM_VERSION_MAJOR: "19"

jobs:
  autofix:
  format:
    name: Format
    runs-on: ubuntu-latest
    concurrency:
      group: ${{ github.workflow }}-${{ github.ref }}
      cancel-in-progress: true
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Configure Git
        run: |
          git config --global core.autocrlf true
@@ -46,16 +44,25 @@ jobs:
          version: 0.14.0
      - name: Zig Format
        run: |
          bun scripts/zig-remove-unreferenced-top-level-decls.ts src/
          zig fmt src
          ./scripts/sort-imports.ts src
          bun scripts/sortImports src
          zig fmt src
      - name: Commit
        uses: stefanzweifel/git-auto-commit-action@v5
        with:
          commit_message: "`bun run zig-format`"
      - name: Prettier Format
        run: |
          bun run prettier
      - name: Commit
        uses: stefanzweifel/git-auto-commit-action@v5
        with:
          commit_message: "`bun run prettier`"
      - name: Clang Format
        run: |
          bun run clang-format
      - name: Ban Words
        run: |
          bun ./test/internal/ban-words.test.ts
      - uses: autofix-ci/action@635ffb0c9798bd160680f18fd73371e355b85f27
      - name: Commit
        uses: stefanzweifel/git-auto-commit-action@v5
        with:
          commit_message: "`bun run clang-format`"
102  .github/workflows/update-hdrhistogram.yml  vendored
@@ -1,102 +0,0 @@
name: Update hdrhistogram

on:
  schedule:
    - cron: "0 4 * * 0"
  workflow_dispatch:

jobs:
  check-update:
    runs-on: ubuntu-latest
    permissions:
      contents: write
      pull-requests: write

    steps:
      - uses: actions/checkout@v4

      - name: Check hdrhistogram version
        id: check-version
        run: |
          set -euo pipefail

          # Extract the commit hash from the line after COMMIT
          CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildHdrHistogram.cmake)

          if [ -z "$CURRENT_VERSION" ]; then
            echo "Error: Could not find COMMIT line in BuildHdrHistogram.cmake"
            exit 1
          fi

          # Validate that it looks like a git hash
          if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
            echo "Error: Invalid git hash format in BuildHdrHistogram.cmake"
            echo "Found: $CURRENT_VERSION"
            echo "Expected: 40 character hexadecimal string"
            exit 1
          fi

          echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT

          LATEST_RELEASE=$(curl -sL https://api.github.com/repos/HdrHistogram/HdrHistogram_c/releases/latest)
          if [ -z "$LATEST_RELEASE" ]; then
            echo "Error: Failed to fetch latest release from GitHub API"
            exit 1
          fi

          LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
          if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
            echo "Error: Could not extract tag name from GitHub API response"
            exit 1
          fi

          LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/HdrHistogram/HdrHistogram_c/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
          if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
            echo "Error: Could not fetch SHA for tag $LATEST_TAG"
            exit 1
          fi

          # Try to get commit SHA from tag object (for annotated tags)
          # If it fails, assume it's a lightweight tag pointing directly to commit
          LATEST_SHA=$(curl -sL "https://api.github.com/repos/HdrHistogram/HdrHistogram_c/git/tags/$LATEST_TAG_SHA" 2>/dev/null | jq -r '.object.sha // empty')
          if [ -z "$LATEST_SHA" ]; then
            # Lightweight tag - SHA points directly to commit
            LATEST_SHA="$LATEST_TAG_SHA"
          fi

          if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
            echo "Error: Invalid SHA format received from GitHub"
            echo "Found: $LATEST_SHA"
            echo "Expected: 40 character hexadecimal string"
            exit 1
          fi

          echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
          echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT

      - name: Update version if needed
        if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
        run: |
          set -euo pipefail
          # Handle multi-line format where COMMIT and its value are on separate lines
          sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildHdrHistogram.cmake

      - name: Create Pull Request
        if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
        uses: peter-evans/create-pull-request@v4
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          add-paths: |
            cmake/targets/BuildHdrHistogram.cmake
          commit-message: "deps: update hdrhistogram to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
          title: "deps: update hdrhistogram to ${{ steps.check-version.outputs.tag }}"
          delete-branch: true
          branch: deps/update-hdrhistogram-${{ github.run_number }}
          body: |
            ## What does this PR do?

            Updates hdrhistogram to version ${{ steps.check-version.outputs.tag }}

            Compare: https://github.com/HdrHistogram/HdrHistogram_c/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}

            Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-hdrhistogram.yml)
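As a side note on the extraction step in the workflow above: the awk one-liner assumes the pinned commit sits on the line immediately after a bare `COMMIT` keyword in the CMake file. A minimal, self-contained sketch of that behaviour follows; the surrounding `register_repository(...)` layout is an assumption for illustration, not a copy of the real BuildHdrHistogram.cmake:

```bash
cat > /tmp/BuildExample.cmake <<'EOF'
register_repository(
  NAME
    hdrhistogram
  REPOSITORY
    HdrHistogram/HdrHistogram_c
  COMMIT
    0000000000000000000000000000000000000000
)
EOF

# Same pattern as the workflow: match a line ending in COMMIT, read the next
# line, strip surrounding whitespace, and print the pinned hash.
awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' \
  /tmp/BuildExample.cmake
# prints: 0000000000000000000000000000000000000000
```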
118  .github/workflows/update-highway.yml  vendored
@@ -1,118 +0,0 @@
name: Update highway

on:
  schedule:
    - cron: "0 4 * * 0"
  workflow_dispatch:

jobs:
  check-update:
    runs-on: ubuntu-latest
    permissions:
      contents: write
      pull-requests: write

    steps:
      - uses: actions/checkout@v4

      - name: Check highway version
        id: check-version
        run: |
          set -euo pipefail

          # Extract the commit hash from the line after COMMIT
          CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildHighway.cmake)

          if [ -z "$CURRENT_VERSION" ]; then
            echo "Error: Could not find COMMIT line in BuildHighway.cmake"
            exit 1
          fi

          # Validate that it looks like a git hash
          if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
            echo "Error: Invalid git hash format in BuildHighway.cmake"
            echo "Found: $CURRENT_VERSION"
            echo "Expected: 40 character hexadecimal string"
            exit 1
          fi

          echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT

          LATEST_RELEASE=$(curl -sL https://api.github.com/repos/google/highway/releases/latest)
          if [ -z "$LATEST_RELEASE" ]; then
            echo "Error: Failed to fetch latest release from GitHub API"
            exit 1
          fi

          LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
          if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
            echo "Error: Could not extract tag name from GitHub API response"
            exit 1
          fi

          TAG_REF=$(curl -sL "https://api.github.com/repos/google/highway/git/refs/tags/$LATEST_TAG")
          if [ -z "$TAG_REF" ]; then
            echo "Error: Could not fetch tag reference for $LATEST_TAG"
            exit 1
          fi

          TAG_OBJECT_SHA=$(echo "$TAG_REF" | jq -r '.object.sha')
          TAG_OBJECT_TYPE=$(echo "$TAG_REF" | jq -r '.object.type')

          if [ -z "$TAG_OBJECT_SHA" ] || [ "$TAG_OBJECT_SHA" = "null" ]; then
            echo "Error: Could not fetch SHA for tag $LATEST_TAG"
            exit 1
          fi

          # Handle both lightweight tags (type: commit) and annotated tags (type: tag)
          if [ "$TAG_OBJECT_TYPE" = "commit" ]; then
            # Lightweight tag - object.sha is already the commit SHA
            LATEST_SHA="$TAG_OBJECT_SHA"
          elif [ "$TAG_OBJECT_TYPE" = "tag" ]; then
            # Annotated tag - need to fetch the tag object to get the commit SHA
            LATEST_SHA=$(curl -sL "https://api.github.com/repos/google/highway/git/tags/$TAG_OBJECT_SHA" | jq -r '.object.sha')
            if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
              echo "Error: Could not fetch commit SHA for annotated tag $LATEST_TAG @ $TAG_OBJECT_SHA"
              exit 1
            fi
          else
            echo "Error: Unexpected tag object type: $TAG_OBJECT_TYPE"
            exit 1
          fi

          if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
            echo "Error: Invalid SHA format received from GitHub"
            echo "Found: $LATEST_SHA"
            echo "Expected: 40 character hexadecimal string"
            exit 1
          fi

          echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
          echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT

      - name: Update version if needed
        if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
        run: |
          set -euo pipefail
          # Handle multi-line format where COMMIT and its value are on separate lines
          sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildHighway.cmake

      - name: Create Pull Request
        if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
        uses: peter-evans/create-pull-request@v4
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          add-paths: |
            cmake/targets/BuildHighway.cmake
          commit-message: "deps: update highway to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
          title: "deps: update highway to ${{ steps.check-version.outputs.tag }}"
          delete-branch: true
          branch: deps/update-highway-${{ github.run_number }}
          body: |
            ## What does this PR do?

            Updates highway to version ${{ steps.check-version.outputs.tag }}

            Compare: https://github.com/google/highway/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}

            Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-highway.yml)
22  .github/workflows/update-lolhtml.yml  vendored
@@ -50,27 +50,15 @@ jobs:
            exit 1
          fi

          # Get the commit SHA that the tag points to
          # This handles both lightweight tags (direct commit refs) and annotated tags (tag objects)
          TAG_REF_RESPONSE=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/refs/tags/$LATEST_TAG")
          LATEST_TAG_SHA=$(echo "$TAG_REF_RESPONSE" | jq -r '.object.sha')
          TAG_OBJECT_TYPE=$(echo "$TAG_REF_RESPONSE" | jq -r '.object.type')

          LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
          if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
            echo "Error: Could not fetch SHA for tag $LATEST_TAG"
            exit 1
          fi

          if [ "$TAG_OBJECT_TYPE" = "tag" ]; then
            # This is an annotated tag, we need to get the commit it points to
            LATEST_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
            if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
              echo "Error: Could not fetch commit SHA for annotated tag $LATEST_TAG @ $LATEST_TAG_SHA"
              exit 1
            fi
          else
            # This is a lightweight tag pointing directly to a commit
            LATEST_SHA="$LATEST_TAG_SHA"
          LATEST_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
          if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
            echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
            exit 1
          fi

          if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
27  .github/workflows/update-lshpack.yml  vendored
@@ -50,32 +50,15 @@ jobs:
            exit 1
          fi

          # Get the tag reference, which contains both SHA and type
          TAG_REF=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/refs/tags/$LATEST_TAG")
          if [ -z "$TAG_REF" ]; then
            echo "Error: Could not fetch tag reference for $LATEST_TAG"
            exit 1
          fi

          LATEST_TAG_SHA=$(echo "$TAG_REF" | jq -r '.object.sha')
          TAG_TYPE=$(echo "$TAG_REF" | jq -r '.object.type')

          LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
          if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
            echo "Error: Could not fetch SHA for tag $LATEST_TAG"
            exit 1
          fi

          # If it's an annotated tag, we need to dereference it to get the commit SHA
          # If it's a lightweight tag, the SHA already points to the commit
          if [ "$TAG_TYPE" = "tag" ]; then
            LATEST_SHA=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
            if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
              echo "Error: Could not fetch commit SHA for annotated tag $LATEST_TAG"
              exit 1
            fi
          else
            # For lightweight tags, the SHA is already the commit SHA
            LATEST_SHA="$LATEST_TAG_SHA"
          LATEST_SHA=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
          if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
            echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
            exit 1
          fi

          if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
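The lolhtml and lshpack changes above both revolve around the same GitHub API detail: `GET /repos/{owner}/{repo}/git/refs/tags/{tag}` returns an `object.type` of `commit` for a lightweight tag (the SHA is already the commit) but `tag` for an annotated tag, whose SHA must be dereferenced once more via `/git/tags/{sha}`. A condensed sketch of that pattern, with placeholder repository and tag values:

```bash
REPO="litespeedtech/ls-hpack"   # placeholder values for illustration
TAG="v2.3.0"

REF=$(curl -sL "https://api.github.com/repos/$REPO/git/refs/tags/$TAG")
SHA=$(echo "$REF" | jq -r '.object.sha')
TYPE=$(echo "$REF" | jq -r '.object.type')

if [ "$TYPE" = "tag" ]; then
  # Annotated tag: the ref points at a tag object, so dereference it once
  # more to reach the commit it wraps.
  SHA=$(curl -sL "https://api.github.com/repos/$REPO/git/tags/$SHA" | jq -r '.object.sha')
fi

echo "$TAG resolves to commit $SHA"
```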
47  .github/workflows/vscode-release.yml  vendored
@@ -1,47 +0,0 @@
name: VSCode Extension Publish
on:
  workflow_dispatch:
    inputs:
      version:
        description: "Version to publish (e.g. 0.0.25) - Check the marketplace for the latest version"
        required: true
        type: string

jobs:
  publish:
    name: "Publish to VS Code Marketplace"
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup Bun
        uses: ./.github/actions/setup-bun
        with:
          bun-version: "1.2.18"

      - name: Install dependencies (root)
        run: bun install

      - name: Install dependencies
        run: bun install
        working-directory: packages/bun-vscode

      - name: Set Version
        run: bun pm version ${{ github.event.inputs.version }} --no-git-tag-version --allow-same-version
        working-directory: packages/bun-vscode

      - name: Build (inspector protocol)
        run: bun install && bun run build
        working-directory: packages/bun-inspector-protocol

      - name: Build (vscode extension)
        run: bun run build
        working-directory: packages/bun-vscode

      - name: Publish
        if: success()
        run: bunx vsce publish
        env:
          VSCE_PAT: ${{ secrets.VSCODE_EXTENSION }}
        working-directory: packages/bun-vscode/extension
4  .gitignore  vendored
@@ -184,6 +184,4 @@ codegen-for-zig-team.tar.gz
*.sock
scratch*.{js,ts,tsx,cjs,mjs}

*.bun-build

scripts/lldb-inline
*.bun-build
2  .vscode/settings.json  vendored
@@ -168,5 +168,5 @@
    "WebKit/WebInspectorUI": true,
  },
  "git.detectSubmodules": false,
  "bun.test.customScript": "./build/debug/bun-debug test"
  "bun.test.customScript": "bun-debug test"
}
31  CLAUDE.md
@@ -4,9 +4,9 @@ This is the Bun repository - an all-in-one JavaScript runtime & toolkit designed

### Build Commands

- **Build debug version**: `bun bd`
- **Build debug version**: `bun bd` or `bun run build:debug`
  - Creates a debug build at `./build/debug/bun-debug`
  - **CRITICAL**: DO NOT set a build timeout. Compilation takes ~5 minutes. Be patient.
  - Compilation takes ~2.5 minutes
- **Run tests with your debug build**: `bun bd test <test-file>`
  - **CRITICAL**: Never use `bun test` directly - it won't include your changes
- **Run any command with debug build**: `bun bd <command>`
@@ -59,8 +59,8 @@ test("my feature", async () => {
  });

  const [stdout, stderr, exitCode] = await Promise.all([
    proc.stdout.text(),
    proc.stderr.text(),
    new Response(proc.stdout).text(),
    new Response(proc.stderr).text(),
    proc.exited,
  ]);

@@ -69,8 +69,6 @@ test("my feature", async () => {
});
```

- Always use `port: 0`. Do not hardcode ports. Do not use your own random port number function.

## Code Architecture

### Language Structure
@@ -135,6 +133,7 @@ test("my feature", async () => {
When implementing JavaScript classes in C++:

1. Create three classes if there's a public constructor:

   - `class Foo : public JSC::JSDestructibleObject` (if has C++ fields)
   - `class FooPrototype : public JSC::JSNonFinalObject`
   - `class FooConstructor : public JSC::InternalFunction`
@@ -194,6 +193,7 @@ Built-in JavaScript modules use special syntax and are organized as:
```

3. **Debug helpers**:

   - `$debug()` - Like console.log but stripped in release builds
   - `$assert()` - Assertions stripped in release builds
   - `if($debug) {}` - Check if debug env var is set
@@ -221,16 +221,15 @@ bun ci
## Important Development Notes

1. **Never use `bun test` or `bun <file>` directly** - always use `bun bd test` or `bun bd <command>`. `bun bd` compiles & runs the debug build.
2. **All changes must be tested** - if you're not testing your changes, you're not done.
3. **Get your tests to pass**. If you didn't run the tests, your code does not work.
4. **Follow existing code style** - check neighboring files for patterns
5. **Create tests in the right folder** in `test/` and the test must end in `.test.ts` or `.test.tsx`
6. **Use absolute paths** - Always use absolute paths in file operations
7. **Avoid shell commands** - Don't use `find` or `grep` in tests; use Bun's Glob and built-in tools
8. **Memory management** - In Zig code, be careful with allocators and use defer for cleanup
9. **Cross-platform** - Run `bun run zig:check-all` to compile the Zig code on all platforms when making platform-specific changes
10. **Debug builds** - Use `BUN_DEBUG_QUIET_LOGS=1` to disable debug logging, or `BUN_DEBUG_<scope>=1` to enable specific scopes
11. **Be humble & honest** - NEVER overstate what you got done or what actually works in commits, PRs or in messages to the user.
2. **Use `await using`** for proper resource cleanup with Bun APIs (Bun.spawn, Bun.serve, Bun.connect, etc.)
3. **Follow existing code style** - check neighboring files for patterns
4. **Create regression tests** in `test/regression/issue/` when fixing bugs
5. **Use absolute paths** - Always use absolute paths in file operations
6. **Avoid shell commands** - Don't use `find` or `grep` in tests; use Bun's Glob and built-in tools
7. **Memory management** - In Zig code, be careful with allocators and use defer for cleanup
8. **Cross-platform** - Test on macOS, Linux, and Windows when making platform-specific changes
9. **Debug builds** - Use `BUN_DEBUG_QUIET_LOGS=1` to disable debug logging, or `BUN_DEBUG_<scope>=1` to enable specific scopes
10. **Transpiled source** - Find transpiled files in `/tmp/bun-debug-src/` for debugging

## Key APIs and Features

@@ -160,7 +160,6 @@ In particular, these are:

- `./src/codegen/generate-jssink.ts` -- Generates `build/debug/codegen/JSSink.cpp`, `build/debug/codegen/JSSink.h` which implement various classes for interfacing with `ReadableStream`. This is internally how `FileSink`, `ArrayBufferSink`, `"type": "direct"` streams and other code related to streams works.
- `./src/codegen/generate-classes.ts` -- Generates `build/debug/codegen/ZigGeneratedClasses*`, which generates Zig & C++ bindings for JavaScriptCore classes implemented in Zig. In `**/*.classes.ts` files, we define the interfaces for various classes, methods, prototypes, getters/setters etc which the code generator reads to generate boilerplate code implementing the JavaScript objects in C++ and wiring them up to Zig
- `./src/codegen/cppbind.ts` -- Generates automatic Zig bindings for C++ functions marked with `[[ZIG_EXPORT]]` attributes.
- `./src/codegen/bundle-modules.ts` -- Bundles built-in modules like `node:fs`, `bun:ffi` into files we can include in the final binary. In development, these can be reloaded without rebuilding Zig (you still need to run `bun run build`, but it re-reads the transpiled files from disk afterwards). In release builds, these are embedded into the binary.
- `./src/codegen/bundle-functions.ts` -- Bundles globally-accessible functions implemented in JavaScript/TypeScript like `ReadableStream`, `WritableStream`, and a handful more. These are used similarly to the builtin modules, but the output more closely aligns with what WebKit/Safari does for Safari's built-in functions so that we can copy-paste the implementations from WebKit as a starting point.
Binary file not shown.
@@ -28,7 +28,9 @@ if (+(existingUsers?.[0]?.count ?? existingUsers?.count) < 100) {
    }));

    // Insert all users
    await sql`INSERT INTO users_bun_bench ${sql(users)}`;
    await sql`
      INSERT INTO users_bun_bench (first_name, last_name, email, dob) ${sql(users)}
    `;
  }

  const type = isBun ? "Bun.sql" : "postgres";

@@ -9,6 +9,6 @@
    "typescript": "^5.0.0"
  },
  "dependencies": {
    "postgres": "^3.4.7"
    "postgres": "^3.4.5"
  }
}

@@ -28,4 +28,10 @@ bench("brotli compress stream", async () => {
  await pipeline(source, compress);
});

bench("brotli decompress stream", async () => {
  const source = Readable.from([compressed]);
  const decompress = createBrotliDecompress();
  await pipeline(source, decompress);
});

await run();

@@ -752,13 +752,6 @@ fn addInternalImports(b: *Build, mod: *Module, opts: *BunBuildOptions) void {
            });
        }
    }
    {
        const cppImport = b.createModule(.{
            .root_source_file = (std.Build.LazyPath{ .cwd_relative = opts.codegen_path }).path(b, "cpp.zig"),
        });
        mod.addImport("cpp", cppImport);
        cppImport.addImport("bun", mod);
    }
    inline for (.{
        .{ .import = "completions-bash", .file = b.path("completions/bun.bash") },
        .{ .import = "completions-zsh", .file = b.path("completions/bun.zsh") },
220  bun.lock
@@ -4,9 +4,6 @@
    "": {
      "name": "bun",
      "devDependencies": {
        "@lezer/common": "^1.2.3",
        "@lezer/cpp": "^1.1.3",
        "bun-tracestrings": "github:oven-sh/bun.report#912ca63e26c51429d3e6799aa2a6ab079b188fd8",
        "esbuild": "^0.21.4",
        "mitata": "^0.1.11",
        "peechy": "0.4.34",
@@ -15,7 +12,7 @@
        "react": "^18.3.1",
        "react-dom": "^18.3.1",
        "source-map-js": "^1.2.0",
        "typescript": "5.9.2",
        "typescript": "^5.7.2",
      },
    },
    "packages/@types/bun": {
@@ -32,6 +29,7 @@
      },
      "devDependencies": {
        "@types/react": "^19",
        "typescript": "^5.0.2",
      },
      "peerDependencies": {
        "@types/react": "^19",
@@ -89,191 +87,41 @@
"@esbuild/win32-x64": ["@esbuild/win32-x64@0.21.5", "", { "os": "win32", "cpu": "x64" }, "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw=="],
"@lezer/common": ["@lezer/common@1.2.3", "", {}, "sha512-w7ojc8ejBqr2REPsWxJjrMFsA/ysDCFICn8zEOR9mrqzOu2amhITYuLD8ag6XZf0CFXDrhKqw7+tW8cX66NaDA=="],
"@lezer/cpp": ["@lezer/cpp@1.1.3", "", { "dependencies": { "@lezer/common": "^1.2.0", "@lezer/highlight": "^1.0.0", "@lezer/lr": "^1.0.0" } }, "sha512-ykYvuFQKGsRi6IcE+/hCSGUhb/I4WPjd3ELhEblm2wS2cOznDFzO+ubK2c+ioysOnlZ3EduV+MVQFCPzAIoY3w=="],
"@lezer/highlight": ["@lezer/highlight@1.2.1", "", { "dependencies": { "@lezer/common": "^1.0.0" } }, "sha512-Z5duk4RN/3zuVO7Jq0pGLJ3qynpxUVsh7IbUbGj88+uV2ApSAn6kWg2au3iJb+0Zi7kKtqffIESgNcRXWZWmSA=="],
"@lezer/lr": ["@lezer/lr@1.4.2", "", { "dependencies": { "@lezer/common": "^1.0.0" } }, "sha512-pu0K1jCIdnQ12aWNaAVU5bzi7Bd1w54J3ECgANPmYLtQKP0HBj2cE/5coBD66MT10xbtIuUr7tg0Shbsvk0mDA=="],
"@octokit/app": ["@octokit/app@14.1.0", "", { "dependencies": { "@octokit/auth-app": "^6.0.0", "@octokit/auth-unauthenticated": "^5.0.0", "@octokit/core": "^5.0.0", "@octokit/oauth-app": "^6.0.0", "@octokit/plugin-paginate-rest": "^9.0.0", "@octokit/types": "^12.0.0", "@octokit/webhooks": "^12.0.4" } }, "sha512-g3uEsGOQCBl1+W1rgfwoRFUIR6PtvB2T1E4RpygeUU5LrLvlOqcxrt5lfykIeRpUPpupreGJUYl70fqMDXdTpw=="],
"@octokit/auth-app": ["@octokit/auth-app@6.1.4", "", { "dependencies": { "@octokit/auth-oauth-app": "^7.1.0", "@octokit/auth-oauth-user": "^4.1.0", "@octokit/request": "^8.3.1", "@octokit/request-error": "^5.1.0", "@octokit/types": "^13.1.0", "deprecation": "^2.3.1", "lru-cache": "npm:@wolfy1339/lru-cache@^11.0.2-patch.1", "universal-github-app-jwt": "^1.1.2", "universal-user-agent": "^6.0.0" } }, "sha512-QkXkSOHZK4dA5oUqY5Dk3S+5pN2s1igPjEASNQV8/vgJgW034fQWR16u7VsNOK/EljA00eyjYF5mWNxWKWhHRQ=="],
"@octokit/auth-oauth-app": ["@octokit/auth-oauth-app@7.1.0", "", { "dependencies": { "@octokit/auth-oauth-device": "^6.1.0", "@octokit/auth-oauth-user": "^4.1.0", "@octokit/request": "^8.3.1", "@octokit/types": "^13.0.0", "@types/btoa-lite": "^1.0.0", "btoa-lite": "^1.0.0", "universal-user-agent": "^6.0.0" } }, "sha512-w+SyJN/b0l/HEb4EOPRudo7uUOSW51jcK1jwLa+4r7PA8FPFpoxEnHBHMITqCsc/3Vo2qqFjgQfz/xUUvsSQnA=="],
"@octokit/auth-oauth-device": ["@octokit/auth-oauth-device@6.1.0", "", { "dependencies": { "@octokit/oauth-methods": "^4.1.0", "@octokit/request": "^8.3.1", "@octokit/types": "^13.0.0", "universal-user-agent": "^6.0.0" } }, "sha512-FNQ7cb8kASufd6Ej4gnJ3f1QB5vJitkoV1O0/g6e6lUsQ7+VsSNRHRmFScN2tV4IgKA12frrr/cegUs0t+0/Lw=="],
"@octokit/auth-oauth-user": ["@octokit/auth-oauth-user@4.1.0", "", { "dependencies": { "@octokit/auth-oauth-device": "^6.1.0", "@octokit/oauth-methods": "^4.1.0", "@octokit/request": "^8.3.1", "@octokit/types": "^13.0.0", "btoa-lite": "^1.0.0", "universal-user-agent": "^6.0.0" } }, "sha512-FrEp8mtFuS/BrJyjpur+4GARteUCrPeR/tZJzD8YourzoVhRics7u7we/aDcKv+yywRNwNi/P4fRi631rG/OyQ=="],
"@octokit/auth-token": ["@octokit/auth-token@4.0.0", "", {}, "sha512-tY/msAuJo6ARbK6SPIxZrPBms3xPbfwBrulZe0Wtr/DIY9lje2HeV1uoebShn6mx7SjCHif6EjMvoREj+gZ+SA=="],
"@octokit/auth-unauthenticated": ["@octokit/auth-unauthenticated@5.0.1", "", { "dependencies": { "@octokit/request-error": "^5.0.0", "@octokit/types": "^12.0.0" } }, "sha512-oxeWzmBFxWd+XolxKTc4zr+h3mt+yofn4r7OfoIkR/Cj/o70eEGmPsFbueyJE2iBAGpjgTnEOKM3pnuEGVmiqg=="],
"@octokit/core": ["@octokit/core@5.2.2", "", { "dependencies": { "@octokit/auth-token": "^4.0.0", "@octokit/graphql": "^7.1.0", "@octokit/request": "^8.4.1", "@octokit/request-error": "^5.1.1", "@octokit/types": "^13.0.0", "before-after-hook": "^2.2.0", "universal-user-agent": "^6.0.0" } }, "sha512-/g2d4sW9nUDJOMz3mabVQvOGhVa4e/BN/Um7yca9Bb2XTzPPnfTWHWQg+IsEYO7M3Vx+EXvaM/I2pJWIMun1bg=="],
"@octokit/endpoint": ["@octokit/endpoint@9.0.6", "", { "dependencies": { "@octokit/types": "^13.1.0", "universal-user-agent": "^6.0.0" } }, "sha512-H1fNTMA57HbkFESSt3Y9+FBICv+0jFceJFPWDePYlR/iMGrwM5ph+Dd4XRQs+8X+PUFURLQgX9ChPfhJ/1uNQw=="],
"@octokit/graphql": ["@octokit/graphql@7.1.1", "", { "dependencies": { "@octokit/request": "^8.4.1", "@octokit/types": "^13.0.0", "universal-user-agent": "^6.0.0" } }, "sha512-3mkDltSfcDUoa176nlGoA32RGjeWjl3K7F/BwHwRMJUW/IteSa4bnSV8p2ThNkcIcZU2umkZWxwETSSCJf2Q7g=="],
"@octokit/oauth-app": ["@octokit/oauth-app@6.1.0", "", { "dependencies": { "@octokit/auth-oauth-app": "^7.0.0", "@octokit/auth-oauth-user": "^4.0.0", "@octokit/auth-unauthenticated": "^5.0.0", "@octokit/core": "^5.0.0", "@octokit/oauth-authorization-url": "^6.0.2", "@octokit/oauth-methods": "^4.0.0", "@types/aws-lambda": "^8.10.83", "universal-user-agent": "^6.0.0" } }, "sha512-nIn/8eUJ/BKUVzxUXd5vpzl1rwaVxMyYbQkNZjHrF7Vk/yu98/YDF/N2KeWO7uZ0g3b5EyiFXFkZI8rJ+DH1/g=="],
"@octokit/oauth-authorization-url": ["@octokit/oauth-authorization-url@6.0.2", "", {}, "sha512-CdoJukjXXxqLNK4y/VOiVzQVjibqoj/xHgInekviUJV73y/BSIcwvJ/4aNHPBPKcPWFnd4/lO9uqRV65jXhcLA=="],
"@octokit/oauth-methods": ["@octokit/oauth-methods@4.1.0", "", { "dependencies": { "@octokit/oauth-authorization-url": "^6.0.2", "@octokit/request": "^8.3.1", "@octokit/request-error": "^5.1.0", "@octokit/types": "^13.0.0", "btoa-lite": "^1.0.0" } }, "sha512-4tuKnCRecJ6CG6gr0XcEXdZtkTDbfbnD5oaHBmLERTjTMZNi2CbfEHZxPU41xXLDG4DfKf+sonu00zvKI9NSbw=="],
"@octokit/openapi-types": ["@octokit/openapi-types@24.2.0", "", {}, "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg=="],
"@octokit/plugin-paginate-graphql": ["@octokit/plugin-paginate-graphql@4.0.1", "", { "peerDependencies": { "@octokit/core": ">=5" } }, "sha512-R8ZQNmrIKKpHWC6V2gum4x9LG2qF1RxRjo27gjQcG3j+vf2tLsEfE7I/wRWEPzYMaenr1M+qDAtNcwZve1ce1A=="],
"@octokit/plugin-paginate-rest": ["@octokit/plugin-paginate-rest@11.4.4-cjs.2", "", { "dependencies": { "@octokit/types": "^13.7.0" }, "peerDependencies": { "@octokit/core": "5" } }, "sha512-2dK6z8fhs8lla5PaOTgqfCGBxgAv/le+EhPs27KklPhm1bKObpu6lXzwfUEQ16ajXzqNrKMujsFyo9K2eaoISw=="],
"@octokit/plugin-rest-endpoint-methods": ["@octokit/plugin-rest-endpoint-methods@13.3.2-cjs.1", "", { "dependencies": { "@octokit/types": "^13.8.0" }, "peerDependencies": { "@octokit/core": "^5" } }, "sha512-VUjIjOOvF2oELQmiFpWA1aOPdawpyaCUqcEBc/UOUnj3Xp6DJGrJ1+bjUIIDzdHjnFNO6q57ODMfdEZnoBkCwQ=="],
"@octokit/plugin-retry": ["@octokit/plugin-retry@6.1.0", "", { "dependencies": { "@octokit/request-error": "^5.0.0", "@octokit/types": "^13.0.0", "bottleneck": "^2.15.3" }, "peerDependencies": { "@octokit/core": "5" } }, "sha512-WrO3bvq4E1Xh1r2mT9w6SDFg01gFmP81nIG77+p/MqW1JeXXgL++6umim3t6x0Zj5pZm3rXAN+0HEjmmdhIRig=="],
"@octokit/plugin-throttling": ["@octokit/plugin-throttling@8.2.0", "", { "dependencies": { "@octokit/types": "^12.2.0", "bottleneck": "^2.15.3" }, "peerDependencies": { "@octokit/core": "^5.0.0" } }, "sha512-nOpWtLayKFpgqmgD0y3GqXafMFuKcA4tRPZIfu7BArd2lEZeb1988nhWhwx4aZWmjDmUfdgVf7W+Tt4AmvRmMQ=="],
"@octokit/request": ["@octokit/request@8.4.1", "", { "dependencies": { "@octokit/endpoint": "^9.0.6", "@octokit/request-error": "^5.1.1", "@octokit/types": "^13.1.0", "universal-user-agent": "^6.0.0" } }, "sha512-qnB2+SY3hkCmBxZsR/MPCybNmbJe4KAlfWErXq+rBKkQJlbjdJeS85VI9r8UqeLYLvnAenU8Q1okM/0MBsAGXw=="],
"@octokit/request-error": ["@octokit/request-error@5.1.1", "", { "dependencies": { "@octokit/types": "^13.1.0", "deprecation": "^2.0.0", "once": "^1.4.0" } }, "sha512-v9iyEQJH6ZntoENr9/yXxjuezh4My67CBSu9r6Ve/05Iu5gNgnisNWOsoJHTP6k0Rr0+HQIpnH+kyammu90q/g=="],
"@octokit/types": ["@octokit/types@13.10.0", "", { "dependencies": { "@octokit/openapi-types": "^24.2.0" } }, "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA=="],
"@octokit/webhooks": ["@octokit/webhooks@12.3.2", "", { "dependencies": { "@octokit/request-error": "^5.0.0", "@octokit/webhooks-methods": "^4.1.0", "@octokit/webhooks-types": "7.6.1", "aggregate-error": "^3.1.0" } }, "sha512-exj1MzVXoP7xnAcAB3jZ97pTvVPkQF9y6GA/dvYC47HV7vLv+24XRS6b/v/XnyikpEuvMhugEXdGtAlU086WkQ=="],
"@octokit/webhooks-methods": ["@octokit/webhooks-methods@5.1.1", "", {}, "sha512-NGlEHZDseJTCj8TMMFehzwa9g7On4KJMPVHDSrHxCQumL6uSQR8wIkP/qesv52fXqV1BPf4pTxwtS31ldAt9Xg=="],
"@octokit/webhooks-types": ["@octokit/webhooks-types@7.6.1", "", {}, "sha512-S8u2cJzklBC0FgTwWVLaM8tMrDuDMVE4xiTK4EYXM9GntyvrdbSoxqDQa+Fh57CCNApyIpyeqPhhFEmHPfrXgw=="],
"@sentry/types": ["@sentry/types@7.120.3", "", {}, "sha512-C4z+3kGWNFJ303FC+FxAd4KkHvxpNFYAFN8iMIgBwJdpIl25KZ8Q/VdGn0MLLUEHNLvjob0+wvwlcRBBNLXOow=="],
"@types/aws-lambda": ["@types/aws-lambda@8.10.152", "", {}, "sha512-soT/c2gYBnT5ygwiHPmd9a1bftj462NWVk2tKCc1PYHSIacB2UwbTS2zYG4jzag1mRDuzg/OjtxQjQ2NKRB6Rw=="],
"@types/btoa-lite": ["@types/btoa-lite@1.0.2", "", {}, "sha512-ZYbcE2x7yrvNFJiU7xJGrpF/ihpkM7zKgw8bha3LNJSesvTtUNxbpzaT7WXBIryf6jovisrxTBvymxMeLLj1Mg=="],
"@types/bun": ["@types/bun@workspace:packages/@types/bun"],
"@types/jsonwebtoken": ["@types/jsonwebtoken@9.0.10", "", { "dependencies": { "@types/ms": "*", "@types/node": "*" } }, "sha512-asx5hIG9Qmf/1oStypjanR7iKTv0gXQ1Ov/jfrX6kS/EO0OFni8orbmGCn0672NHR3kXHwpAwR+B368ZGN/2rA=="],
"@types/ms": ["@types/ms@2.1.0", "", {}, "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA=="],
"@types/node": ["@types/node@24.1.0", "", { "dependencies": { "undici-types": "~7.8.0" } }, "sha512-ut5FthK5moxFKH2T1CUOC6ctR67rQRvvHdFLCD2Ql6KXmMuCrjsSsRI9UsLCm9M18BMwClv4pn327UvB7eeO1w=="],
"@types/node": ["@types/node@22.15.18", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-v1DKRfUdyW+jJhZNEI1PYy29S2YRxMV5AOO/x/SjKmW0acCIOqmbj6Haf9eHAhsPmrhlHSxEhv/1WszcLWV4cg=="],
"@types/react": ["@types/react@19.1.8", "", { "dependencies": { "csstype": "^3.0.2" } }, "sha512-AwAfQ2Wa5bCx9WP8nZL2uMZWod7J7/JSplxbTmBQ5ms6QpqNYm672H0Vu9ZVKVngQ+ii4R/byguVEUZQyeg44g=="],
"aggregate-error": ["aggregate-error@3.1.0", "", { "dependencies": { "clean-stack": "^2.0.0", "indent-string": "^4.0.0" } }, "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA=="],
"before-after-hook": ["before-after-hook@2.2.3", "", {}, "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ=="],
"bottleneck": ["bottleneck@2.19.5", "", {}, "sha512-VHiNCbI1lKdl44tGrhNfU3lup0Tj/ZBMJB5/2ZbNXRCPuRCO7ed2mgcK4r17y+KB2EfuYuRaVlwNbAeaWGSpbw=="],
"btoa-lite": ["btoa-lite@1.0.0", "", {}, "sha512-gvW7InbIyF8AicrqWoptdW08pUxuhq8BEgowNajy9RhiE86fmGAGl+bLKo6oB8QP0CkqHLowfN0oJdKC/J6LbA=="],
"buffer-equal-constant-time": ["buffer-equal-constant-time@1.0.1", "", {}, "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA=="],
"bun-tracestrings": ["bun-tracestrings@github:oven-sh/bun.report#912ca63", { "dependencies": { "@octokit/webhooks-methods": "^5.1.0", "@sentry/types": "^7.112.2", "@types/bun": "^1.2.6", "html-minifier": "^4.0.0", "lightningcss": "^1.24.1", "marked": "^12.0.1", "octokit": "^3.2.0", "prettier": "^3.2.5", "typescript": "^5.0.0" }, "bin": { "ci-remap-server": "./bin/ci-remap-server.ts" } }, "oven-sh-bun.report-912ca63"],
"bun-types": ["bun-types@workspace:packages/bun-types"],
"camel-case": ["camel-case@3.0.0", "", { "dependencies": { "no-case": "^2.2.0", "upper-case": "^1.1.1" } }, "sha512-+MbKztAYHXPr1jNTSKQF52VpcFjwY5RkR7fxksV8Doo4KAYc5Fl4UJRgthBbTmEx8C54DqahhbLJkDwjI3PI/w=="],
"camel-case": ["camel-case@4.1.2", "", { "dependencies": { "pascal-case": "^3.1.2", "tslib": "^2.0.3" } }, "sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw=="],
"capital-case": ["capital-case@1.0.4", "", { "dependencies": { "no-case": "^3.0.4", "tslib": "^2.0.3", "upper-case-first": "^2.0.2" } }, "sha512-ds37W8CytHgwnhGGTi88pcPyR15qoNkOpYwmMMfnWqqWgESapLqvDx6huFjQ5vqWSn2Z06173XNA7LtMOeUh1A=="],
"change-case": ["change-case@4.1.2", "", { "dependencies": { "camel-case": "^4.1.2", "capital-case": "^1.0.4", "constant-case": "^3.0.4", "dot-case": "^3.0.4", "header-case": "^2.0.4", "no-case": "^3.0.4", "param-case": "^3.0.4", "pascal-case": "^3.1.2", "path-case": "^3.0.4", "sentence-case": "^3.0.4", "snake-case": "^3.0.4", "tslib": "^2.0.3" } }, "sha512-bSxY2ws9OtviILG1EiY5K7NNxkqg/JnRnFxLtKQ96JaviiIxi7djMrSd0ECT9AC+lttClmYwKw53BWpOMblo7A=="],
"clean-css": ["clean-css@4.2.4", "", { "dependencies": { "source-map": "~0.6.0" } }, "sha512-EJUDT7nDVFDvaQgAo2G/PJvxmp1o/c6iXLbswsBbUFXi1Nr+AjA2cKmfbKDMjMvzEe75g3P6JkaDDAKk96A85A=="],
"clean-stack": ["clean-stack@2.2.0", "", {}, "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A=="],
"commander": ["commander@2.20.3", "", {}, "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ=="],
"constant-case": ["constant-case@3.0.4", "", { "dependencies": { "no-case": "^3.0.4", "tslib": "^2.0.3", "upper-case": "^2.0.2" } }, "sha512-I2hSBi7Vvs7BEuJDr5dDHfzb/Ruj3FyvFyh7KLilAjNQw3Be+xgqUBA2W6scVEcL0hL1dwPRtIqEPVUCKkSsyQ=="],
"csstype": ["csstype@3.1.3", "", {}, "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw=="],
"deprecation": ["deprecation@2.3.1", "", {}, "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ=="],
"detect-libc": ["detect-libc@2.0.4", "", {}, "sha512-3UDv+G9CsCKO1WKMGw9fwq/SWJYbI0c5Y7LU1AXYoDdbhE2AHQ6N6Nb34sG8Fj7T5APy8qXDCKuuIHd1BR0tVA=="],
"dot-case": ["dot-case@3.0.4", "", { "dependencies": { "no-case": "^3.0.4", "tslib": "^2.0.3" } }, "sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w=="],
"ecdsa-sig-formatter": ["ecdsa-sig-formatter@1.0.11", "", { "dependencies": { "safe-buffer": "^5.0.1" } }, "sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ=="],
"esbuild": ["esbuild@0.21.5", "", { "optionalDependencies": { "@esbuild/aix-ppc64": "0.21.5", "@esbuild/android-arm": "0.21.5", "@esbuild/android-arm64": "0.21.5", "@esbuild/android-x64": "0.21.5", "@esbuild/darwin-arm64": "0.21.5", "@esbuild/darwin-x64": "0.21.5", "@esbuild/freebsd-arm64": "0.21.5", "@esbuild/freebsd-x64": "0.21.5", "@esbuild/linux-arm": "0.21.5", "@esbuild/linux-arm64": "0.21.5", "@esbuild/linux-ia32": "0.21.5", "@esbuild/linux-loong64": "0.21.5", "@esbuild/linux-mips64el": "0.21.5", "@esbuild/linux-ppc64": "0.21.5", "@esbuild/linux-riscv64": "0.21.5", "@esbuild/linux-s390x": "0.21.5", "@esbuild/linux-x64": "0.21.5", "@esbuild/netbsd-x64": "0.21.5", "@esbuild/openbsd-x64": "0.21.5", "@esbuild/sunos-x64": "0.21.5", "@esbuild/win32-arm64": "0.21.5", "@esbuild/win32-ia32": "0.21.5", "@esbuild/win32-x64": "0.21.5" }, "bin": { "esbuild": "bin/esbuild" } }, "sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw=="],
"he": ["he@1.2.0", "", { "bin": { "he": "bin/he" } }, "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw=="],
"header-case": ["header-case@2.0.4", "", { "dependencies": { "capital-case": "^1.0.4", "tslib": "^2.0.3" } }, "sha512-H/vuk5TEEVZwrR0lp2zed9OCo1uAILMlx0JEMgC26rzyJJ3N1v6XkwHHXJQdR2doSjcGPM6OKPYoJgf0plJ11Q=="],
"html-minifier": ["html-minifier@4.0.0", "", { "dependencies": { "camel-case": "^3.0.0", "clean-css": "^4.2.1", "commander": "^2.19.0", "he": "^1.2.0", "param-case": "^2.1.1", "relateurl": "^0.2.7", "uglify-js": "^3.5.1" }, "bin": { "html-minifier": "./cli.js" } }, "sha512-aoGxanpFPLg7MkIl/DDFYtb0iWz7jMFGqFhvEDZga6/4QTjneiD8I/NXL1x5aaoCp7FSIT6h/OhykDdPsbtMig=="],
"indent-string": ["indent-string@4.0.0", "", {}, "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg=="],
"js-tokens": ["js-tokens@4.0.0", "", {}, "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ=="],
"jsonwebtoken": ["jsonwebtoken@9.0.2", "", { "dependencies": { "jws": "^3.2.2", "lodash.includes": "^4.3.0", "lodash.isboolean": "^3.0.3", "lodash.isinteger": "^4.0.4", "lodash.isnumber": "^3.0.3", "lodash.isplainobject": "^4.0.6", "lodash.isstring": "^4.0.1", "lodash.once": "^4.0.0", "ms": "^2.1.1", "semver": "^7.5.4" } }, "sha512-PRp66vJ865SSqOlgqS8hujT5U4AOgMfhrwYIuIhfKaoSCZcirrmASQr8CX7cUg+RMih+hgznrjp99o+W4pJLHQ=="],
"jwa": ["jwa@1.4.2", "", { "dependencies": { "buffer-equal-constant-time": "^1.0.1", "ecdsa-sig-formatter": "1.0.11", "safe-buffer": "^5.0.1" } }, "sha512-eeH5JO+21J78qMvTIDdBXidBd6nG2kZjg5Ohz/1fpa28Z4CcsWUzJ1ZZyFq/3z3N17aZy+ZuBoHljASbL1WfOw=="],
"jws": ["jws@3.2.2", "", { "dependencies": { "jwa": "^1.4.1", "safe-buffer": "^5.0.1" } }, "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA=="],
"lightningcss": ["lightningcss@1.30.1", "", { "dependencies": { "detect-libc": "^2.0.3" }, "optionalDependencies": { "lightningcss-darwin-arm64": "1.30.1", "lightningcss-darwin-x64": "1.30.1", "lightningcss-freebsd-x64": "1.30.1", "lightningcss-linux-arm-gnueabihf": "1.30.1", "lightningcss-linux-arm64-gnu": "1.30.1", "lightningcss-linux-arm64-musl": "1.30.1", "lightningcss-linux-x64-gnu": "1.30.1", "lightningcss-linux-x64-musl": "1.30.1", "lightningcss-win32-arm64-msvc": "1.30.1", "lightningcss-win32-x64-msvc": "1.30.1" } }, "sha512-xi6IyHML+c9+Q3W0S4fCQJOym42pyurFiJUHEcEyHS0CeKzia4yZDEsLlqOFykxOdHpNy0NmvVO31vcSqAxJCg=="],
"lightningcss-darwin-arm64": ["lightningcss-darwin-arm64@1.30.1", "", { "os": "darwin", "cpu": "arm64" }, "sha512-c8JK7hyE65X1MHMN+Viq9n11RRC7hgin3HhYKhrMyaXflk5GVplZ60IxyoVtzILeKr+xAJwg6zK6sjTBJ0FKYQ=="],
"lightningcss-darwin-x64": ["lightningcss-darwin-x64@1.30.1", "", { "os": "darwin", "cpu": "x64" }, "sha512-k1EvjakfumAQoTfcXUcHQZhSpLlkAuEkdMBsI/ivWw9hL+7FtilQc0Cy3hrx0AAQrVtQAbMI7YjCgYgvn37PzA=="],
"lightningcss-freebsd-x64": ["lightningcss-freebsd-x64@1.30.1", "", { "os": "freebsd", "cpu": "x64" }, "sha512-kmW6UGCGg2PcyUE59K5r0kWfKPAVy4SltVeut+umLCFoJ53RdCUWxcRDzO1eTaxf/7Q2H7LTquFHPL5R+Gjyig=="],
"lightningcss-linux-arm-gnueabihf": ["lightningcss-linux-arm-gnueabihf@1.30.1", "", { "os": "linux", "cpu": "arm" }, "sha512-MjxUShl1v8pit+6D/zSPq9S9dQ2NPFSQwGvxBCYaBYLPlCWuPh9/t1MRS8iUaR8i+a6w7aps+B4N0S1TYP/R+Q=="],
"lightningcss-linux-arm64-gnu": ["lightningcss-linux-arm64-gnu@1.30.1", "", { "os": "linux", "cpu": "arm64" }, "sha512-gB72maP8rmrKsnKYy8XUuXi/4OctJiuQjcuqWNlJQ6jZiWqtPvqFziskH3hnajfvKB27ynbVCucKSm2rkQp4Bw=="],
"lightningcss-linux-arm64-musl": ["lightningcss-linux-arm64-musl@1.30.1", "", { "os": "linux", "cpu": "arm64" }, "sha512-jmUQVx4331m6LIX+0wUhBbmMX7TCfjF5FoOH6SD1CttzuYlGNVpA7QnrmLxrsub43ClTINfGSYyHe2HWeLl5CQ=="],
"lightningcss-linux-x64-gnu": ["lightningcss-linux-x64-gnu@1.30.1", "", { "os": "linux", "cpu": "x64" }, "sha512-piWx3z4wN8J8z3+O5kO74+yr6ze/dKmPnI7vLqfSqI8bccaTGY5xiSGVIJBDd5K5BHlvVLpUB3S2YCfelyJ1bw=="],
"lightningcss-linux-x64-musl": ["lightningcss-linux-x64-musl@1.30.1", "", { "os": "linux", "cpu": "x64" }, "sha512-rRomAK7eIkL+tHY0YPxbc5Dra2gXlI63HL+v1Pdi1a3sC+tJTcFrHX+E86sulgAXeI7rSzDYhPSeHHjqFhqfeQ=="],
"lightningcss-win32-arm64-msvc": ["lightningcss-win32-arm64-msvc@1.30.1", "", { "os": "win32", "cpu": "arm64" }, "sha512-mSL4rqPi4iXq5YVqzSsJgMVFENoa4nGTT/GjO2c0Yl9OuQfPsIfncvLrEW6RbbB24WtZ3xP/2CCmI3tNkNV4oA=="],
"lightningcss-win32-x64-msvc": ["lightningcss-win32-x64-msvc@1.30.1", "", { "os": "win32", "cpu": "x64" }, "sha512-PVqXh48wh4T53F/1CCu8PIPCxLzWyCnn/9T5W1Jpmdy5h9Cwd+0YQS6/LwhHXSafuc61/xg9Lv5OrCby6a++jg=="],
"lodash.includes": ["lodash.includes@4.3.0", "", {}, "sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w=="],
"lodash.isboolean": ["lodash.isboolean@3.0.3", "", {}, "sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg=="],
"lodash.isinteger": ["lodash.isinteger@4.0.4", "", {}, "sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA=="],
"lodash.isnumber": ["lodash.isnumber@3.0.3", "", {}, "sha512-QYqzpfwO3/CWf3XP+Z+tkQsfaLL/EnUlXWVkIk5FUPc4sBdTehEqZONuyRt2P67PXAk+NXmTBcc97zw9t1FQrw=="],
"lodash.isplainobject": ["lodash.isplainobject@4.0.6", "", {}, "sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA=="],
"lodash.isstring": ["lodash.isstring@4.0.1", "", {}, "sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw=="],
"lodash.once": ["lodash.once@4.1.1", "", {}, "sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg=="],
"loose-envify": ["loose-envify@1.4.0", "", { "dependencies": { "js-tokens": "^3.0.0 || ^4.0.0" }, "bin": { "loose-envify": "cli.js" } }, "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q=="],
"lower-case": ["lower-case@2.0.2", "", { "dependencies": { "tslib": "^2.0.3" } }, "sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg=="],
"lru-cache": ["@wolfy1339/lru-cache@11.0.2-patch.1", "", {}, "sha512-BgYZfL2ADCXKOw2wJtkM3slhHotawWkgIRRxq4wEybnZQPjvAp71SPX35xepMykTw8gXlzWcWPTY31hlbnRsDA=="],
"marked": ["marked@12.0.2", "", { "bin": { "marked": "bin/marked.js" } }, "sha512-qXUm7e/YKFoqFPYPa3Ukg9xlI5cyAtGmyEIzMfW//m6kXwCy2Ps9DYf5ioijFKQ8qyuscrHoY04iJGctu2Kg0Q=="],
"mitata": ["mitata@0.1.14", "", {}, "sha512-8kRs0l636eT4jj68PFXOR2D5xl4m56T478g16SzUPOYgkzQU+xaw62guAQxzBPm+SXb15GQi1cCpDxJfkr4CSA=="],
"ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="],
"no-case": ["no-case@3.0.4", "", { "dependencies": { "lower-case": "^2.0.2", "tslib": "^2.0.3" } }, "sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg=="],
"octokit": ["octokit@3.2.2", "", { "dependencies": { "@octokit/app": "^14.0.2", "@octokit/core": "^5.0.0", "@octokit/oauth-app": "^6.0.0", "@octokit/plugin-paginate-graphql": "^4.0.0", "@octokit/plugin-paginate-rest": "11.4.4-cjs.2", "@octokit/plugin-rest-endpoint-methods": "13.3.2-cjs.1", "@octokit/plugin-retry": "^6.0.0", "@octokit/plugin-throttling": "^8.0.0", "@octokit/request-error": "^5.0.0", "@octokit/types": "^13.0.0", "@octokit/webhooks": "^12.3.1" } }, "sha512-7Abo3nADdja8l/aglU6Y3lpnHSfv0tw7gFPiqzry/yCU+2gTAX7R1roJ8hJrxIK+S1j+7iqRJXtmuHJ/UDsBhQ=="],
"once": ["once@1.4.0", "", { "dependencies": { "wrappy": "1" } }, "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w=="],
"param-case": ["param-case@2.1.1", "", { "dependencies": { "no-case": "^2.2.0" } }, "sha512-eQE845L6ot89sk2N8liD8HAuH4ca6Vvr7VWAWwt7+kvvG5aBcPmmphQ68JsEG2qa9n1TykS2DLeMt363AAH8/w=="],
"param-case": ["param-case@3.0.4", "", { "dependencies": { "dot-case": "^3.0.4", "tslib": "^2.0.3" } }, "sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A=="],
"pascal-case": ["pascal-case@3.1.2", "", { "dependencies": { "no-case": "^3.0.4", "tslib": "^2.0.3" } }, "sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g=="],
@@ -281,78 +129,30 @@
|
||||
|
||||
"peechy": ["peechy@0.4.34", "", { "dependencies": { "change-case": "^4.1.2" }, "bin": { "peechy": "cli.js" } }, "sha512-Cpke/cCqqZHhkyxz7mdqS8ZAGJFUi5icu3ZGqxm9GC7g2VrhH0tmjPhZoWHAN5ghw1m1wq5+2YvfbDSqgC4+Zg=="],
|
||||
|
||||
"prettier": ["prettier@3.6.2", "", { "bin": { "prettier": "bin/prettier.cjs" } }, "sha512-I7AIg5boAr5R0FFtJ6rCfD+LFsWHp81dolrFD8S79U9tb8Az2nGrJncnMSnys+bpQJfRUzqs9hnA81OAA3hCuQ=="],
|
||||
"prettier": ["prettier@3.5.3", "", { "bin": { "prettier": "bin/prettier.cjs" } }, "sha512-QQtaxnoDJeAkDvDKWCLiwIXkTgRhwYDEQCghU9Z6q03iyek/rxRh/2lC3HB7P8sWT2xC/y5JDctPLBIGzHKbhw=="],
|
||||
|
||||
"prettier-plugin-organize-imports": ["prettier-plugin-organize-imports@4.2.0", "", { "peerDependencies": { "prettier": ">=2.0", "typescript": ">=2.9", "vue-tsc": "^2.1.0 || 3" }, "optionalPeers": ["vue-tsc"] }, "sha512-Zdy27UhlmyvATZi67BTnLcKTo8fm6Oik59Sz6H64PgZJVs6NJpPD1mT240mmJn62c98/QaL+r3kx9Q3gRpDajg=="],
|
||||
"prettier-plugin-organize-imports": ["prettier-plugin-organize-imports@4.1.0", "", { "peerDependencies": { "prettier": ">=2.0", "typescript": ">=2.9", "vue-tsc": "^2.1.0" }, "optionalPeers": ["vue-tsc"] }, "sha512-5aWRdCgv645xaa58X8lOxzZoiHAldAPChljr/MT0crXVOWTZ+Svl4hIWlz+niYSlO6ikE5UXkN1JrRvIP2ut0A=="],
|
||||
|
||||
"react": ["react@18.3.1", "", { "dependencies": { "loose-envify": "^1.1.0" } }, "sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ=="],
|
||||
|
||||
"react-dom": ["react-dom@18.3.1", "", { "dependencies": { "loose-envify": "^1.1.0", "scheduler": "^0.23.2" }, "peerDependencies": { "react": "^18.3.1" } }, "sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw=="],
|
||||
|
||||
"relateurl": ["relateurl@0.2.7", "", {}, "sha512-G08Dxvm4iDN3MLM0EsP62EDV9IuhXPR6blNz6Utcp7zyV3tr4HVNINt6MpaRWbxoOHT3Q7YN2P+jaHX8vUbgog=="],
|
||||
|
||||
"safe-buffer": ["safe-buffer@5.2.1", "", {}, "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="],
|
||||
|
||||
"scheduler": ["scheduler@0.23.2", "", { "dependencies": { "loose-envify": "^1.1.0" } }, "sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ=="],
|
||||
|
||||
"semver": ["semver@7.7.2", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA=="],
|
||||
|
||||
"sentence-case": ["sentence-case@3.0.4", "", { "dependencies": { "no-case": "^3.0.4", "tslib": "^2.0.3", "upper-case-first": "^2.0.2" } }, "sha512-8LS0JInaQMCRoQ7YUytAo/xUu5W2XnQxV2HI/6uM6U7CITS1RqPElr30V6uIqyMKM9lJGRVFy5/4CuzcixNYSg=="],
|
||||
|
||||
"snake-case": ["snake-case@3.0.4", "", { "dependencies": { "dot-case": "^3.0.4", "tslib": "^2.0.3" } }, "sha512-LAOh4z89bGQvl9pFfNF8V146i7o7/CqFPbqzYgP+yYzDIDeS9HaNFtXABamRW+AQzEVODcvE79ljJ+8a9YSdMg=="],
|
||||
|
||||
"source-map": ["source-map@0.6.1", "", {}, "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g=="],
|
||||
|
||||
"source-map-js": ["source-map-js@1.2.1", "", {}, "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA=="],
|
||||
|
||||
"tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="],
|
||||
|
||||
"typescript": ["typescript@5.9.2", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-CWBzXQrc/qOkhidw1OzBTQuYRbfyxDXJMVJ1XNwUHGROVmuaeiEm3OslpZ1RV96d7SKKjZKrSJu3+t/xlw3R9A=="],
|
||||
"typescript": ["typescript@5.8.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ=="],
|
||||
|
||||
"uglify-js": ["uglify-js@3.19.3", "", { "bin": { "uglifyjs": "bin/uglifyjs" } }, "sha512-v3Xu+yuwBXisp6QYTcH4UbH+xYJXqnq2m/LtQVWKWzYc1iehYnLixoQDN9FH6/j9/oybfd6W9Ghwkl8+UMKTKQ=="],
|
||||
"undici-types": ["undici-types@6.21.0", "", {}, "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ=="],
|
||||
|
||||
"undici-types": ["undici-types@7.8.0", "", {}, "sha512-9UJ2xGDvQ43tYyVMpuHlsgApydB8ZKfVYTsLDhXkFL/6gfkp+U8xTGdh8pMJv1SpZna0zxG1DwsKZsreLbXBxw=="],
|
||||
|
||||
"universal-github-app-jwt": ["universal-github-app-jwt@1.2.0", "", { "dependencies": { "@types/jsonwebtoken": "^9.0.0", "jsonwebtoken": "^9.0.2" } }, "sha512-dncpMpnsKBk0eetwfN8D8OUHGfiDhhJ+mtsbMl+7PfW7mYjiH8LIcqRmYMtzYLgSh47HjfdBtrBwIQ/gizKR3g=="],
|
||||
|
||||
"universal-user-agent": ["universal-user-agent@6.0.1", "", {}, "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ=="],
|
||||
|
||||
"upper-case": ["upper-case@1.1.3", "", {}, "sha512-WRbjgmYzgXkCV7zNVpy5YgrHgbBv126rMALQQMrmzOVC4GM2waQ9x7xtm8VU+1yF2kWyPzI9zbZ48n4vSxwfSA=="],
|
||||
"upper-case": ["upper-case@2.0.2", "", { "dependencies": { "tslib": "^2.0.3" } }, "sha512-KgdgDGJt2TpuwBUIjgG6lzw2GWFRCW9Qkfkiv0DxqHHLYJHmtmdUIKcZd8rHgFSjopVTlw6ggzCm1b8MFQwikg=="],
|
||||
|
||||
"upper-case-first": ["upper-case-first@2.0.2", "", { "dependencies": { "tslib": "^2.0.3" } }, "sha512-514ppYHBaKwfJRK/pNC6c/OxfGa0obSnAl106u97Ed0I625Nin96KAjttZF6ZL3e1XLtphxnqrOi9iWgm+u+bg=="],
|
||||
|
||||
"wrappy": ["wrappy@1.0.2", "", {}, "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="],
|
||||
|
||||
"@octokit/app/@octokit/plugin-paginate-rest": ["@octokit/plugin-paginate-rest@9.2.2", "", { "dependencies": { "@octokit/types": "^12.6.0" }, "peerDependencies": { "@octokit/core": "5" } }, "sha512-u3KYkGF7GcZnSD/3UP0S7K5XUFT2FkOQdcfXZGZQPGv3lm4F2Xbf71lvjldr8c1H3nNbF+33cLEkWYbokGWqiQ=="],
|
||||
|
||||
"@octokit/app/@octokit/types": ["@octokit/types@12.6.0", "", { "dependencies": { "@octokit/openapi-types": "^20.0.0" } }, "sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw=="],
|
||||
|
||||
"@octokit/auth-unauthenticated/@octokit/types": ["@octokit/types@12.6.0", "", { "dependencies": { "@octokit/openapi-types": "^20.0.0" } }, "sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw=="],
|
||||
|
||||
"@octokit/plugin-throttling/@octokit/types": ["@octokit/types@12.6.0", "", { "dependencies": { "@octokit/openapi-types": "^20.0.0" } }, "sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw=="],
|
||||
|
||||
"@octokit/webhooks/@octokit/webhooks-methods": ["@octokit/webhooks-methods@4.1.0", "", {}, "sha512-zoQyKw8h9STNPqtm28UGOYFE7O6D4Il8VJwhAtMHFt2C4L0VQT1qGKLeefUOqHNs1mNRYSadVv7x0z8U2yyeWQ=="],
|
||||
|
||||
"bun-tracestrings/typescript": ["typescript@5.8.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ=="],
|
||||
|
||||
"camel-case/no-case": ["no-case@2.3.2", "", { "dependencies": { "lower-case": "^1.1.1" } }, "sha512-rmTZ9kz+f3rCvK2TD1Ue/oZlns7OGoIWP4fc3llxxRXlOkHKoWPPWJOfFYpITabSow43QJbRIoHQXtt10VldyQ=="],
|
||||
|
||||
"change-case/camel-case": ["camel-case@4.1.2", "", { "dependencies": { "pascal-case": "^3.1.2", "tslib": "^2.0.3" } }, "sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw=="],
|
||||
|
||||
"change-case/param-case": ["param-case@3.0.4", "", { "dependencies": { "dot-case": "^3.0.4", "tslib": "^2.0.3" } }, "sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A=="],
|
||||
|
||||
"constant-case/upper-case": ["upper-case@2.0.2", "", { "dependencies": { "tslib": "^2.0.3" } }, "sha512-KgdgDGJt2TpuwBUIjgG6lzw2GWFRCW9Qkfkiv0DxqHHLYJHmtmdUIKcZd8rHgFSjopVTlw6ggzCm1b8MFQwikg=="],
|
||||
|
||||
"param-case/no-case": ["no-case@2.3.2", "", { "dependencies": { "lower-case": "^1.1.1" } }, "sha512-rmTZ9kz+f3rCvK2TD1Ue/oZlns7OGoIWP4fc3llxxRXlOkHKoWPPWJOfFYpITabSow43QJbRIoHQXtt10VldyQ=="],
|
||||
|
||||
"@octokit/app/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@20.0.0", "", {}, "sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA=="],
|
||||
|
||||
"@octokit/auth-unauthenticated/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@20.0.0", "", {}, "sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA=="],
|
||||
|
||||
"@octokit/plugin-throttling/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@20.0.0", "", {}, "sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA=="],
|
||||
|
||||
"camel-case/no-case/lower-case": ["lower-case@1.1.4", "", {}, "sha512-2Fgx1Ycm599x+WGpIYwJOvsjmXFzTSc34IwDWALRA/8AopUKAVPwfJ+h5+f85BCp0PWmmJcWzEpxOpoXycMpdA=="],
|
||||
|
||||
"param-case/no-case/lower-case": ["lower-case@1.1.4", "", {}, "sha512-2Fgx1Ycm599x+WGpIYwJOvsjmXFzTSc34IwDWALRA/8AopUKAVPwfJ+h5+f85BCp0PWmmJcWzEpxOpoXycMpdA=="],
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,6 +7,3 @@
|
||||
# Instead, we can only scan the test directory for Bun's runtime tests
|
||||
root = "test"
|
||||
preload = "./test/preload.ts"
|
||||
|
||||
[install]
|
||||
linker = "isolated"
|
||||
|
||||
@@ -95,18 +95,13 @@ if(LINUX)
|
||||
optionx(ENABLE_VALGRIND BOOL "If Valgrind support should be enabled" DEFAULT OFF)
|
||||
endif()
|
||||
|
||||
if(DEBUG AND ((APPLE AND ARCH STREQUAL "aarch64") OR LINUX))
|
||||
if(DEBUG AND APPLE AND ARCH STREQUAL "aarch64")
|
||||
set(DEFAULT_ASAN ON)
|
||||
else()
|
||||
set(DEFAULT_ASAN OFF)
|
||||
endif()
|
||||
|
||||
optionx(ENABLE_ASAN BOOL "If ASAN support should be enabled" DEFAULT ${DEFAULT_ASAN})
|
||||
optionx(ENABLE_ZIG_ASAN BOOL "If Zig ASAN support should be enabled" DEFAULT ${ENABLE_ASAN})
|
||||
|
||||
if (NOT ENABLE_ASAN)
|
||||
set(ENABLE_ZIG_ASAN OFF)
|
||||
endif()
|
||||
|
||||
if(RELEASE AND LINUX AND CI AND NOT ENABLE_ASSERTIONS AND NOT ENABLE_ASAN)
|
||||
set(DEFAULT_LTO ON)
|
||||
@@ -144,10 +139,10 @@ endif()
|
||||
optionx(REVISION STRING "The git revision of the build" DEFAULT ${DEFAULT_REVISION})
|
||||
|
||||
# Used in process.version, process.versions.node, napi, and elsewhere
|
||||
setx(NODEJS_VERSION "24.3.0")
|
||||
optionx(NODEJS_VERSION STRING "The version of Node.js to report" DEFAULT "24.3.0")
|
||||
|
||||
# Used in process.versions.modules and compared while loading V8 modules
|
||||
setx(NODEJS_ABI_VERSION "137")
|
||||
optionx(NODEJS_ABI_VERSION STRING "The ABI version of Node.js to report" DEFAULT "137")
|
||||
|
||||
if(APPLE)
|
||||
set(DEFAULT_STATIC_SQLITE OFF)
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
src/bake/bake.bind.ts
|
||||
src/bake/bake.d.ts
|
||||
src/bake/bake.private.d.ts
|
||||
src/bake/bun-framework-react/index.ts
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
src/bake.bind.ts
|
||||
src/bake/bake.bind.ts
|
||||
src/bake/DevServer.bind.ts
|
||||
src/bun.js/api/BunObject.bind.ts
|
||||
src/bun.js/bindgen_test.bind.ts
|
||||
|
||||
@@ -350,7 +350,6 @@ src/bun.js/bindings/webcore/JSTextEncoderStream.cpp
|
||||
src/bun.js/bindings/webcore/JSTransformStream.cpp
|
||||
src/bun.js/bindings/webcore/JSTransformStreamDefaultController.cpp
|
||||
src/bun.js/bindings/webcore/JSURLSearchParams.cpp
|
||||
src/bun.js/bindings/webcore/JSWasmStreamingCompiler.cpp
|
||||
src/bun.js/bindings/webcore/JSWebSocket.cpp
|
||||
src/bun.js/bindings/webcore/JSWorker.cpp
|
||||
src/bun.js/bindings/webcore/JSWorkerOptions.cpp
|
||||
|
||||
@@ -8,14 +8,11 @@ src/codegen/bundle-functions.ts
|
||||
src/codegen/bundle-modules.ts
|
||||
src/codegen/class-definitions.ts
|
||||
src/codegen/client-js.ts
|
||||
src/codegen/cppbind.ts
|
||||
src/codegen/create-hash-table.ts
|
||||
src/codegen/generate-classes.ts
|
||||
src/codegen/generate-compact-string-table.ts
|
||||
src/codegen/generate-js2native.ts
|
||||
src/codegen/generate-jssink.ts
|
||||
src/codegen/generate-node-errors.ts
|
||||
src/codegen/helpers.ts
|
||||
src/codegen/internal-module-registry-scanner.ts
|
||||
src/codegen/replacements.ts
|
||||
src/codegen/shared-types.ts
|
||||
|
||||
@@ -29,7 +29,6 @@ src/js/builtins/TransformStream.ts
|
||||
src/js/builtins/TransformStreamDefaultController.ts
|
||||
src/js/builtins/TransformStreamInternals.ts
|
||||
src/js/builtins/UtilInspect.ts
|
||||
src/js/builtins/WasmStreaming.ts
|
||||
src/js/builtins/WritableStreamDefaultController.ts
|
||||
src/js/builtins/WritableStreamDefaultWriter.ts
|
||||
src/js/builtins/WritableStreamInternals.ts
|
||||
|
||||
@@ -1,17 +1,15 @@
|
||||
src/allocators.zig
|
||||
src/allocators/AllocationScope.zig
|
||||
src/allocators/basic.zig
|
||||
src/allocators/LinuxMemFdAllocator.zig
|
||||
src/allocators/MaxHeapAllocator.zig
|
||||
src/allocators/linux_memfd_allocator.zig
|
||||
src/allocators/max_heap_allocator.zig
|
||||
src/allocators/memory_allocator.zig
|
||||
src/allocators/MemoryReportingAllocator.zig
|
||||
src/allocators/mimalloc_arena.zig
|
||||
src/allocators/mimalloc.zig
|
||||
src/allocators/MimallocArena.zig
|
||||
src/allocators/NullableAllocator.zig
|
||||
src/analytics.zig
|
||||
src/analytics/schema.zig
|
||||
src/analytics/analytics_schema.zig
|
||||
src/analytics/analytics_thread.zig
|
||||
src/api/schema.zig
|
||||
src/asan.zig
|
||||
src/ast.zig
|
||||
src/ast/Ast.zig
|
||||
src/ast/ASTMemoryAllocator.zig
|
||||
src/ast/B.zig
|
||||
@@ -35,30 +33,20 @@ src/ast/UseDirective.zig
|
||||
src/async/posix_event_loop.zig
|
||||
src/async/stub_event_loop.zig
|
||||
src/async/windows_event_loop.zig
|
||||
src/bake.zig
|
||||
src/baby_list.zig
|
||||
src/bake/bake.zig
|
||||
src/bake/DevServer.zig
|
||||
src/bake/DevServer/Assets.zig
|
||||
src/bake/DevServer/DirectoryWatchStore.zig
|
||||
src/bake/DevServer/ErrorReportRequest.zig
|
||||
src/bake/DevServer/HmrSocket.zig
|
||||
src/bake/DevServer/HotReloadEvent.zig
|
||||
src/bake/DevServer/IncrementalGraph.zig
|
||||
src/bake/DevServer/memory_cost.zig
|
||||
src/bake/DevServer/PackedMap.zig
|
||||
src/bake/DevServer/RouteBundle.zig
|
||||
src/bake/DevServer/SerializedFailure.zig
|
||||
src/bake/DevServer/SourceMapStore.zig
|
||||
src/bake/DevServer/WatcherAtomics.zig
|
||||
src/bake/FrameworkRouter.zig
|
||||
src/bake/production.zig
|
||||
src/base64/base64.zig
|
||||
src/bit_set.zig
|
||||
src/bits.zig
|
||||
src/boringssl.zig
|
||||
src/brotli.zig
|
||||
src/btjs.zig
|
||||
src/bun.js.zig
|
||||
src/bun_js.zig
|
||||
src/bun.js/api.zig
|
||||
src/bun.js/api/bun/dns.zig
|
||||
src/bun.js/api/bun/dns_resolver.zig
|
||||
src/bun.js/api/bun/h2_frame_parser.zig
|
||||
src/bun.js/api/bun/lshpack.zig
|
||||
src/bun.js/api/bun/process.zig
|
||||
@@ -108,7 +96,6 @@ src/bun.js/api/Timer/EventLoopTimer.zig
|
||||
src/bun.js/api/Timer/ImmediateObject.zig
|
||||
src/bun.js/api/Timer/TimeoutObject.zig
|
||||
src/bun.js/api/Timer/TimerObjectInternals.zig
|
||||
src/bun.js/api/Timer/WTFTimer.zig
|
||||
src/bun.js/api/TOMLObject.zig
|
||||
src/bun.js/api/UnsafeObject.zig
|
||||
src/bun.js/bindgen_test.zig
|
||||
@@ -255,6 +242,7 @@ src/bun.js/test/jest.zig
|
||||
src/bun.js/test/pretty_format.zig
|
||||
src/bun.js/test/snapshot.zig
|
||||
src/bun.js/test/test.zig
|
||||
src/bun.js/unbounded_queue.zig
|
||||
src/bun.js/uuid.zig
|
||||
src/bun.js/virtual_machine_exports.zig
|
||||
src/bun.js/VirtualMachine.zig
|
||||
@@ -293,6 +281,7 @@ src/bun.js/webcore/streams.zig
|
||||
src/bun.js/webcore/TextDecoder.zig
|
||||
src/bun.js/webcore/TextEncoder.zig
|
||||
src/bun.js/webcore/TextEncoderStreamEncoder.zig
|
||||
src/bun.js/WTFTimer.zig
|
||||
src/bun.zig
|
||||
src/bundler/AstBuilder.zig
|
||||
src/bundler/bundle_v2.zig
|
||||
@@ -316,14 +305,12 @@ src/bundler/linker_context/generateCodeForLazyExport.zig
|
||||
src/bundler/linker_context/generateCompileResultForCssChunk.zig
|
||||
src/bundler/linker_context/generateCompileResultForHtmlChunk.zig
|
||||
src/bundler/linker_context/generateCompileResultForJSChunk.zig
|
||||
src/bundler/linker_context/OutputFileListBuilder.zig
|
||||
src/bundler/linker_context/postProcessCSSChunk.zig
|
||||
src/bundler/linker_context/postProcessHTMLChunk.zig
|
||||
src/bundler/linker_context/postProcessJSChunk.zig
|
||||
src/bundler/linker_context/prepareCssAstsForChunk.zig
|
||||
src/bundler/linker_context/renameSymbolsInChunk.zig
|
||||
src/bundler/linker_context/scanImportsAndExports.zig
|
||||
src/bundler/linker_context/StaticRouteVisitor.zig
|
||||
src/bundler/linker_context/writeOutputFilesToDisk.zig
|
||||
src/bundler/LinkerContext.zig
|
||||
src/bundler/LinkerGraph.zig
|
||||
@@ -356,11 +343,9 @@ src/cli/pack_command.zig
|
||||
src/cli/package_manager_command.zig
|
||||
src/cli/patch_command.zig
|
||||
src/cli/patch_commit_command.zig
|
||||
src/cli/pm_pkg_command.zig
|
||||
src/cli/pm_trusted_command.zig
|
||||
src/cli/pm_version_command.zig
|
||||
src/cli/pm_view_command.zig
|
||||
src/cli/pm_why_command.zig
|
||||
src/cli/publish_command.zig
|
||||
src/cli/remove_command.zig
|
||||
src/cli/run_command.zig
|
||||
@@ -369,15 +354,8 @@ src/cli/test_command.zig
|
||||
src/cli/test/Scanner.zig
|
||||
src/cli/unlink_command.zig
|
||||
src/cli/update_command.zig
|
||||
src/cli/update_interactive_command.zig
|
||||
src/cli/upgrade_command.zig
|
||||
src/cli/why_command.zig
|
||||
src/codegen/process_windows_translate_c.zig
|
||||
src/collections.zig
|
||||
src/collections/baby_list.zig
|
||||
src/collections/bit_set.zig
|
||||
src/collections/hive_array.zig
|
||||
src/collections/multi_array_list.zig
|
||||
src/compile_target.zig
|
||||
src/comptime_string_map.zig
|
||||
src/copy_file.zig
|
||||
@@ -526,19 +504,23 @@ src/env.zig
|
||||
src/errno/darwin_errno.zig
|
||||
src/errno/linux_errno.zig
|
||||
src/errno/windows_errno.zig
|
||||
src/exact_size_matcher.zig
|
||||
src/fd.zig
|
||||
src/feature_flags.zig
|
||||
src/fmt.zig
|
||||
src/fs.zig
|
||||
src/fs/stat_hash.zig
|
||||
src/futex.zig
|
||||
src/generated_perf_trace_events.zig
|
||||
src/generated_versions_list.zig
|
||||
src/glob.zig
|
||||
src/glob/GlobWalker.zig
|
||||
src/glob/match.zig
|
||||
src/Global.zig
|
||||
src/grapheme.zig
|
||||
src/heap_breakdown.zig
|
||||
src/highway.zig
|
||||
src/hive_array.zig
|
||||
src/hmac.zig
|
||||
src/HTMLScanner.zig
|
||||
src/http.zig
|
||||
@@ -546,7 +528,6 @@ src/http/AsyncHTTP.zig
|
||||
src/http/CertificateInfo.zig
|
||||
src/http/Decompressor.zig
|
||||
src/http/Encoding.zig
|
||||
src/http/ETag.zig
|
||||
src/http/FetchRedirect.zig
|
||||
src/http/HeaderBuilder.zig
|
||||
src/http/Headers.zig
|
||||
@@ -557,7 +538,6 @@ src/http/HTTPThread.zig
|
||||
src/http/InitError.zig
|
||||
src/http/InternalState.zig
|
||||
src/http/Method.zig
|
||||
src/http/mime_type_list_enum.zig
|
||||
src/http/MimeType.zig
|
||||
src/http/ProxyTunnel.zig
|
||||
src/http/SendFile.zig
|
||||
@@ -583,7 +563,6 @@ src/install/install_binding.zig
|
||||
src/install/install.zig
|
||||
src/install/integrity.zig
|
||||
src/install/isolated_install.zig
|
||||
src/install/isolated_install/FileCopier.zig
|
||||
src/install/isolated_install/Hardlinker.zig
|
||||
src/install/isolated_install/Installer.zig
|
||||
src/install/isolated_install/Store.zig
|
||||
@@ -634,11 +613,6 @@ src/install/resolvers/folder_resolver.zig
|
||||
src/install/versioned_url.zig
|
||||
src/install/windows-shim/BinLinkingShim.zig
|
||||
src/install/windows-shim/bun_shim_impl.zig
|
||||
src/install/yarn.zig
|
||||
src/interchange.zig
|
||||
src/interchange/json.zig
|
||||
src/interchange/toml.zig
|
||||
src/interchange/toml/lexer.zig
|
||||
src/io/heap.zig
|
||||
src/io/io.zig
|
||||
src/io/MaxBuf.zig
|
||||
@@ -647,12 +621,14 @@ src/io/PipeReader.zig
|
||||
src/io/pipes.zig
|
||||
src/io/PipeWriter.zig
|
||||
src/io/source.zig
|
||||
src/js_ast.zig
|
||||
src/js_lexer_tables.zig
|
||||
src/js_lexer.zig
|
||||
src/js_lexer/identifier.zig
|
||||
src/js_parser.zig
|
||||
src/js_printer.zig
|
||||
src/jsc_stub.zig
|
||||
src/json_parser.zig
|
||||
src/libarchive/libarchive-bindings.zig
|
||||
src/libarchive/libarchive.zig
|
||||
src/linear_fifo.zig
|
||||
@@ -664,6 +640,8 @@ src/main_test.zig
|
||||
src/main_wasm.zig
|
||||
src/main.zig
|
||||
src/meta.zig
|
||||
src/multi_array_list.zig
|
||||
src/Mutex.zig
|
||||
src/napi/napi.zig
|
||||
src/node_fallbacks.zig
|
||||
src/open.zig
|
||||
@@ -675,7 +653,6 @@ src/paths.zig
|
||||
src/paths/EnvPath.zig
|
||||
src/paths/path_buffer_pool.zig
|
||||
src/paths/Path.zig
|
||||
src/pe.zig
|
||||
src/perf.zig
|
||||
src/pool.zig
|
||||
src/Progress.zig
|
||||
@@ -705,9 +682,6 @@ src/s3/multipart_options.zig
|
||||
src/s3/multipart.zig
|
||||
src/s3/simple_request.zig
|
||||
src/s3/storage_class.zig
|
||||
src/safety.zig
|
||||
src/safety/alloc_ptr.zig
|
||||
src/safety/CriticalSection.zig
|
||||
src/semver.zig
|
||||
src/semver/ExternalString.zig
|
||||
src/semver/SemverObject.zig
|
||||
@@ -841,40 +815,30 @@ src/sql/postgres/types/PostgresString.zig
|
||||
src/sql/postgres/types/Tag.zig
|
||||
src/StandaloneModuleGraph.zig
|
||||
src/StaticHashMap.zig
|
||||
src/string_immutable.zig
|
||||
src/string_types.zig
|
||||
src/string.zig
|
||||
src/string/escapeHTML.zig
|
||||
src/string/HashedString.zig
|
||||
src/string/immutable.zig
|
||||
src/string/immutable/escapeHTML.zig
|
||||
src/string/immutable/exact_size_matcher.zig
|
||||
src/string/immutable/grapheme.zig
|
||||
src/string/immutable/paths.zig
|
||||
src/string/immutable/unicode.zig
|
||||
src/string/immutable/visible.zig
|
||||
src/string/MutableString.zig
|
||||
src/string/paths.zig
|
||||
src/string/PathString.zig
|
||||
src/string/SmolStr.zig
|
||||
src/string/StringBuilder.zig
|
||||
src/string/StringJoiner.zig
|
||||
src/string/unicode.zig
|
||||
src/string/visible.zig
|
||||
src/string/WTFStringImpl.zig
|
||||
src/sync.zig
|
||||
src/sys_uv.zig
|
||||
src/sys.zig
|
||||
src/sys/coreutils_error_map.zig
|
||||
src/sys/Error.zig
|
||||
src/sys/File.zig
|
||||
src/sys/libuv_error_map.zig
|
||||
src/system_timer.zig
|
||||
src/test/fixtures.zig
|
||||
src/test/recover.zig
|
||||
src/threading.zig
|
||||
src/threading/channel.zig
|
||||
src/threading/Condition.zig
|
||||
src/threading/Futex.zig
|
||||
src/threading/guarded_value.zig
|
||||
src/threading/Mutex.zig
|
||||
src/threading/ThreadPool.zig
|
||||
src/threading/unbounded_queue.zig
|
||||
src/threading/WaitGroup.zig
|
||||
src/thread_pool.zig
|
||||
src/tmp.zig
|
||||
src/toml/toml_lexer.zig
|
||||
src/toml/toml_parser.zig
|
||||
src/tracy.zig
|
||||
src/trait.zig
|
||||
src/transpiler.zig
|
||||
|
||||
@@ -255,10 +255,6 @@ set(BUN_ZIG_GENERATED_CLASSES_SCRIPT ${CWD}/src/codegen/generate-classes.ts)
|
||||
|
||||
absolute_sources(BUN_ZIG_GENERATED_CLASSES_SOURCES ${CWD}/cmake/sources/ZigGeneratedClassesSources.txt)
|
||||
|
||||
# hand written cpp source files. Full list of "source" code (including codegen) is in BUN_CPP_SOURCES
|
||||
absolute_sources(BUN_CXX_SOURCES ${CWD}/cmake/sources/CxxSources.txt)
|
||||
absolute_sources(BUN_C_SOURCES ${CWD}/cmake/sources/CSources.txt)
|
||||
|
||||
set(BUN_ZIG_GENERATED_CLASSES_OUTPUTS
|
||||
${CODEGEN_PATH}/ZigGeneratedClasses.h
|
||||
${CODEGEN_PATH}/ZigGeneratedClasses.cpp
|
||||
@@ -312,27 +308,6 @@ set(BUN_JAVASCRIPT_OUTPUTS
|
||||
${CWD}/src/bun.js/bindings/GeneratedJS2Native.zig
|
||||
)
|
||||
|
||||
set(BUN_CPP_OUTPUTS
|
||||
${CODEGEN_PATH}/cpp.zig
|
||||
)
|
||||
|
||||
register_command(
|
||||
TARGET
|
||||
bun-cppbind
|
||||
COMMENT
|
||||
"Generating C++ --> Zig bindings"
|
||||
COMMAND
|
||||
${BUN_EXECUTABLE}
|
||||
${CWD}/src/codegen/cppbind.ts
|
||||
${CWD}/src
|
||||
${CODEGEN_PATH}
|
||||
SOURCES
|
||||
${BUN_JAVASCRIPT_CODEGEN_SOURCES}
|
||||
${BUN_CXX_SOURCES}
|
||||
OUTPUTS
|
||||
${BUN_CPP_OUTPUTS}
|
||||
)
|
||||
|
||||
register_command(
|
||||
TARGET
|
||||
bun-js-modules
|
||||
@@ -562,7 +537,6 @@ set(BUN_ZIG_GENERATED_SOURCES
|
||||
${BUN_ERROR_CODE_OUTPUTS}
|
||||
${BUN_ZIG_GENERATED_CLASSES_OUTPUTS}
|
||||
${BUN_JAVASCRIPT_OUTPUTS}
|
||||
${BUN_CPP_OUTPUTS}
|
||||
)
|
||||
|
||||
# In debug builds, these are not embedded, but rather referenced at runtime.
|
||||
@@ -618,7 +592,7 @@ register_command(
|
||||
-Doptimize=${ZIG_OPTIMIZE}
|
||||
-Dcpu=${ZIG_CPU}
|
||||
-Denable_logs=$<IF:$<BOOL:${ENABLE_LOGS}>,true,false>
|
||||
-Denable_asan=$<IF:$<BOOL:${ENABLE_ZIG_ASAN}>,true,false>
|
||||
-Denable_asan=$<IF:$<BOOL:${ENABLE_ASAN}>,true,false>
|
||||
-Dversion=${VERSION}
|
||||
-Dreported_nodejs_version=${NODEJS_VERSION}
|
||||
-Dcanary=${CANARY_REVISION}
|
||||
@@ -632,7 +606,6 @@ register_command(
|
||||
TARGETS
|
||||
clone-zig
|
||||
clone-zstd
|
||||
bun-cppbind
|
||||
SOURCES
|
||||
${BUN_ZIG_SOURCES}
|
||||
${BUN_ZIG_GENERATED_SOURCES}
|
||||
@@ -645,6 +618,10 @@ set_property(DIRECTORY APPEND PROPERTY CMAKE_CONFIGURE_DEPENDS "build.zig")
|
||||
|
||||
set(BUN_USOCKETS_SOURCE ${CWD}/packages/bun-usockets)
|
||||
|
||||
# hand written cpp source files. Full list of "source" code (including codegen) is in BUN_CPP_SOURCES
|
||||
absolute_sources(BUN_CXX_SOURCES ${CWD}/cmake/sources/CxxSources.txt)
|
||||
absolute_sources(BUN_C_SOURCES ${CWD}/cmake/sources/CSources.txt)
|
||||
|
||||
if(WIN32)
|
||||
list(APPEND BUN_CXX_SOURCES ${CWD}/src/bun.js/bindings/windows/rescle.cpp)
|
||||
list(APPEND BUN_CXX_SOURCES ${CWD}/src/bun.js/bindings/windows/rescle-binding.cpp)
|
||||
@@ -708,7 +685,7 @@ if(WIN32)
|
||||
${CODEGEN_PATH}/windows-app-info.rc
|
||||
@ONLY
|
||||
)
|
||||
set(WINDOWS_RESOURCES ${CODEGEN_PATH}/windows-app-info.rc ${CWD}/src/bun.exe.manifest)
|
||||
set(WINDOWS_RESOURCES ${CODEGEN_PATH}/windows-app-info.rc)
|
||||
endif()
|
||||
|
||||
# --- Executable ---
|
||||
@@ -981,16 +958,6 @@ if(APPLE)
|
||||
-Wl,-map,${bun}.linker-map
|
||||
)
|
||||
|
||||
if(DEBUG)
|
||||
target_link_options(${bun} PUBLIC
|
||||
# Suppress ALL linker warnings on macOS.
|
||||
# The intent is to only suppress linker alignment warnings.
|
||||
# As of July 21st, 2025 there doesn't seem to be a more specific suppression just for linker alignment warnings.
|
||||
# If you find one, please update this to only be for linker alignment.
|
||||
-Wl,-w
|
||||
)
|
||||
endif()
|
||||
|
||||
# don't strip in debug, this seems to be needed so that the Zig std library
|
||||
# `*dbHelper` DWARF symbols (used by LLDB for pretty printing) are in the
|
||||
# output executable
|
||||
@@ -1034,6 +1001,7 @@ if(LINUX)
|
||||
--ld-path=${LLD_PROGRAM}
|
||||
-fno-pic
|
||||
-Wl,-no-pie
|
||||
-Wl,-icf=safe
|
||||
-Wl,--as-needed
|
||||
-Wl,-z,stack-size=12800000
|
||||
-Wl,--compress-debug-sections=zlib
|
||||
@@ -1059,13 +1027,6 @@ if(LINUX)
|
||||
-Wl,--gc-sections
|
||||
)
|
||||
endif()
|
||||
|
||||
if (NOT DEBUG AND NOT ENABLE_ASAN)
|
||||
target_link_options(${bun} PUBLIC
|
||||
-Wl,-icf=safe
|
||||
)
|
||||
endif()
|
||||
|
||||
endif()
|
||||
|
||||
# --- Symbols list ---
|
||||
|
||||
@@ -4,7 +4,7 @@ register_repository(
|
||||
REPOSITORY
|
||||
HdrHistogram/HdrHistogram_c
|
||||
COMMIT
|
||||
8dcce8f68512fca460b171bccc3a5afce0048779
|
||||
652d51bcc36744fd1a6debfeb1a8a5f58b14022c
|
||||
)
|
||||
|
||||
register_cmake_command(
|
||||
|
||||
@@ -4,7 +4,7 @@ register_repository(
|
||||
REPOSITORY
|
||||
libarchive/libarchive
|
||||
COMMIT
|
||||
9525f90ca4bd14c7b335e2f8c84a4607b0af6bdf
|
||||
898dc8319355b7e985f68a9819f182aaed61b53a
|
||||
)
|
||||
|
||||
register_cmake_command(
|
||||
@@ -20,14 +20,11 @@ register_cmake_command(
|
||||
-DENABLE_WERROR=OFF
|
||||
-DENABLE_BZip2=OFF
|
||||
-DENABLE_CAT=OFF
|
||||
-DENABLE_CPIO=OFF
|
||||
-DENABLE_UNZIP=OFF
|
||||
-DENABLE_EXPAT=OFF
|
||||
-DENABLE_ICONV=OFF
|
||||
-DENABLE_LIBB2=OFF
|
||||
-DENABLE_LibGCC=OFF
|
||||
-DENABLE_LIBXML2=OFF
|
||||
-DENABLE_WIN32_XMLLITE=OFF
|
||||
-DENABLE_LZ4=OFF
|
||||
-DENABLE_LZMA=OFF
|
||||
-DENABLE_LZO=OFF
|
||||
|
||||
@@ -4,7 +4,7 @@ register_repository(
|
||||
REPOSITORY
|
||||
cloudflare/lol-html
|
||||
COMMIT
|
||||
d64457d9ff0143deef025d5df7e8586092b9afb7
|
||||
67f1d4ffd6b74db7e053fb129dcce620193c180d
|
||||
)
|
||||
|
||||
set(LOLHTML_CWD ${VENDOR_PATH}/lolhtml/c-api)
|
||||
|
||||
@@ -4,7 +4,7 @@ register_repository(
|
||||
REPOSITORY
|
||||
oven-sh/mimalloc
|
||||
COMMIT
|
||||
178534eeb7c0b4e2f438b513640c6f4d7338416a
|
||||
1beadf9651a7bfdec6b5367c380ecc3fe1c40d1a
|
||||
)
|
||||
|
||||
set(MIMALLOC_CMAKE_ARGS
|
||||
@@ -13,47 +13,14 @@ set(MIMALLOC_CMAKE_ARGS
|
||||
-DMI_BUILD_SHARED=OFF
|
||||
-DMI_BUILD_TESTS=OFF
|
||||
-DMI_USE_CXX=ON
|
||||
-DMI_OVERRIDE=OFF
|
||||
-DMI_OSX_ZONE=OFF
|
||||
-DMI_OSX_INTERPOSE=OFF
|
||||
-DMI_SKIP_COLLECT_ON_EXIT=ON
|
||||
|
||||
# ```
|
||||
# ❯ mimalloc_allow_large_os_pages=0 BUN_PORT=3004 mem bun http-hello.js
|
||||
# Started development server: http://localhost:3004
|
||||
#
|
||||
# Peak memory usage: 52 MB
|
||||
#
|
||||
# ❯ mimalloc_allow_large_os_pages=1 BUN_PORT=3004 mem bun http-hello.js
|
||||
# Started development server: http://localhost:3004
|
||||
#
|
||||
# Peak memory usage: 74 MB
|
||||
# ```
|
||||
#
|
||||
# ```
|
||||
# ❯ mimalloc_allow_large_os_pages=1 mem bun --eval 1
|
||||
#
|
||||
# Peak memory usage: 52 MB
|
||||
#
|
||||
# ❯ mimalloc_allow_large_os_pages=0 mem bun --eval 1
|
||||
#
|
||||
# Peak memory usage: 30 MB
|
||||
# ```
|
||||
-DMI_NO_THP=1
|
||||
)
|
||||
|
||||
if(ENABLE_ASAN)
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_TRACK_ASAN=ON)
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OVERRIDE=OFF)
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OSX_ZONE=OFF)
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OSX_INTERPOSE=OFF)
|
||||
elseif(APPLE OR LINUX)
|
||||
# Enable static override when ASAN is not enabled
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OVERRIDE=ON)
|
||||
if(APPLE)
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OSX_ZONE=ON)
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OSX_INTERPOSE=ON)
|
||||
else()
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OSX_ZONE=OFF)
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OSX_INTERPOSE=OFF)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(DEBUG)
|
||||
@@ -64,7 +31,13 @@ if(ENABLE_VALGRIND)
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_VALGRIND=ON)
|
||||
endif()
|
||||
|
||||
if(DEBUG)
|
||||
if(WIN32)
|
||||
if(DEBUG)
|
||||
set(MIMALLOC_LIBRARY mimalloc-static-debug)
|
||||
else()
|
||||
set(MIMALLOC_LIBRARY mimalloc-static)
|
||||
endif()
|
||||
elseif(DEBUG)
|
||||
if (ENABLE_ASAN)
|
||||
set(MIMALLOC_LIBRARY mimalloc-asan-debug)
|
||||
else()
|
||||
@@ -80,7 +53,6 @@ if(APPLE OR (LINUX AND NOT DEBUG))
|
||||
set(MIMALLOC_LIBRARY CMakeFiles/mimalloc-obj.dir/src/static.c.o)
|
||||
endif()
|
||||
|
||||
|
||||
register_cmake_command(
|
||||
TARGET
|
||||
mimalloc
|
||||
|
||||
@@ -2,7 +2,7 @@ option(WEBKIT_VERSION "The version of WebKit to use")
|
||||
option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading")
|
||||
|
||||
if(NOT WEBKIT_VERSION)
|
||||
set(WEBKIT_VERSION 642e2252f6298387edb6d2f991a0408fd0320466)
|
||||
set(WEBKIT_VERSION 1098cc50652ab1eab171f58f7669e19ca6c276ae)
|
||||
endif()
|
||||
|
||||
string(SUBSTRING ${WEBKIT_VERSION} 0 16 WEBKIT_VERSION_PREFIX)
|
||||
|
||||
@@ -20,7 +20,7 @@ else()
|
||||
unsupported(CMAKE_SYSTEM_NAME)
|
||||
endif()
|
||||
|
||||
set(ZIG_COMMIT "edc6229b1fafb1701a25fb4e17114cc756991546")
|
||||
set(ZIG_COMMIT "0a0120fa92cd7f6ab244865688b351df634f0707")
|
||||
optionx(ZIG_TARGET STRING "The zig target to use" DEFAULT ${DEFAULT_ZIG_TARGET})
|
||||
|
||||
if(CMAKE_BUILD_TYPE STREQUAL "Release")
|
||||
|
||||
@@ -2,25 +2,6 @@
|
||||
**Note** — Bun provides a browser- and Node.js-compatible [console](https://developer.mozilla.org/en-US/docs/Web/API/console) global. This page only documents Bun-native APIs.
|
||||
{% /callout %}
|
||||
|
||||
## Object inspection depth
|
||||
|
||||
Bun allows you to configure how deeply nested objects are displayed in `console.log()` output:
|
||||
|
||||
- **CLI flag**: Use `--console-depth <number>` to set the depth for a single run
|
||||
- **Configuration**: Set `console.depth` in your `bunfig.toml` for persistent configuration
|
||||
- **Default**: Objects are inspected to a depth of `2` levels
|
||||
|
||||
```js
|
||||
const nested = { a: { b: { c: { d: "deep" } } } };
|
||||
console.log(nested);
|
||||
// Default (depth 2): { a: { b: [Object] } }
|
||||
// With depth 4: { a: { b: { c: { d: 'deep' } } } }
|
||||
```
|
||||
|
||||
The CLI flag takes precedence over the configuration file setting.
|
||||
|
||||
## Reading from stdin
|
||||
|
||||
In Bun, the `console` object can be used as an `AsyncIterable` to sequentially read lines from `process.stdin`.
|
||||
|
||||
```ts
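// A minimal sketch (assumed usage, not the original example):
// each iteration yields one line read from process.stdin.
for await (const line of console) {
  console.log(`Received: ${line}`);
}
```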
|
||||
|
||||
@@ -164,70 +164,6 @@ Static responses do not allocate additional memory after initialization. You can
|
||||
|
||||
Static route responses are cached for the lifetime of the server object. To reload static routes, call `server.reload(options)`.
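For example, a minimal sketch of reloading a static route (the route paths and response bodies below are illustrative, not taken from this page):

```ts
// Hypothetical routes for illustration only.
const server = Bun.serve({
  routes: {
    "/version": new Response("v1"),
  },
  fetch() {
    return new Response("Not Found", { status: 404 });
  },
});

// Later, swap in a new cached response for "/version" without restarting.
server.reload({
  routes: {
    "/version": new Response("v2"),
  },
  fetch() {
    return new Response("Not Found", { status: 404 });
  },
});
```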
|
||||
|
||||
### File Responses vs Static Responses
|
||||
|
||||
When serving files in routes, there are two distinct behaviors depending on whether you buffer the file content or serve it directly:
|
||||
|
||||
```ts
|
||||
Bun.serve({
|
||||
routes: {
|
||||
// Static route - content is buffered in memory at startup
|
||||
"/logo.png": new Response(await Bun.file("./logo.png").bytes()),
|
||||
|
||||
// File route - content is read from filesystem on each request
|
||||
"/download.zip": new Response(Bun.file("./download.zip")),
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
**Static routes** (`new Response(await file.bytes())`) buffer content in memory at startup:
|
||||
|
||||
- **Zero filesystem I/O** during requests - content served entirely from memory
|
||||
- **ETag support** - Automatically generates and validates ETags for caching
|
||||
- **If-None-Match** - Returns `304 Not Modified` when client ETag matches
|
||||
- **No 404 handling** - Missing files cause startup errors, not runtime 404s
|
||||
- **Memory usage** - Full file content stored in RAM
|
||||
- **Best for**: Small static assets, API responses, frequently accessed files
|
||||
|
||||
**File routes** (`new Response(Bun.file(path))`) read from filesystem per request:
|
||||
|
||||
- **Filesystem reads** on each request - checks file existence and reads content
|
||||
- **Built-in 404 handling** - Returns `404 Not Found` if file doesn't exist or becomes inaccessible
|
||||
- **Last-Modified support** - Uses file modification time for `If-Modified-Since` headers
|
||||
- **If-Modified-Since** - Returns `304 Not Modified` when file hasn't changed since client's cached version
|
||||
- **Range request support** - Automatically handles partial content requests with `Content-Range` headers
|
||||
- **Streaming transfers** - Uses buffered reader with backpressure handling for efficient memory usage
|
||||
- **Memory efficient** - Only buffers small chunks during transfer, not entire file
|
||||
- **Best for**: Large files, dynamic content, user uploads, files that change frequently
|
||||
|
||||
### HTTP Caching Behavior
|
||||
|
||||
Both route types implement HTTP caching standards but with different strategies:
|
||||
|
||||
#### Static Routes Caching
|
||||
|
||||
- **ETag generation**: Automatically computes ETag hash from content at startup
|
||||
- **If-None-Match**: Validates client ETag against server ETag
|
||||
- **304 responses**: Returns `304 Not Modified` with empty body when ETags match
|
||||
- **Cache headers**: Inherits any `Cache-Control` headers you provide in the Response
|
||||
- **Consistency**: ETag remains constant until server restart or route reload
|
||||
|
||||
#### File Routes Caching
|
||||
|
||||
- **Last-Modified**: Uses file's `mtime` for `Last-Modified` header
|
||||
- **If-Modified-Since**: Compares client date with file modification time
|
||||
- **304 responses**: Returns `304 Not Modified` when file unchanged since client's cached version
|
||||
- **Content-Length**: Automatically set based on current file size
|
||||
- **Dynamic validation**: Checks file modification time on each request
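To see both validators in action, you can send conditional requests yourself. The sketch below assumes a server like the earlier example listening on `localhost:3000`; the port and paths are assumptions for illustration:

```ts
// Static route ("/logo.png"): revalidate with If-None-Match.
const logo = await fetch("http://localhost:3000/logo.png");
const etag = logo.headers.get("ETag");
const logoAgain = await fetch("http://localhost:3000/logo.png", {
  headers: etag ? { "If-None-Match": etag } : {},
});
console.log(logoAgain.status); // expected 304 when the ETag matches

// File route ("/download.zip"): revalidate with If-Modified-Since.
const zip = await fetch("http://localhost:3000/download.zip");
const lastModified = zip.headers.get("Last-Modified");
const zipAgain = await fetch("http://localhost:3000/download.zip", {
  headers: lastModified ? { "If-Modified-Since": lastModified } : {},
});
console.log(zipAgain.status); // expected 304 when the file is unchanged
```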
|
||||
|
||||
#### Status Code Handling
|
||||
|
||||
Both route types automatically adjust status codes:
|
||||
|
||||
- **200 → 204**: Empty files (0 bytes) return `204 No Content` instead of `200 OK`
|
||||
- **200 → 304**: Successful cache validation returns `304 Not Modified`
|
||||
- **File routes only**: Missing or inaccessible files return `404 Not Found`
|
||||
|
||||
```ts
|
||||
const server = Bun.serve({
|
||||
static: {
|
||||
|
||||
@@ -274,23 +274,6 @@ If no connection URL is provided, the system checks for the following individual
|
||||
| `PGPASSWORD` | - | (empty) | Database password |
|
||||
| `PGDATABASE` | - | username | Database name |
|
||||
|
||||
## Runtime Preconnection
|
||||
|
||||
Bun can preconnect to PostgreSQL at startup to improve performance by establishing database connections before your application code runs. This is useful for reducing connection latency on the first database query.
|
||||
|
||||
```bash
|
||||
# Enable PostgreSQL preconnection
|
||||
bun --sql-preconnect index.js
|
||||
|
||||
# Works with DATABASE_URL environment variable
|
||||
DATABASE_URL=postgres://user:pass@localhost:5432/db bun --sql-preconnect index.js
|
||||
|
||||
# Can be combined with other runtime flags
|
||||
bun --sql-preconnect --hot index.js
|
||||
```
|
||||
|
||||
The `--sql-preconnect` flag automatically establishes a PostgreSQL connection using your configured environment variables at startup. If the connection fails, it won't crash your application; the error is handled gracefully.
|
||||
|
||||
## Connection Options
|
||||
|
||||
You can configure your database connection manually by passing options to the SQL constructor:
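A minimal sketch (the values are placeholders, and the option names here are assumptions that mirror the environment variables listed above):

```ts
import { SQL } from "bun";

// Placeholder credentials for illustration; use your own.
const sql = new SQL({
  hostname: "localhost",
  port: 5432,
  username: "postgres",
  password: "",
  database: "myapp",
});

const rows = await sql`SELECT 1 AS ok`;
console.log(rows);
```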
|
||||
|
||||
@@ -208,8 +208,8 @@ export class ArrayBufferSink {
|
||||
*
|
||||
* This API might change later to separate Uint8ArraySink and ArrayBufferSink
|
||||
*/
|
||||
flush(): number | Uint8Array<ArrayBuffer> | ArrayBuffer;
|
||||
end(): ArrayBuffer | Uint8Array<ArrayBuffer>;
|
||||
flush(): number | Uint8Array | ArrayBuffer;
|
||||
end(): ArrayBuffer | Uint8Array;
|
||||
}
|
||||
```
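For context, a small usage sketch of the sink API described above (illustrative, not taken from this page): incremental writes, then a final buffer.

```ts
const sink = new Bun.ArrayBufferSink();
sink.start({ asUint8Array: true });

sink.write("hello ");
sink.write("world");

// With asUint8Array: true, end() returns a Uint8Array rather than an ArrayBuffer.
const bytes = sink.end() as Uint8Array;
console.log(new TextDecoder().decode(bytes));
```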
|
||||
|
||||
|
||||
@@ -88,20 +88,6 @@ The order of the `--target` flag does not matter, as long as they're delimited b
|
||||
|
||||
On x64 platforms, Bun uses SIMD optimizations that require a modern CPU supporting AVX2 instructions. The `-baseline` build of Bun is for older CPUs that don't support these optimizations. Normally, when you install Bun, we automatically detect which version to use, but this can be harder when cross-compiling since you might not know the target CPU. You usually don't need to worry about this on Darwin x64, but it is relevant for Windows x64 and Linux x64. If you or your users see `"Illegal instruction"` errors, you might need to use the baseline version.
|
||||
|
||||
## Build-time constants
|
||||
|
||||
Use the `--define` flag to inject build-time constants into your executable, such as version numbers, build timestamps, or configuration values:
|
||||
|
||||
```bash
|
||||
$ bun build --compile --define BUILD_VERSION='"1.2.3"' --define BUILD_TIME='"2024-01-15T10:30:00Z"' src/cli.ts --outfile mycli
|
||||
```
|
||||
|
||||
These constants are embedded directly into your compiled binary at build time, providing zero runtime overhead and enabling dead code elimination optimizations.
|
||||
|
||||
{% callout type="info" %}
|
||||
For comprehensive examples and advanced patterns, see the [Build-time constants guide](/guides/runtime/build-time-constants).
|
||||
{% /callout %}
|
||||
|
||||
## Deploying to production
|
||||
|
||||
Compiled executables reduce memory usage and improve Bun's start time.
|
||||
|
||||
@@ -183,30 +183,6 @@ Bun supports installing dependencies from Git, GitHub, and local or remotely-hos
|
||||
}
|
||||
```
|
||||
|
||||
## Installation strategies
|
||||
|
||||
Bun supports two package installation strategies that determine how dependencies are organized in `node_modules`:
|
||||
|
||||
### Hoisted installs (default for single projects)
|
||||
|
||||
The traditional npm/Yarn approach that flattens dependencies into a shared `node_modules` directory:
|
||||
|
||||
```bash
|
||||
$ bun install --linker hoisted
|
||||
```
|
||||
|
||||
### Isolated installs
|
||||
|
||||
A pnpm-like approach that creates strict dependency isolation to prevent phantom dependencies:
|
||||
|
||||
```bash
|
||||
$ bun install --linker isolated
|
||||
```
|
||||
|
||||
Isolated installs create a central package store in `node_modules/.bun/` with symlinks in the top-level `node_modules`. This ensures packages can only access their declared dependencies.
|
||||
|
||||
For complete documentation on isolated installs, refer to [Package manager > Isolated installs](https://bun.com/docs/install/isolated).
|
||||
|
||||
## Configuration
|
||||
|
||||
The default behavior of `bun install` can be configured in `bunfig.toml`. The default values are shown below.
|
||||
@@ -237,15 +213,11 @@ dryRun = false
|
||||
|
||||
# equivalent to `--concurrent-scripts` flag
|
||||
concurrentScripts = 16 # (cpu count or GOMAXPROCS) x2
|
||||
|
||||
# installation strategy: "hoisted" or "isolated"
|
||||
# default: "hoisted"
|
||||
linker = "hoisted"
|
||||
```
|
||||
|
||||
## CI/CD
|
||||
|
||||
Use the official [`oven-sh/setup-bun`](https://github.com/oven-sh/setup-bun) action to install `bun` in a GitHub Actions pipeline:
|
||||
Looking to speed up your CI? Use the official [`oven-sh/setup-bun`](https://github.com/oven-sh/setup-bun) action to install `bun` in a GitHub Actions pipeline.
|
||||
|
||||
```yaml#.github/workflows/release.yml
|
||||
name: bun-types
|
||||
@@ -264,31 +236,4 @@ jobs:
|
||||
run: bun run build
|
||||
```
|
||||
|
||||
For CI/CD environments that want to enforce reproducible builds, use `bun ci` to fail the build if the package.json is out of sync with the lockfile:
|
||||
|
||||
```bash
|
||||
$ bun ci
|
||||
```
|
||||
|
||||
This is equivalent to `bun install --frozen-lockfile`. It installs exact versions from `bun.lock` and fails if `package.json` doesn't match the lockfile. To use `bun ci` or `bun install --frozen-lockfile`, you must commit `bun.lock` to version control.
|
||||
|
||||
Then, in your CI workflow, run `bun ci` instead of `bun install`:
|
||||
|
||||
```yaml#.github/workflows/release.yml
|
||||
name: bun-types
|
||||
jobs:
|
||||
build:
|
||||
name: build-app
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v4
|
||||
- name: Install bun
|
||||
uses: oven-sh/setup-bun@v2
|
||||
- name: Install dependencies
|
||||
run: bun ci
|
||||
- name: Build app
|
||||
run: bun run build
|
||||
```
|
||||
|
||||
{% bunCLIUsage command="install" /%}
|
||||
|
||||
docs/cli/pm.md
@@ -8,70 +8,15 @@ To create a tarball of the current workspace:
|
||||
$ bun pm pack
|
||||
```
|
||||
|
||||
This command creates a `.tgz` file containing all files that would be published to npm, following the same rules as `npm pack`.
|
||||
Options for the `pack` command:
|
||||
|
||||
## Examples
|
||||
|
||||
Basic usage:
|
||||
|
||||
```bash
|
||||
$ bun pm pack
|
||||
# Creates my-package-1.0.0.tgz in current directory
|
||||
```
|
||||
|
||||
Quiet mode for scripting:
|
||||
|
||||
```bash
|
||||
$ TARBALL=$(bun pm pack --quiet)
|
||||
$ echo "Created: $TARBALL"
|
||||
# Output: Created: my-package-1.0.0.tgz
|
||||
```
|
||||
|
||||
Custom destination:
|
||||
|
||||
```bash
|
||||
$ bun pm pack --destination ./dist
|
||||
# Saves tarball in ./dist/ directory
|
||||
```
|
||||
|
||||
## Options
|
||||
|
||||
- `--dry-run`: Perform all tasks except writing the tarball to disk. Shows what would be included.
|
||||
- `--destination <dir>`: Specify the directory where the tarball will be saved.
|
||||
- `--filename <name>`: Specify an exact file name for the tarball to be saved at.
|
||||
- `--dry-run`: Perform all tasks except writing the tarball to disk.
|
||||
- `--destination`: Specify the directory where the tarball will be saved.
|
||||
- `--filename`: Specify an exact file name for the tarball to be saved at.
|
||||
- `--ignore-scripts`: Skip running pre/postpack and prepare scripts.
|
||||
- `--gzip-level <0-9>`: Set a custom compression level for gzip, ranging from 0 to 9 (default is 9).
|
||||
- `--quiet`: Only output the tarball filename, suppressing verbose output. Ideal for scripts and automation.
|
||||
- `--gzip-level`: Set a custom compression level for gzip, ranging from 0 to 9 (default is 9).
|
||||
|
||||
> **Note:** `--filename` and `--destination` cannot be used at the same time.
|
||||
|
||||
## Output Modes
|
||||
|
||||
**Default output:**
|
||||
|
||||
```bash
|
||||
$ bun pm pack
|
||||
bun pack v1.2.19
|
||||
|
||||
packed 131B package.json
|
||||
packed 40B index.js
|
||||
|
||||
my-package-1.0.0.tgz
|
||||
|
||||
Total files: 2
|
||||
Shasum: f2451d6eb1e818f500a791d9aace80b394258a90
|
||||
Unpacked size: 171B
|
||||
Packed size: 249B
|
||||
```
|
||||
|
||||
**Quiet output:**
|
||||
|
||||
```bash
|
||||
$ bun pm pack --quiet
|
||||
my-package-1.0.0.tgz
|
||||
```
|
||||
|
||||
The `--quiet` flag is particularly useful for automation workflows where you need to capture the generated tarball filename for further processing.
|
||||
> Note `--filename` and `--destination` cannot be used at the same time
|
||||
|
||||
## bin
|
||||
|
||||
@@ -248,38 +193,3 @@ v1.0.1
|
||||
```
|
||||
|
||||
Supports `patch`, `minor`, `major`, `premajor`, `preminor`, `prepatch`, `prerelease`, `from-git`, or specific versions like `1.2.3`. By default, this creates a git commit and tag; pass `--no-git-tag-version` to skip them.
|
||||
|
||||
## pkg
|
||||
|
||||
Manage `package.json` data with get, set, delete, and fix operations.
|
||||
|
||||
All commands support dot and bracket notation:
|
||||
|
||||
```bash
|
||||
scripts.build # dot notation
|
||||
contributors[0] # array access
|
||||
workspaces.0 # dot with numeric index
|
||||
scripts[test:watch] # bracket for special chars
|
||||
```
|
||||
|
||||
Examples:
|
||||
|
||||
```bash
|
||||
# get
|
||||
$ bun pm pkg get name # single property
|
||||
$ bun pm pkg get name version # multiple properties
|
||||
$ bun pm pkg get # entire package.json
|
||||
$ bun pm pkg get scripts.build # nested property
|
||||
|
||||
# set
|
||||
$ bun pm pkg set name="my-package" # simple property
|
||||
$ bun pm pkg set scripts.test="jest" version=2.0.0 # multiple properties
|
||||
$ bun pm pkg set {"private":"true"} --json # JSON values with --json flag
|
||||
|
||||
# delete
|
||||
$ bun pm pkg delete description # single property
|
||||
$ bun pm pkg delete scripts.test contributors[0] # multiple/nested
|
||||
|
||||
# fix
|
||||
$ bun pm pkg fix # auto-fix common issues
|
||||
```
|
||||
|
||||
@@ -185,23 +185,6 @@ This is TypeScript!
|
||||
|
||||
For convenience, all code is treated as TypeScript with JSX support when using `bun run -`.
|
||||
|
||||
## `bun run --console-depth`
|
||||
|
||||
Control the depth of object inspection in console output with the `--console-depth` flag.
|
||||
|
||||
```bash
|
||||
$ bun --console-depth 5 run index.tsx
|
||||
```
|
||||
|
||||
This sets how deeply nested objects are displayed in `console.log()` output. The default depth is `2`. Higher values show more nested properties but may produce verbose output for complex objects.
|
||||
|
||||
```js
|
||||
const nested = { a: { b: { c: { d: "deep" } } } };
|
||||
console.log(nested);
|
||||
// With --console-depth 2 (default): { a: { b: [Object] } }
|
||||
// With --console-depth 4: { a: { b: { c: { d: 'deep' } } } }
|
||||
```
|
||||
|
||||
## `bun run --smol`
|
||||
|
||||
In memory-constrained environments, use the `--smol` flag to reduce memory usage at a cost to performance.
|
||||
|
||||
@@ -158,7 +158,7 @@ See [Test > Lifecycle](https://bun.com/docs/test/lifecycle) for complete documen
|
||||
|
||||
## Mocks
|
||||
|
||||
Create mock functions with the `mock` function.
|
||||
Create mock functions with the `mock` function. Mocks are automatically reset between tests.
|
||||
|
||||
```ts
|
||||
import { test, expect, mock } from "bun:test";
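// Sketch continuing the import above (assumed shape, not the original example):
const random = mock(() => Math.random());

test("random", () => {
  const value = random();
  expect(value).toBeGreaterThanOrEqual(0);
  expect(random).toHaveBeenCalledTimes(1);
});
```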
|
||||
@@ -248,33 +248,4 @@ $ bun test foo
|
||||
|
||||
Any test file in the directory with an _absolute path_ that contains one of the targets will run. Glob patterns are not yet supported. -->
|
||||
|
||||
## AI Agent Integration
|
||||
|
||||
When using Bun's test runner with AI coding assistants, you can enable quieter output to improve readability and reduce context noise. This feature minimizes test output verbosity while preserving essential failure information.
|
||||
|
||||
### Environment Variables
|
||||
|
||||
Set any of the following environment variables to enable AI-friendly output:
|
||||
|
||||
- `CLAUDECODE=1` - For Claude Code
|
||||
- `REPL_ID=1` - For Replit
|
||||
- `AGENT=1` - Generic AI agent flag
|
||||
|
||||
### Behavior
|
||||
|
||||
When an AI agent environment is detected:
|
||||
|
||||
- Only test failures are displayed in detail
|
||||
- Passing, skipped, and todo test indicators are hidden
|
||||
- Summary statistics remain intact
|
||||
|
||||
```bash
|
||||
# Example: Enable quiet output for Claude Code
|
||||
$ CLAUDECODE=1 bun test
|
||||
|
||||
# Still shows failures and summary, but hides verbose passing test output
|
||||
```
|
||||
|
||||
This feature is particularly useful in AI-assisted development workflows where reduced output verbosity improves context efficiency while maintaining visibility into test failures.
|
||||
|
||||
{% bunCLIUsage command="test" /%}
|
||||
|
||||
@@ -10,86 +10,6 @@ To update a specific dependency to the latest version:
|
||||
$ bun update [package]
|
||||
```
|
||||
|
||||
## `--interactive`
|
||||
|
||||
For a more controlled update experience, use the `--interactive` flag to select which packages to update:
|
||||
|
||||
```sh
|
||||
$ bun update --interactive
|
||||
$ bun update -i
|
||||
```
|
||||
|
||||
This launches an interactive terminal interface that shows all outdated packages with their current and target versions. You can then select which packages to update.
|
||||
|
||||
### Interactive Interface
|
||||
|
||||
The interface displays packages grouped by dependency type:
|
||||
|
||||
```
|
||||
? Select packages to update - Space to toggle, Enter to confirm, a to select all, n to select none, i to invert, l to toggle latest
|
||||
|
||||
dependencies Current Target Latest
|
||||
□ react 17.0.2 18.2.0 18.3.1
|
||||
□ lodash 4.17.20 4.17.21 4.17.21
|
||||
|
||||
devDependencies Current Target Latest
|
||||
□ typescript 4.8.0 5.0.0 5.3.3
|
||||
□ @types/node 16.11.7 18.0.0 20.11.5
|
||||
|
||||
optionalDependencies Current Target Latest
|
||||
□ some-optional-package 1.0.0 1.1.0 1.2.0
|
||||
```
|
||||
|
||||
**Sections:**
|
||||
|
||||
- Packages are grouped under section headers: `dependencies`, `devDependencies`, `peerDependencies`, `optionalDependencies`
|
||||
- Each section shows column headers aligned with the package data
|
||||
|
||||
**Columns:**
|
||||
|
||||
- **Package**: Package name (may have suffix like ` dev`, ` peer`, ` optional` for clarity)
|
||||
- **Current**: Currently installed version
|
||||
- **Target**: Version that would be installed (respects semver constraints)
|
||||
- **Latest**: Latest available version
|
||||
|
||||
### Keyboard Controls
|
||||
|
||||
**Selection:**
|
||||
|
||||
- **Space**: Toggle package selection
|
||||
- **Enter**: Confirm selections and update
|
||||
- **a/A**: Select all packages
|
||||
- **n/N**: Select none
|
||||
- **i/I**: Invert selection
|
||||
|
||||
**Navigation:**
|
||||
|
||||
- **↑/↓ Arrow keys** or **j/k**: Move cursor
|
||||
- **l/L**: Toggle between target and latest version for current package
|
||||
|
||||
**Exit:**
|
||||
|
||||
- **Ctrl+C** or **Ctrl+D**: Cancel without updating
|
||||
|
||||
### Visual Indicators
|
||||
|
||||
- **☑** Selected packages (will be updated)
|
||||
- **□** Unselected packages
|
||||
- **>** Current cursor position
|
||||
- **Colors**: Red (major), yellow (minor), green (patch) version changes
|
||||
- **Underlined**: Currently selected update target
|
||||
|
||||
### Package Grouping
|
||||
|
||||
Packages are organized in sections by dependency type:
|
||||
|
||||
- **dependencies** - Regular runtime dependencies
|
||||
- **devDependencies** - Development dependencies
|
||||
- **peerDependencies** - Peer dependencies
|
||||
- **optionalDependencies** - Optional dependencies
|
||||
|
||||
Within each section, individual packages may have additional suffixes (` dev`, ` peer`, ` optional`) for extra clarity.
|
||||
|
||||
## `--latest`
|
||||
|
||||
By default, `bun update` will update to the latest version of a dependency that satisfies the version range specified in your `package.json`.
|
||||
@@ -100,8 +20,6 @@ To update to the latest version, regardless of if it's compatible with the curre
|
||||
$ bun update --latest
|
||||
```
|
||||
|
||||
In interactive mode, you can toggle individual packages between their target version (respecting semver) and latest version using the **l** key.
|
||||
|
||||
For example, with the following `package.json`:
|
||||
|
||||
```json
|
||||
|
||||
@@ -1,67 +0,0 @@
|
||||
The `bun why` command explains why a package is installed in your project by showing the dependency chain that led to its installation.
|
||||
|
||||
## Usage
|
||||
|
||||
```bash
|
||||
$ bun why <package>
|
||||
```
|
||||
|
||||
## Arguments
|
||||
|
||||
- `<package>`: The name of the package to explain. Supports glob patterns like `@org/*` or `*-lodash`.
|
||||
|
||||
## Options
|
||||
|
||||
- `--top`: Show only the top-level dependencies instead of the complete dependency tree.
|
||||
- `--depth <number>`: Maximum depth of the dependency tree to display.
|
||||
|
||||
## Examples
|
||||
|
||||
Check why a specific package is installed:
|
||||
|
||||
```bash
|
||||
$ bun why react
|
||||
react@18.2.0
|
||||
└─ my-app@1.0.0 (requires ^18.0.0)
|
||||
```
|
||||
|
||||
Check why all packages with a specific pattern are installed:
|
||||
|
||||
```bash
|
||||
$ bun why "@types/*"
|
||||
@types/react@18.2.15
|
||||
└─ dev my-app@1.0.0 (requires ^18.0.0)
|
||||
|
||||
@types/react-dom@18.2.7
|
||||
└─ dev my-app@1.0.0 (requires ^18.0.0)
|
||||
```
|
||||
|
||||
Show only top-level dependencies:
|
||||
|
||||
```bash
|
||||
$ bun why express --top
|
||||
express@4.18.2
|
||||
└─ my-app@1.0.0 (requires ^4.18.2)
|
||||
```
|
||||
|
||||
Limit the dependency tree depth:
|
||||
|
||||
```bash
|
||||
$ bun why express --depth 2
|
||||
express@4.18.2
|
||||
└─ express-pollyfill@1.20.1 (requires ^4.18.2)
|
||||
└─ body-parser@1.20.1 (requires ^1.20.1)
|
||||
└─ accepts@1.3.8 (requires ^1.3.8)
|
||||
└─ (deeper dependencies hidden)
|
||||
```
|
||||
|
||||
## Understanding the Output
|
||||
|
||||
The output shows:
|
||||
|
||||
- The package name and version being queried
|
||||
- The dependency chain that led to its installation
|
||||
- The type of dependency (dev, peer, optional, or production)
|
||||
- The version requirement specified in each package's dependencies
|
||||
|
||||
For nested dependencies, the command shows the complete dependency tree by default, with indentation indicating the relationship hierarchy.
|
||||
@@ -40,7 +40,6 @@ Open `prisma/schema.prisma` and add a simple `User` model.
|
||||
```prisma-diff#prisma/schema.prisma
|
||||
generator client {
|
||||
provider = "prisma-client-js"
|
||||
output = "../generated/prisma"
|
||||
}
|
||||
|
||||
datasource db {
|
||||
@@ -79,7 +78,7 @@ migrations/
|
||||
|
||||
Your database is now in sync with your schema.
|
||||
|
||||
✔ Generated Prisma Client (v6.11.1) to ./generated/prisma in 41ms
|
||||
✔ Generated Prisma Client (v5.3.1) to ./node_modules/@prisma/client in 41ms
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
@@ -1,293 +0,0 @@
---
name: Build-time constants with --define
---

The `--define` flag can be used with `bun build` and `bun build --compile` to inject build-time constants into your application. This is especially useful for embedding metadata like build versions, timestamps, or configuration flags directly into your compiled executables.

```sh
$ bun build --compile --define BUILD_VERSION='"1.2.3"' --define BUILD_TIME='"2024-01-15T10:30:00Z"' src/index.ts --outfile myapp
```

---

## Why use build-time constants?

Build-time constants are embedded directly into your compiled code, making them:

- **Zero runtime overhead** - No environment variable lookups or file reads
- **Immutable** - Values are baked into the binary at compile time
- **Optimizable** - Dead code elimination can remove unused branches
- **Secure** - No external dependencies or configuration files to manage

This is similar to `gcc -D` or `#define` in C/C++, but for JavaScript/TypeScript.

---

## Basic usage

### With `bun build`

```sh
# Bundle with build-time constants
$ bun build --define BUILD_VERSION='"1.0.0"' --define NODE_ENV='"production"' src/index.ts --outdir ./dist
```

### With `bun build --compile`

```sh
# Compile to executable with build-time constants
$ bun build --compile --define BUILD_VERSION='"1.0.0"' --define BUILD_TIME='"2024-01-15T10:30:00Z"' src/cli.ts --outfile mycli
```

### JavaScript API

```ts
await Bun.build({
  entrypoints: ["./src/index.ts"],
  outdir: "./dist",
  define: {
    BUILD_VERSION: '"1.0.0"',
    BUILD_TIME: '"2024-01-15T10:30:00Z"',
    DEBUG: "false",
  },
});
```

---

## Common use cases

### Version information

Embed version and build metadata directly into your executable:

{% codetabs %}

```ts#src/version.ts
// These constants are replaced at build time
declare const BUILD_VERSION: string;
declare const BUILD_TIME: string;
declare const GIT_COMMIT: string;

export function getVersion() {
  return {
    version: BUILD_VERSION,
    buildTime: BUILD_TIME,
    commit: GIT_COMMIT,
  };
}
```

```sh#Build command
$ bun build --compile \
  --define BUILD_VERSION='"1.2.3"' \
  --define BUILD_TIME='"2024-01-15T10:30:00Z"' \
  --define GIT_COMMIT='"abc123"' \
  src/cli.ts --outfile mycli
```

{% /codetabs %}

### Feature flags

Use build-time constants to enable/disable features:

```ts
// Replaced at build time
declare const ENABLE_ANALYTICS: boolean;
declare const ENABLE_DEBUG: boolean;

function trackEvent(event: string) {
  if (ENABLE_ANALYTICS) {
    // This entire block is removed if ENABLE_ANALYTICS is false
    console.log("Tracking:", event);
  }
}

if (ENABLE_DEBUG) {
  console.log("Debug mode enabled");
}
```

```sh
# Production build - analytics enabled, debug disabled
$ bun build --compile --define ENABLE_ANALYTICS=true --define ENABLE_DEBUG=false src/app.ts --outfile app-prod

# Development build - analytics disabled, debug enabled
$ bun build --compile --define ENABLE_ANALYTICS=false --define ENABLE_DEBUG=true src/app.ts --outfile app-dev
```

### Configuration

Replace configuration objects at build time:

```ts
declare const CONFIG: {
  apiUrl: string;
  timeout: number;
  retries: number;
};

// CONFIG is replaced with the actual object at build time
const response = await fetch(CONFIG.apiUrl, {
  timeout: CONFIG.timeout,
});
```

```sh
$ bun build --compile --define 'CONFIG={"apiUrl":"https://api.example.com","timeout":5000,"retries":3}' src/app.ts --outfile app
```

---

## Advanced patterns

### Environment-specific builds

Create different executables for different environments:

```json
{
  "scripts": {
    "build:dev": "bun build --compile --define NODE_ENV='\"development\"' --define API_URL='\"http://localhost:3000\"' src/app.ts --outfile app-dev",
    "build:staging": "bun build --compile --define NODE_ENV='\"staging\"' --define API_URL='\"https://staging.example.com\"' src/app.ts --outfile app-staging",
    "build:prod": "bun build --compile --define NODE_ENV='\"production\"' --define API_URL='\"https://api.example.com\"' src/app.ts --outfile app-prod"
  }
}
```

### Using shell commands for dynamic values

Generate build-time constants from shell commands:

```sh
# Use git to get current commit and timestamp
$ bun build --compile \
  --define BUILD_VERSION="\"$(git describe --tags --always)\"" \
  --define BUILD_TIME="\"$(date -u +%Y-%m-%dT%H:%M:%SZ)\"" \
  --define GIT_COMMIT="\"$(git rev-parse HEAD)\"" \
  src/cli.ts --outfile mycli
```

### Build automation script

Create a build script that automatically injects build metadata:

```ts
// build.ts
import { $ } from "bun";

const version = await $`git describe --tags --always`.text();
const buildTime = new Date().toISOString();
const gitCommit = await $`git rev-parse HEAD`.text();

await Bun.build({
  entrypoints: ["./src/cli.ts"],
  outdir: "./dist",
  define: {
    BUILD_VERSION: JSON.stringify(version.trim()),
    BUILD_TIME: JSON.stringify(buildTime),
    GIT_COMMIT: JSON.stringify(gitCommit.trim()),
  },
});

console.log(`Built with version ${version.trim()}`);
```

---

## Important considerations

### Value format

Values must be valid JSON that will be parsed and inlined as JavaScript expressions:

```sh
# ✅ Strings must be JSON-quoted
--define VERSION='"1.0.0"'

# ✅ Numbers are JSON literals
--define PORT=3000

# ✅ Booleans are JSON literals
--define DEBUG=true

# ✅ Objects and arrays (use single quotes to wrap the JSON)
--define 'CONFIG={"host":"localhost","port":3000}'

# ✅ Arrays work too
--define 'FEATURES=["auth","billing","analytics"]'

# ❌ This won't work - missing quotes around string
--define VERSION=1.0.0
```

### Property keys

You can use property access patterns as keys, not just simple identifiers:

```sh
# ✅ Replace process.env.NODE_ENV with "production"
--define 'process.env.NODE_ENV="production"'

# ✅ Replace process.env.API_KEY with the actual key
--define 'process.env.API_KEY="abc123"'

# ✅ Replace nested properties
--define 'window.myApp.version="1.0.0"'

# ✅ Replace array access
--define 'process.argv[2]="--production"'
```

This is particularly useful for environment variables:

```ts
// Before compilation
if (process.env.NODE_ENV === "production") {
  console.log("Production mode");
}

// After compilation with --define 'process.env.NODE_ENV="production"'
if ("production" === "production") {
  console.log("Production mode");
}

// After optimization
console.log("Production mode");
```

### TypeScript declarations

For TypeScript projects, declare your constants to avoid type errors:

```ts
// types/build-constants.d.ts
declare const BUILD_VERSION: string;
declare const BUILD_TIME: string;
declare const NODE_ENV: "development" | "staging" | "production";
declare const DEBUG: boolean;
```

### Cross-platform compatibility

When building for multiple platforms, constants work the same way:

```sh
# Linux
$ bun build --compile --target=bun-linux-x64 --define PLATFORM='"linux"' src/app.ts --outfile app-linux

# macOS
$ bun build --compile --target=bun-darwin-x64 --define PLATFORM='"darwin"' src/app.ts --outfile app-macos

# Windows
$ bun build --compile --target=bun-windows-x64 --define PLATFORM='"windows"' src/app.ts --outfile app-windows.exe
```

---

## Related

- [Define constants at runtime](/guides/runtime/define-constant) - Using `--define` with `bun run`
- [Building executables](/bundler/executables) - Complete guide to `bun build --compile`
- [Bundler API](/bundler) - Full bundler documentation including `define` option

@@ -14,7 +14,7 @@ if (typeof Bun !== "undefined") {

---

In TypeScript environments, the previous approach will result in a type error unless `@types/bun` is installed. To avoid this, you can check `process.versions` instead.
In TypeScript environments, the previous approach will result in a type error unless `bun-types` is globally installed. To avoid this, you can check `process.versions` instead.

```ts
if (process.versions.bun) {

@@ -52,8 +52,6 @@ In your root-level `package.json`, add a `catalog` or `catalogs` field within th
}
```

If you put `catalog` or `catalogs` at the top level of the `package.json` file, that will work too.

### 2. Reference Catalog Versions in Workspace Packages

In your workspace packages, use the `catalog:` protocol to reference versions:

@@ -81,14 +81,6 @@ $ bun install --verbose # debug logging
$ bun install --silent # no logging
```

To use isolated installs instead of the default hoisted strategy:

```bash
$ bun install --linker isolated
```

Isolated installs create strict dependency isolation similar to pnpm, preventing phantom dependencies and ensuring more deterministic builds. For complete documentation, see [Isolated installs](https://bun.com/docs/install/isolated).

{% details summary="Configuring behavior" %}
The default behavior of `bun install` can be configured in `bunfig.toml`:

@@ -118,10 +110,6 @@ dryRun = false

# equivalent to `--concurrent-scripts` flag
concurrentScripts = 16 # (cpu count or GOMAXPROCS) x2

# installation strategy: "hoisted" or "isolated"
# default: "hoisted"
linker = "hoisted"
```

{% /details %}

@@ -1,195 +0,0 @@
Bun provides an alternative package installation strategy called **isolated installs** that creates strict dependency isolation similar to pnpm's approach. This mode prevents phantom dependencies and ensures reproducible, deterministic builds.

## What are isolated installs?

Isolated installs create a non-hoisted dependency structure where packages can only access their explicitly declared dependencies. This differs from the traditional "hoisted" installation strategy used by npm and Yarn, where dependencies are flattened into a shared `node_modules` directory.

### Key benefits

- **Prevents phantom dependencies** — Packages cannot accidentally import dependencies they haven't declared
- **Deterministic resolution** — Same dependency tree regardless of what else is installed
- **Better for monorepos** — Workspace isolation prevents cross-contamination between packages
- **Reproducible builds** — More predictable resolution behavior across environments

## Using isolated installs

### Command line

Use the `--linker` flag to specify the installation strategy:

```bash
# Use isolated installs
$ bun install --linker isolated

# Use traditional hoisted installs
$ bun install --linker hoisted
```

### Configuration file

Set the default linker strategy in your `bunfig.toml`:

```toml
[install]
linker = "isolated"
```

### Default behavior

By default, Bun uses the **hoisted** installation strategy for all projects. To use isolated installs, you must explicitly specify the `--linker isolated` flag or set it in your configuration file.

## How isolated installs work

### Directory structure

Instead of hoisting dependencies, isolated installs create a two-tier structure:

```
node_modules/
├── .bun/                        # Central package store
│   ├── package@1.0.0/           # Versioned package installations
│   │   └── node_modules/
│   │       └── package/         # Actual package files
│   ├── @scope+package@2.1.0/    # Scoped packages (+ replaces /)
│   │   └── node_modules/
│   │       └── @scope/
│   │           └── package/
│   └── ...
└── package-name -> .bun/package@1.0.0/node_modules/package   # Symlinks
```

### Resolution algorithm

1. **Central store** — All packages are installed in `node_modules/.bun/package@version/` directories
2. **Symlinks** — Top-level `node_modules` contains symlinks pointing to the central store
3. **Peer resolution** — Complex peer dependencies create specialized directory names
4. **Deduplication** — Packages with identical package IDs and peer dependency sets are shared
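A minimal sketch of what this resolution looks like from the filesystem, assuming a project that was installed with `--linker isolated` and that happens to depend on `react` (used here purely as an example package name):

```ts
// Sketch: with an isolated install, a top-level node_modules entry should be
// a symlink into the node_modules/.bun store rather than a real directory.
import { lstatSync, readlinkSync } from "node:fs";

const entry = "node_modules/react"; // substitute any installed dependency

if (lstatSync(entry).isSymbolicLink()) {
  // e.g. ".bun/react@18.2.0/node_modules/react"
  console.log(`${entry} -> ${readlinkSync(entry)}`);
} else {
  console.log(`${entry} is a plain directory (hoisted install)`);
}
```
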
### Workspace handling

In monorepos, workspace dependencies are handled specially:

- **Workspace packages** — Symlinked directly to their source directories, not the store
- **Workspace dependencies** — Can access other workspace packages in the monorepo
- **External dependencies** — Installed in the isolated store with proper isolation

## Comparison with hoisted installs

| Aspect                    | Hoisted (npm/Yarn)                         | Isolated (pnpm-like)                     |
| ------------------------- | ------------------------------------------ | ---------------------------------------- |
| **Dependency access**     | Packages can access any hoisted dependency | Packages only see declared dependencies  |
| **Phantom dependencies**  | ❌ Possible                                | ✅ Prevented                             |
| **Disk usage**            | ✅ Lower (shared installs)                 | ✅ Similar (uses symlinks)               |
| **Determinism**           | ❌ Less deterministic                      | ✅ More deterministic                    |
| **Node.js compatibility** | ✅ Standard behavior                       | ✅ Compatible via symlinks               |
| **Best for**              | Single projects, legacy code               | Monorepos, strict dependency management  |

## Advanced features

### Peer dependency handling

Isolated installs handle peer dependencies through sophisticated resolution:

```bash
# Package with peer dependencies creates specialized paths
node_modules/.bun/package@1.0.0_react@18.2.0/
```

The directory name encodes both the package version and its peer dependency versions, ensuring each unique combination gets its own installation.
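To see this naming in an existing project, one rough check (assuming an isolated install has already been run in the current directory and the store layout matches the description above) is to list the store entries:

```ts
// Sketch: list the isolated store and flag entries whose names carry a
// peer-dependency suffix (e.g. "package@1.0.0_react@18.2.0").
import { readdirSync } from "node:fs";

for (const entry of readdirSync("node_modules/.bun")) {
  const kind = entry.includes("_") ? "peer-specialized" : "plain";
  console.log(kind.padEnd(16), entry);
}
```
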
### Backend strategies

Bun uses different file operation strategies for performance:

- **Clonefile** (macOS) — Copy-on-write filesystem clones for maximum efficiency
- **Hardlink** (Linux/Windows) — Hardlinks to save disk space
- **Copyfile** (fallback) — Full file copies when other methods aren't available

### Debugging isolated installs

Enable verbose logging to understand the installation process:

```bash
$ bun install --linker isolated --verbose
```

This shows:

- Store entry creation
- Symlink operations
- Peer dependency resolution
- Deduplication decisions

## Troubleshooting

### Compatibility issues

Some packages may not work correctly with isolated installs due to:

- **Hardcoded paths** — Packages that assume a flat `node_modules` structure
- **Dynamic imports** — Runtime imports that don't follow Node.js resolution
- **Build tools** — Tools that scan `node_modules` directly

If you encounter issues, you can:

1. **Switch to hoisted mode** for specific projects:

   ```bash
   $ bun install --linker hoisted
   ```

2. **Report compatibility issues** to help improve isolated install support

### Performance considerations

- **Install time** — May be slightly slower due to symlink operations
- **Disk usage** — Similar to hoisted (uses symlinks, not file copies)
- **Memory usage** — Higher during install due to complex peer resolution

## Migration guide

### From npm/Yarn

```bash
# Remove existing node_modules and lockfiles
$ rm -rf node_modules package-lock.json yarn.lock

# Install with isolated linker
$ bun install --linker isolated
```

### From pnpm

Isolated installs are conceptually similar to pnpm, so migration should be straightforward:

```bash
# Remove pnpm files
$ rm -rf node_modules pnpm-lock.yaml

# Install with Bun's isolated linker
$ bun install --linker isolated
```

The main difference is that Bun uses symlinks in `node_modules` while pnpm uses a global store with symlinks.

## When to use isolated installs

**Use isolated installs when:**

- Working in monorepos with multiple packages
- Strict dependency management is required
- Preventing phantom dependencies is important
- Building libraries that need deterministic dependencies

**Use hoisted installs when:**

- Working with legacy code that assumes flat `node_modules`
- Compatibility with existing build tools is required
- Working in environments where symlinks aren't well supported
- You prefer the simpler traditional npm behavior

## Related documentation

- [Package manager > Workspaces](https://bun.com/docs/install/workspaces) — Monorepo workspace management
- [Package manager > Lockfile](https://bun.com/docs/install/lockfile) — Understanding Bun's lockfile format
- [CLI > install](https://bun.com/docs/cli/install) — Complete `bun install` command reference

@@ -176,16 +176,10 @@ export default {
  page("cli/pm", "`bun pm`", {
    description: "Utilities relating to package management with Bun.",
  }),
  page("cli/why", "`bun why`", {
    description: "Explains why a package is installed in your project.",
  }),
  page("install/cache", "Global cache", {
    description:
      "Bun's package manager installs all packages into a shared global cache to avoid redundant re-downloads.",
  }),
  page("install/isolated", "Isolated installs", {
    description: "Create strict dependency isolation, preventing phantom dependencies.",
  }),
  page("install/workspaces", "Workspaces", {
    description: "Bun's package manager supports workspaces and monorepo development workflows.",
  }),

@@ -20,7 +20,7 @@ this one:
Given a file implementing a simple function, such as `add`

```zig#src/bun.js/math.zig
pub fn add(global: *jsc.JSGlobalObject, a: i32, b: i32) !i32 {
pub fn add(global: *JSC.JSGlobalObject, a: i32, b: i32) !i32 {
    return std.math.add(i32, a, b) catch {
        // Binding functions can return `error.OutOfMemory` and `error.JSError`.
        // Others like `error.Overflow` from `std.math.add` must be converted.
@@ -33,7 +33,7 @@ const gen = bun.gen.math; // "math" being this file's basename

const std = @import("std");
const bun = @import("bun");
const jsc = bun.jsc;
const JSC = bun.JSC;
```

Then describe the API schema using a `.bind.ts` function. The binding file goes next to the Zig file.

@@ -108,21 +108,6 @@ The `telemetry` field permit to enable/disable the analytics records. Bun record
telemetry = false
```

### `console`

Configure console output behavior.

#### `console.depth`

Set the default depth for `console.log()` object inspection. Default `2`.

```toml
[console]
depth = 3
```

This controls how deeply nested objects are displayed in console output. Higher values show more nested properties but may produce verbose output for complex objects. This setting can be overridden by the `--console-depth` CLI flag.
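As a quick illustration (assuming Bun collapses levels beyond the configured depth the way Node's inspector does), a deeply nested object only reveals its innermost values once the depth is raised via `[console] depth` or the `--console-depth` flag:

```ts
// With the default depth of 2, the object stored under `c` prints as [Object],
// hiding `d`; raising the configured depth to 3 or more shows the full value.
const nested = { a: { b: { c: { d: "visible only at higher depth" } } } };
console.log(nested);
```
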
## Test runner

The test runner is configured under the `[test]` section of your bunfig.toml.
@@ -195,24 +180,6 @@ Whether to skip test files when computing coverage statistics. Default `false`.
coverageSkipTestFiles = false
```

### `test.coveragePathIgnorePatterns`

Exclude specific files or file patterns from coverage reports using glob patterns. Can be a single string pattern or an array of patterns.

```toml
[test]
# Single pattern
coveragePathIgnorePatterns = "**/*.spec.ts"

# Multiple patterns
coveragePathIgnorePatterns = [
  "**/*.spec.ts",
  "**/*.test.ts",
  "src/utils/**",
  "*.config.js"
]
```

### `test.coverageReporter`

By default, coverage reports will be printed to the console. For persistent code coverage reports in CI environments and for other tools, use `lcov`.

@@ -71,26 +71,6 @@ coverageThreshold = { lines = 0.9, functions = 0.8, statements = 0.85 }

Setting any of these enables `fail_on_low_coverage`, causing the test run to fail if coverage is below the threshold.

#### coveragePathIgnorePatterns

Exclude specific files or file patterns from coverage reports using glob patterns:

```toml
[test]
# Single pattern
coveragePathIgnorePatterns = "**/*.spec.ts"

# Multiple patterns
coveragePathIgnorePatterns = [
  "**/*.spec.ts",
  "**/*.test.ts",
  "src/utils/**",
  "*.config.js"
]
```

Files matching any of these patterns will be excluded from coverage calculation and reporting. See the [coverage documentation](./coverage.md) for more details and examples.

#### coverageIgnoreSourcemaps

Internally, Bun transpiles every file. That means code coverage must also go through sourcemaps before it can be reported. We expose this as a flag so you can opt out of that behavior, but the results can be confusing: during transpilation, Bun may move code around and change variable names. This option is mostly useful for debugging coverage issues.

@@ -57,18 +57,7 @@ coverageThreshold = { lines = 0.9, functions = 0.9, statements = 0.9 }

Setting any of these thresholds enables `fail_on_low_coverage`, causing the test run to fail if coverage is below the threshold.

### Sourcemaps

Internally, Bun transpiles all files by default, so Bun automatically generates an internal [source map](https://web.dev/source-maps/) that maps lines of your original source code onto Bun's internal representation. If for any reason you want to disable this, set `test.coverageIgnoreSourcemaps` to `true`; this will rarely be desirable outside of advanced use cases.

```toml
[test]
coverageIgnoreSourcemaps = true # default false
```

### Exclude files from coverage

#### Skip test files
### Exclude test files from coverage

By default, test files themselves are included in coverage reports. You can exclude them with:

@@ -79,33 +68,15 @@ coverageSkipTestFiles = true # default false

This will exclude files matching test patterns (e.g., *.test.ts, *_spec.js) from the coverage report.

#### Ignore specific paths and patterns
### Sourcemaps

You can exclude specific files or file patterns from coverage reports using `coveragePathIgnorePatterns`:
Internally, Bun transpiles all files by default, so Bun automatically generates an internal [source map](https://web.dev/source-maps/) that maps lines of your original source code onto Bun's internal representation. If for any reason you want to disable this, set `test.coverageIgnoreSourcemaps` to `true`; this will rarely be desirable outside of advanced use cases.

```toml
[test]
# Single pattern
coveragePathIgnorePatterns = "**/*.spec.ts"

# Multiple patterns
coveragePathIgnorePatterns = [
  "**/*.spec.ts",
  "**/*.test.ts",
  "src/utils/**",
  "*.config.js"
]
coverageIgnoreSourcemaps = true # default false
```

This option accepts glob patterns and works similarly to Jest's `collectCoverageFrom` ignore patterns. Files matching any of these patterns will be excluded from coverage calculation and reporting in both text and LCOV outputs.

Common use cases:

- Exclude utility files: `"src/utils/**"`
- Exclude configuration files: `"*.config.js"`
- Exclude specific test patterns: `"**/*.spec.ts"`
- Exclude build artifacts: `"dist/**"`

### Coverage defaults

By default, coverage reports:
@@ -113,7 +84,6 @@ By default, coverage reports:
1. Exclude `node_modules` directories
2. Exclude files loaded via non-JS/TS loaders (e.g., .css, .txt) unless a custom JS loader is specified
3. Include test files themselves (can be disabled with `coverageSkipTestFiles = true` as shown above)
4. Can exclude additional files with `coveragePathIgnorePatterns` as shown above

### Coverage reporters

@@ -426,54 +426,6 @@ test("exactly two assertions", () => {

This helps ensure all your assertions run, especially in complex async code with multiple code paths.

## Type Testing

Bun includes `expectTypeOf` for testing TypeScript types, compatible with Vitest.

### expectTypeOf

{% callout %}

**Note** — These functions are no-ops at runtime - you need to run TypeScript separately to verify the type checks.

{% endcallout %}

The `expectTypeOf` function provides type-level assertions that are checked by TypeScript's type checker.

**Important**: To test your types:

1. Write your type assertions using `expectTypeOf`
2. Run `bunx tsc --noEmit` to check that your types are correct

```ts
import { expectTypeOf } from "bun:test";

// Basic type assertions
expectTypeOf<string>().toEqualTypeOf<string>();
expectTypeOf(123).toBeNumber();
expectTypeOf("hello").toBeString();

// Object type matching
expectTypeOf({ a: 1, b: "hello" }).toMatchObjectType<{ a: number }>();

// Function types
function greet(name: string): string {
  return `Hello ${name}`;
}

expectTypeOf(greet).toBeFunction();
expectTypeOf(greet).parameters.toEqualTypeOf<[string]>();
expectTypeOf(greet).returns.toEqualTypeOf<string>();

// Array types
expectTypeOf([1, 2, 3]).items.toBeNumber();

// Promise types
expectTypeOf(Promise.resolve(42)).resolves.toBeNumber();
```

For full documentation on `expectTypeOf` matchers, see the [API Reference](/reference/bun/test/expectTypeOf).

## Matchers

Bun implements the following matchers. Full Jest compatibility is on the roadmap; track progress [here](https://github.com/oven-sh/bun/issues/1825).
@@ -677,17 +629,17 @@ Bun implements the following matchers. Full Jest compatibility is on the roadmap

---

- ✅
- ❌
- [`.toHaveReturnedWith()`](https://jestjs.io/docs/expect#tohavereturnedwithvalue)

---

- ✅
- ❌
- [`.toHaveLastReturnedWith()`](https://jestjs.io/docs/expect#tohavelastreturnedwithvalue)

---

- ✅
- ❌
- [`.toHaveNthReturnedWith()`](https://jestjs.io/docs/expect#tohaventhreturnedwithnthcall-value)

---

1
examples/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
bun-examples-all
0
examples/.is-examples-folder
Normal file
7
examples/add.rs
Normal file
@@ -0,0 +1,7 @@
#[no_mangle]
pub extern "C" fn add(a: i32, b: i32) -> i32 {
    a + b
}

// to compile:
// rustc --crate-type cdylib add.rs
12
examples/add.ts
Normal file
@@ -0,0 +1,12 @@
import { dlopen, suffix } from "bun:ffi";

const {
  symbols: { add },
} = dlopen(`./libadd.${suffix}`, {
  add: {
    args: ["i32", "i32"],
    returns: "i32",
  },
});

console.log(add(1, 2));
6
examples/add.zig
Normal file
@@ -0,0 +1,6 @@
pub export fn add(a: i32, b: i32) i32 {
    return a + b;
}

// to compile:
// zig build-lib -OReleaseFast ./add.zig -dynamic --name add
89
examples/bun-hot-websockets.js
Normal file
89
examples/bun-hot-websockets.js
Normal file
@@ -0,0 +1,89 @@
|
||||
// To run this example:
|
||||
//
|
||||
// bun --hot bun-hot-websockets.js
|
||||
//
|
||||
|
||||
const css = ([inner]) => {
|
||||
return inner;
|
||||
};
|
||||
|
||||
const styles = css`
|
||||
#bun {
|
||||
margin: 0 auto;
|
||||
margin-top: 200px;
|
||||
object-fit: cover;
|
||||
}
|
||||
html,
|
||||
body {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
body {
|
||||
background: #f1239f;
|
||||
font-family: "Inter", sans-serif;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
align-content: center;
|
||||
color: white;
|
||||
}
|
||||
h1 {
|
||||
padding: 0;
|
||||
text-align: center;
|
||||
font-size: 3rem;
|
||||
-webkit-text-stroke: 2px black;
|
||||
}
|
||||
* {
|
||||
box-sizing: border-box;
|
||||
}
|
||||
`;
|
||||
|
||||
Bun.serve({
|
||||
websocket: {
|
||||
message(ws, msg) {
|
||||
ws.send(styles);
|
||||
},
|
||||
},
|
||||
fetch(req, server) {
|
||||
if (req.url.endsWith("/hot")) {
|
||||
if (server.upgrade(req))
|
||||
return new Response("", {
|
||||
status: 101,
|
||||
});
|
||||
}
|
||||
|
||||
return new Response(
|
||||
`
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>WebSockets</title>
|
||||
</head>
|
||||
<body>
|
||||
<style></style>
|
||||
<script>
|
||||
const ws = new WebSocket("ws://localhost:3000/hot");
|
||||
const style = document.querySelector("style");
|
||||
ws.onmessage = (e) => {
|
||||
style.innerHTML = e.data;
|
||||
};
|
||||
setInterval(() => {
|
||||
ws.send("ping");
|
||||
}, 8);
|
||||
</script>
|
||||
<div id="app">
|
||||
<img src="https://bun.com/logo.svg" alt="Bun" id='bun' />
|
||||
<h1>bun --hot websockets</h1>
|
||||
</div>
|
||||
</body>
|
||||
|
||||
`,
|
||||
{
|
||||
headers: {
|
||||
"Content-Type": "text/html; charset=utf-8",
|
||||
},
|
||||
},
|
||||
);
|
||||
},
|
||||
});
|
||||
9
examples/cat.ts
Normal file
9
examples/cat.ts
Normal file
@@ -0,0 +1,9 @@
|
||||
import { resolve } from "path";
|
||||
const { write, stdout, file } = Bun;
|
||||
import { argv } from "process";
|
||||
|
||||
const path = resolve(argv.at(-1)!);
|
||||
await write(stdout, file(path));
|
||||
|
||||
Bun.stdout;
|
||||
process.stdout;
|
||||
11
examples/change-array-by-copy.js
Normal file
11
examples/change-array-by-copy.js
Normal file
@@ -0,0 +1,11 @@
|
||||
const sequence = [1, 2, 3];
|
||||
sequence.toReversed(); // => [3, 2, 1]
|
||||
sequence; // => [1, 2, 3]
|
||||
|
||||
const outOfOrder = new Uint8Array([3, 1, 2]);
|
||||
outOfOrder.toSorted(); // => Uint8Array [1, 2, 3]
|
||||
outOfOrder; // => Uint8Array [3, 1, 2]
|
||||
|
||||
const correctionNeeded = [1, 1, 3];
|
||||
correctionNeeded.with(1, 2); // => [1, 2, 3]
|
||||
correctionNeeded; // => [1, 1, 3]
|
||||
23
examples/hashing.js
Normal file
23
examples/hashing.js
Normal file
@@ -0,0 +1,23 @@
|
||||
// Accepts a string, TypedArray, or Blob (file blob support is not implemented but planned)
|
||||
const input = "hello world".repeat(400);
|
||||
|
||||
// Bun.hash() defaults to Wyhash because it's fast
|
||||
console.log(Bun.hash(input));
|
||||
|
||||
console.log(Bun.hash.wyhash(input));
|
||||
// and returns a bigint
|
||||
// all of these hashing functions return number if 32-bit or bigint if 64-bit, not typed arrays.
|
||||
console.log(Bun.hash.adler32(input)); // number
|
||||
console.log(Bun.hash.crc32(input)); // number
|
||||
console.log(Bun.hash.cityHash32(input)); // number
|
||||
console.log(Bun.hash.cityHash64(input)); // bigint
|
||||
console.log(Bun.hash.xxHash32(input)); // number
|
||||
console.log(Bun.hash.xxHash64(input)); // bigint
|
||||
console.log(Bun.hash.xxHash3(input)); // bigint
|
||||
console.log(Bun.hash.murmur32v3(input)); // number
|
||||
console.log(Bun.hash.murmur32v2(input)); // number
|
||||
console.log(Bun.hash.murmur64v2(input)); // bigint
|
||||
console.log(Bun.hash.rapidhash(input)); // bigint
|
||||
|
||||
// Second argument accepts a seed where relevant
|
||||
console.log(Bun.hash(input, 12345));
|
||||
37
examples/html-rewriter.ts
Normal file
37
examples/html-rewriter.ts
Normal file
@@ -0,0 +1,37 @@
|
||||
// Start a fast HTTP server from a function
|
||||
|
||||
Bun.serve({
|
||||
async fetch(req) {
|
||||
const { pathname } = new URL(req.url);
|
||||
if (!(pathname.startsWith("/https://") || pathname.startsWith("/http://"))) {
|
||||
return new Response("Enter a path that starts with https:// or http://\n", {
|
||||
status: 400,
|
||||
});
|
||||
}
|
||||
|
||||
const response = await fetch(req.url.substring("http://localhost:3000/".length), req.clone());
|
||||
|
||||
return new HTMLRewriter()
|
||||
.on("a[href]", {
|
||||
element(element) {
|
||||
element.setAttribute("href", "https://www.youtube.com/watch?v=dQw4w9WgXcQ");
|
||||
},
|
||||
})
|
||||
.transform(response);
|
||||
},
|
||||
|
||||
// this is called when fetch() throws or rejects
|
||||
// error(err: Error) {
|
||||
// },
|
||||
|
||||
// this boolean enables Bun's default error handler
|
||||
// sometime after the initial release, it will auto reload as well
|
||||
development: process.env.NODE_ENV !== "production",
|
||||
// note: this isn't node, but for compatibility bun supports process.env + more stuff in process
|
||||
|
||||
// SSL is enabled if these two are set
|
||||
// certFile: './cert.pem',
|
||||
// keyFile: './key.pem',
|
||||
|
||||
port: 3000, // number or string
|
||||
});
|
||||
76
examples/http-file-extended.ts
Normal file
76
examples/http-file-extended.ts
Normal file
@@ -0,0 +1,76 @@
|
||||
import { file, serve } from "bun";
|
||||
import { existsSync, statSync } from "fs";
|
||||
|
||||
serve({
|
||||
fetch(req: Request) {
|
||||
let pathname = new URL(req.url).pathname.substring(1);
|
||||
if (pathname == "") {
|
||||
pathname = import.meta.url.replace("file://", "");
|
||||
}
|
||||
|
||||
if (!existsSync(pathname)) {
|
||||
return new Response(null, { status: 404 });
|
||||
}
|
||||
|
||||
const stats = statSync(pathname);
|
||||
|
||||
// https://github.com/gornostay25/svelte-adapter-bun/blob/master/src/sirv.js
|
||||
const headers = new Headers({
|
||||
"Content-Length": "" + stats.size,
|
||||
"Last-Modified": stats.mtime.toUTCString(),
|
||||
ETag: `W/"${stats.size}-${stats.mtime.getTime()}"`,
|
||||
});
|
||||
|
||||
if (req.headers.get("if-none-match") === headers.get("ETag")) {
|
||||
return new Response(null, { status: 304 });
|
||||
}
|
||||
|
||||
const opts = { code: 200, start: 0, end: Infinity, range: false };
|
||||
|
||||
if (req.headers.has("range")) {
|
||||
opts.code = 206;
|
||||
let [x, y] = req.headers.get("range")!.replace("bytes=", "").split("-");
|
||||
let end = (opts.end = parseInt(y, 10) || stats.size - 1);
|
||||
let start = (opts.start = parseInt(x, 10) || 0);
|
||||
|
||||
if (start >= stats.size || end >= stats.size) {
|
||||
headers.set("Content-Range", `bytes */${stats.size}`);
|
||||
return new Response(null, {
|
||||
headers: headers,
|
||||
status: 416,
|
||||
});
|
||||
}
|
||||
|
||||
headers.set("Content-Range", `bytes ${start}-${end}/${stats.size}`);
|
||||
headers.set("Content-Length", "" + (end - start + 1));
|
||||
headers.set("Accept-Ranges", "bytes");
|
||||
opts.range = true;
|
||||
}
|
||||
|
||||
if (opts.range) {
|
||||
return new Response(file(pathname).slice(opts.start, opts.end), {
|
||||
headers,
|
||||
status: opts.code,
|
||||
});
|
||||
}
|
||||
|
||||
return new Response(file(pathname), { headers, status: opts.code });
|
||||
},
|
||||
|
||||
// this is called when fetch() throws or rejects
|
||||
// error(err: Error) {
|
||||
// return new Response("uh oh! :(" + String(err.toString()), { status: 500 });
|
||||
// },
|
||||
|
||||
// this boolean enables Bun's default error handler
|
||||
// sometime after the initial release, it will auto reload as well
|
||||
development: process.env.NODE_ENV !== "production",
|
||||
// note: this isn't node, but for compatibility bun supports process.env + more stuff in process
|
||||
|
||||
// SSL is enabled if these two are set
|
||||
// certFile: './cert.pem',
|
||||
// keyFile: './key.pem',
|
||||
|
||||
port: 3000, // number or string
|
||||
hostname: "localhost", // defaults to 0.0.0.0
|
||||
});
|
||||
31
examples/http-file.ts
Normal file
31
examples/http-file.ts
Normal file
@@ -0,0 +1,31 @@
|
||||
import { file, serve } from "bun";
|
||||
|
||||
serve({
|
||||
fetch(req: Request) {
|
||||
const pathname = new URL(req.url).pathname.substring(1);
|
||||
|
||||
// If the URL is empty, display this file.
|
||||
if (pathname === "") {
|
||||
return new Response(file(import.meta.url.replace("file://", "")));
|
||||
}
|
||||
|
||||
return new Response(file(pathname));
|
||||
},
|
||||
|
||||
// this is called when fetch() throws or rejects
|
||||
// error(err: Error) {
|
||||
// return new Response("uh oh! :(" + String(err.toString()), { status: 500 });
|
||||
// },
|
||||
|
||||
// this boolean enables Bun's default error handler
|
||||
// sometime after the initial release, it will auto reload as well
|
||||
development: process.env.NODE_ENV !== "production",
|
||||
// note: this isn't node, but for compatibility bun supports process.env + more stuff in process
|
||||
|
||||
// SSL is enabled if these two are set
|
||||
// certFile: './cert.pem',
|
||||
// keyFile: './key.pem',
|
||||
|
||||
port: 3000, // number or string
|
||||
hostname: "localhost", // defaults to 0.0.0.0
|
||||
});
|
||||
17
examples/http-request-body.ts
Normal file
17
examples/http-request-body.ts
Normal file
@@ -0,0 +1,17 @@
|
||||
import { serve } from "bun";
|
||||
|
||||
serve({
|
||||
async fetch(req) {
|
||||
// body is a ReadableStream
|
||||
const body = req.body;
|
||||
|
||||
const writer = Bun.file(`upload.${Date.now()}.txt`).writer();
|
||||
for await (const chunk of body!) {
|
||||
writer.write(chunk);
|
||||
}
|
||||
const wrote = await writer.end();
|
||||
|
||||
// @ts-ignore
|
||||
return Response.json({ wrote, type: req.headers.get("Content-Type") });
|
||||
},
|
||||
});
|
||||
12
examples/http-stop.ts
Normal file
12
examples/http-stop.ts
Normal file
@@ -0,0 +1,12 @@
|
||||
import { serve } from "bun";
|
||||
|
||||
const server = serve({
|
||||
fetch(req) {
|
||||
return new Response(`Pending requests count: ${this.pendingRequests}`);
|
||||
},
|
||||
});
|
||||
|
||||
// Stop the server after 5 seconds
|
||||
setTimeout(() => {
|
||||
server.stop();
|
||||
}, 5000);
|
||||
34
examples/http.ts
Normal file
34
examples/http.ts
Normal file
@@ -0,0 +1,34 @@
|
||||
// Start a fast HTTP server from a function
|
||||
Bun.serve({
|
||||
fetch(req: Request) {
|
||||
return new Response(`Echo: ${req.url}`);
|
||||
},
|
||||
|
||||
// baseURI: "http://localhost:3000",
|
||||
|
||||
// this is called when fetch() throws or rejects
|
||||
// error(err: Error) {
|
||||
// return new Response("uh oh! :(\n" + err.toString(), { status: 500 });
|
||||
// },
|
||||
|
||||
// this boolean enables bun's default error handler
|
||||
development: process.env.NODE_ENV !== "production",
|
||||
// note: this isn't node, but for compatibility bun supports process.env + more stuff in process
|
||||
|
||||
// SSL is enabled if these two are set
|
||||
// certFile: './cert.pem',
|
||||
// keyFile: './key.pem',
|
||||
|
||||
port: 3000, // number or string
|
||||
});
|
||||
// Start a fast HTTP server from the main file's export
|
||||
// export default {
|
||||
// fetch(req) {
|
||||
// return new Response(
|
||||
// `This is another way to start a server!
|
||||
// if the main file export default's an object
|
||||
// with 'fetch'. Bun automatically calls Bun.serve`
|
||||
// );
|
||||
// },
|
||||
// // so autocomplete & type checking works
|
||||
// } as Bun.Serve;
|
||||
193
examples/lambda.ts
Normal file
193
examples/lambda.ts
Normal file
@@ -0,0 +1,193 @@
|
||||
const { AWS_LAMBDA_RUNTIME_API, LAMBDA_TASK_ROOT, _HANDLER } = process.env;
|
||||
|
||||
if (!AWS_LAMBDA_RUNTIME_API || AWS_LAMBDA_RUNTIME_API === "") {
|
||||
throw new Error("AWS_LAMBDA_RUNTIME_API is not set");
|
||||
}
|
||||
|
||||
const nextURL = `http://${AWS_LAMBDA_RUNTIME_API}/2018-06-01/runtime/invocation/next`;
|
||||
const sourceDir = LAMBDA_TASK_ROOT;
|
||||
if (!sourceDir) {
|
||||
throw new Error("handler is not set");
|
||||
}
|
||||
if (!_HANDLER) {
|
||||
throw new Error("handler is not set");
|
||||
}
|
||||
|
||||
// don't care if this fails
|
||||
if (process.cwd() !== sourceDir) {
|
||||
try {
|
||||
process.chdir(sourceDir);
|
||||
} catch (e) {}
|
||||
}
|
||||
|
||||
var handlerDot = _HANDLER.lastIndexOf(".");
|
||||
var sourcefile = handlerDot > 0 ? _HANDLER.substring(0, handlerDot) : _HANDLER;
|
||||
if (sourcefile.length === 0) {
|
||||
throw new Error("handler is not set");
|
||||
}
|
||||
if (!sourcefile.startsWith("/")) {
|
||||
sourcefile = `./${sourcefile}`;
|
||||
}
|
||||
function noop() {}
|
||||
const method = (handlerDot > 0 ? _HANDLER.substring(handlerDot) : "") || "GET";
|
||||
|
||||
if (typeof process.env.VERBOSE !== "undefined") {
|
||||
console.time(`Loaded ${sourcefile}`);
|
||||
}
|
||||
var Handler;
|
||||
|
||||
try {
|
||||
Handler = await import(sourcefile);
|
||||
} catch (e: any) {
|
||||
console.error("Error loading sourcefile:", e);
|
||||
try {
|
||||
await fetch(new URL(`http://${AWS_LAMBDA_RUNTIME_API}/2018-06-01/runtime/init/error`).href, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify({
|
||||
errorMessage: e.message,
|
||||
errorType: e.name,
|
||||
stackTrace: e?.stack?.split("\n") ?? [],
|
||||
}),
|
||||
});
|
||||
} catch (e2) {
|
||||
console.error("Error sending error to runtime:", e2);
|
||||
}
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
if (typeof process.env.VERBOSE !== "undefined") {
|
||||
console.timeEnd(`Loaded ${sourcefile}`);
|
||||
}
|
||||
|
||||
const handlerFunction = Handler.default?.fetch;
|
||||
if (typeof handlerFunction !== "function") {
|
||||
const e = new Error(`${sourcefile} must export default a function called fetch
|
||||
|
||||
Here is an example:
|
||||
|
||||
export default {
|
||||
fetch(req) {
|
||||
return new Response("Hello World");
|
||||
}
|
||||
}
|
||||
`);
|
||||
|
||||
console.error(e);
|
||||
|
||||
try {
|
||||
await fetch(new URL(`http://${AWS_LAMBDA_RUNTIME_API}/2018-06-01/runtime/init/error`).href, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify({
|
||||
errorMessage: e.message,
|
||||
errorType: e.name,
|
||||
stackTrace: e?.stack?.split("\n") ?? [],
|
||||
}),
|
||||
});
|
||||
} catch (e2) {
|
||||
console.error("Error sending error to runtime:", e2);
|
||||
}
|
||||
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
var baseURLString = AWS_LAMBDA_RUNTIME_API;
|
||||
if ("baseURI" in Handler.default) {
|
||||
baseURLString = Handler.default.baseURI?.toString();
|
||||
}
|
||||
|
||||
var baseURL;
|
||||
try {
|
||||
baseURL = new URL(baseURLString);
|
||||
} catch (e: any) {
|
||||
console.error("Error parsing baseURI:", e);
|
||||
try {
|
||||
await fetch(new URL(`http://${AWS_LAMBDA_RUNTIME_API}/2018-06-01/runtime/init/error`).href, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify({
|
||||
errorMessage: e.message,
|
||||
errorType: e.name,
|
||||
stackTrace: e?.stack?.split("\n") || [],
|
||||
}),
|
||||
});
|
||||
} catch (e2) {
|
||||
console.error("Error sending error to runtime:", e2);
|
||||
}
|
||||
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
async function runHandler(response: Response) {
|
||||
const traceID = response.headers.get("Lambda-Runtime-Trace-Id");
|
||||
const requestID = response.headers.get("Lambda-Runtime-Aws-Request-Id");
|
||||
var request = new Request(baseURL.href, {
|
||||
method,
|
||||
headers: response.headers,
|
||||
body: parseInt(response.headers.get("Content-Length") || "0", 10) > 0 ? await response.blob() : undefined,
|
||||
});
|
||||
// we are done with the Response object here
|
||||
// allow it to be GC'd
|
||||
(response as any) = undefined;
|
||||
|
||||
var result: Response;
|
||||
try {
|
||||
if (typeof process.env.VERBOSE !== "undefined") {
|
||||
console.time(`[${traceID}] Run ${request.url}`);
|
||||
}
|
||||
result = handlerFunction(request, {});
|
||||
if (result && (result as any).then) {
|
||||
await result;
|
||||
}
|
||||
} catch (e1: any) {
|
||||
if (typeof process.env.VERBOSE !== "undefined") {
|
||||
console.error(`[${traceID}] Error running handler:`, e1);
|
||||
}
|
||||
fetch(`http://${AWS_LAMBDA_RUNTIME_API}/2018-06-01/runtime/invocation/${requestID}/error`, {
|
||||
method: "POST",
|
||||
|
||||
body: JSON.stringify({
|
||||
errorMessage: e1.message,
|
||||
errorType: e1.name,
|
||||
stackTrace: e1?.stack?.split("\n") ?? [],
|
||||
}),
|
||||
}).finally(noop);
|
||||
return;
|
||||
} finally {
|
||||
if (typeof process.env.VERBOSE !== "undefined") {
|
||||
console.timeEnd(`[${traceID}] Run ${request.url}`);
|
||||
}
|
||||
}
|
||||
|
||||
if (!result || !("headers" in result)) {
|
||||
await fetch(`http://${AWS_LAMBDA_RUNTIME_API}/2018-06-01/runtime/invocation/${requestID}/error`, {
|
||||
method: "POST",
|
||||
body: JSON.stringify({
|
||||
errorMessage: "Expected Response object",
|
||||
errorType: "ExpectedResponseObject",
|
||||
stackTrace: [],
|
||||
}),
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
await fetch(`http://${AWS_LAMBDA_RUNTIME_API}/2018-06-01/runtime/invocation/${requestID}/response`, {
|
||||
method: "POST",
|
||||
headers: result.headers,
|
||||
body: await result.blob(),
|
||||
});
|
||||
(result as any) = undefined;
|
||||
}
|
||||
|
||||
while (true) {
|
||||
fetch(nextURL).then(runHandler, console.error);
|
||||
}
|
||||
|
||||
export {};
|
||||
48
examples/macros/bun.lock
Normal file
48
examples/macros/bun.lock
Normal file
@@ -0,0 +1,48 @@
|
||||
{
|
||||
"lockfileVersion": 1,
|
||||
"workspaces": {
|
||||
"": {
|
||||
"name": "macros",
|
||||
"dependencies": {
|
||||
"moment": "^2.29.1",
|
||||
"papaparse": "^5.3.1",
|
||||
"react": "^17.0.2",
|
||||
"react-dom": "^17.0.2",
|
||||
"react-refresh": "^0.10.0",
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/react": "^17.0.24",
|
||||
"@types/react-dom": "^17.0.9",
|
||||
},
|
||||
},
|
||||
},
|
||||
"packages": {
|
||||
"@types/prop-types": ["@types/prop-types@15.7.5", "", {}, "sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w=="],
|
||||
|
||||
"@types/react": ["@types/react@17.0.53", "", { "dependencies": { "@types/prop-types": "*", "@types/scheduler": "*", "csstype": "^3.0.2" } }, "sha512-1yIpQR2zdYu1Z/dc1OxC+MA6GR240u3gcnP4l6mvj/PJiVaqHsQPmWttsvHsfnhfPbU2FuGmo0wSITPygjBmsw=="],
|
||||
|
||||
"@types/react-dom": ["@types/react-dom@17.0.19", "", { "dependencies": { "@types/react": "^17" } }, "sha512-PiYG40pnQRdPHnlf7tZnp0aQ6q9tspYr72vD61saO6zFCybLfMqwUCN0va1/P+86DXn18ZWeW30Bk7xlC5eEAQ=="],
|
||||
|
||||
"@types/scheduler": ["@types/scheduler@0.16.2", "", {}, "sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew=="],
|
||||
|
||||
"csstype": ["csstype@3.1.1", "", {}, "sha512-DJR/VvkAvSZW9bTouZue2sSxDwdTN92uHjqeKVm+0dAqdfNykRzQ95tay8aXMBAAPpUiq4Qcug2L7neoRh2Egw=="],
|
||||
|
||||
"js-tokens": ["js-tokens@4.0.0", "", {}, "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ=="],
|
||||
|
||||
"loose-envify": ["loose-envify@1.4.0", "", { "dependencies": { "js-tokens": "^3.0.0 || ^4.0.0" }, "bin": { "loose-envify": "cli.js" } }, "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q=="],
|
||||
|
||||
"moment": ["moment@2.29.4", "", {}, "sha512-5LC9SOxjSc2HF6vO2CyuTDNivEdoz2IvyJJGj6X8DJ0eFyfszE0QiEd+iXmBvUP3WHxSjFH/vIsA0EN00cgr8w=="],
|
||||
|
||||
"object-assign": ["object-assign@4.1.1", "", {}, "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg=="],
|
||||
|
||||
"papaparse": ["papaparse@5.3.2", "", {}, "sha512-6dNZu0Ki+gyV0eBsFKJhYr+MdQYAzFUGlBMNj3GNrmHxmz1lfRa24CjFObPXtjcetlOv5Ad299MhIK0znp3afw=="],
|
||||
|
||||
"react": ["react@17.0.2", "", { "dependencies": { "loose-envify": "^1.1.0", "object-assign": "^4.1.1" } }, "sha512-gnhPt75i/dq/z3/6q/0asP78D0u592D5L1pd7M8P+dck6Fu/jJeL6iVVK23fptSUZj8Vjf++7wXA8UNclGQcbA=="],
|
||||
|
||||
"react-dom": ["react-dom@17.0.2", "", { "dependencies": { "loose-envify": "^1.1.0", "object-assign": "^4.1.1", "scheduler": "^0.20.2" }, "peerDependencies": { "react": "17.0.2" } }, "sha512-s4h96KtLDUQlsENhMn1ar8t2bEa+q/YAtj8pPPdIjPDGBDIVNsrD9aXNWqspUe6AzKCIG0C1HZZLqLV7qpOBGA=="],
|
||||
|
||||
"react-refresh": ["react-refresh@0.10.0", "", {}, "sha512-PgidR3wST3dDYKr6b4pJoqQFpPGNKDSCDx4cZoshjXipw3LzO7mG1My2pwEzz2JVkF+inx3xRpDeQLFQGH/hsQ=="],
|
||||
|
||||
"scheduler": ["scheduler@0.20.2", "", { "dependencies": { "loose-envify": "^1.1.0", "object-assign": "^4.1.1" } }, "sha512-2eWfGgAqqWFGqtdMmcL5zCMK1U8KlXv8SQFGglL3CEtd0aDVDWgeF/YoCmvln55m5zSk3J/20hTaSBeSObsQDQ=="],
|
||||
}
|
||||
}
|
||||
30
examples/macros/components/covid19.tsx
Normal file
30
examples/macros/components/covid19.tsx
Normal file
@@ -0,0 +1,30 @@
|
||||
import { fetchCSV } from "macro:fetchCSV";
|
||||
|
||||
export const Covid19 = () => {
|
||||
const rows = fetchCSV("https://covid19.who.int/WHO-COVID-19-global-data.csv", {
|
||||
last: 100,
|
||||
columns: ["New_cases", "Date_reported", "Country"],
|
||||
});
|
||||
|
||||
return (
|
||||
<div>
|
||||
<h2>Covid-19</h2>
|
||||
<h6>last {rows.length} updates from the WHO</h6>
|
||||
<div className="Table">
|
||||
<div className="Header">
|
||||
<div className="Heading">New Cases</div>
|
||||
<div className="Heading">Date</div>
|
||||
<div className="Heading">Country</div>
|
||||
</div>
|
||||
|
||||
{rows.map((row, index) => (
|
||||
<div className="Row" key={index}>
|
||||
<div className="Column">{row[0]}</div>
|
||||
<div className="Column">{row[1]}</div>
|
||||
<div className="Column">{row[2]}</div>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
15
examples/macros/components/example.jsx
Normal file
15
examples/macros/components/example.jsx
Normal file
@@ -0,0 +1,15 @@
|
||||
// source code
|
||||
import { matchInFile } from "macro:matchInFile";
|
||||
|
||||
export const IPAddresses = () => (
|
||||
<div>
|
||||
<h2>recent ip addresses</h2>
|
||||
<div className="Lines">
|
||||
{matchInFile("access.log", /^(?:[0-9]{1,3}\.){3}[0-9]{1,3}/).map((ipAddress, index) => (
|
||||
<div className="Line" key={index}>
|
||||
{ipAddress}
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
15
examples/macros/components/index.tsx
Normal file
15
examples/macros/components/index.tsx
Normal file
@@ -0,0 +1,15 @@
|
||||
import * as ReactDOM from "react-dom";
|
||||
import * as React from "react";
|
||||
import { IPAddresses } from "./example";
|
||||
import { Covid19 } from "./covid19";
|
||||
|
||||
const Start = function () {
|
||||
const root = document.createElement("div");
|
||||
document.body.appendChild(root);
|
||||
|
||||
// comment out to switch between examples
|
||||
// ReactDOM.render(<IPAddresses />, root);
|
||||
ReactDOM.render(<Covid19 />, root);
|
||||
};
|
||||
|
||||
Start();
|
||||
4
examples/macros/example.js
Normal file
4
examples/macros/example.js
Normal file
@@ -0,0 +1,4 @@
|
||||
// source code
|
||||
import { mysteryBox } from "macro:./mystery-box";
|
||||
|
||||
export default "You roll! " + mysteryBox(123);
|
||||
54
examples/macros/fetchCSV.tsx
Normal file
54
examples/macros/fetchCSV.tsx
Normal file
@@ -0,0 +1,54 @@
|
||||
import Pappa from "papaparse";
|
||||
// Example usage:
|
||||
// const rows = fetchCSV(
|
||||
// "https://covid19.who.int/WHO-COVID-19-global-data.csv",
|
||||
// {
|
||||
// last: 100,
|
||||
// columns: ["New_cases", "Date_reported", "Country"],
|
||||
// }
|
||||
// );
|
||||
export async function fetchCSV(callExpression) {
|
||||
console.time("fetchCSV Total");
|
||||
const [
|
||||
urlNode,
|
||||
{
|
||||
properties: { last: limit = 10, columns = [] },
|
||||
},
|
||||
] = callExpression.arguments;
|
||||
const url = urlNode.get();
|
||||
|
||||
console.time("Fetch");
|
||||
const response = await fetch(url);
|
||||
const csvText = await response.text();
|
||||
console.timeEnd("Fetch");
|
||||
|
||||
console.time("Parse");
|
||||
let rows = Pappa.parse(csvText, { fastMode: true }).data;
|
||||
console.timeEnd("Parse");
|
||||
|
||||
console.time("Render");
|
||||
const columnIndices = new Array(columns.length);
|
||||
|
||||
for (let i = 0; i < columns.length; i++) {
|
||||
columnIndices[i] = rows[0].indexOf(columns[i]);
|
||||
}
|
||||
|
||||
rows = rows
|
||||
.slice(Math.max(limit, rows.length) - limit)
|
||||
.reverse()
|
||||
.filter(columns => columns.every(Boolean));
|
||||
const value = (
|
||||
<array>
|
||||
{rows.map(columns => (
|
||||
<array>
|
||||
{columnIndices.map(columnIndex => (
|
||||
<string value={columns[columnIndex]} />
|
||||
))}
|
||||
</array>
|
||||
))}
|
||||
</array>
|
||||
);
|
||||
console.timeEnd("Render");
|
||||
console.timeEnd("fetchCSV Total");
|
||||
return value;
|
||||
}
|
||||
23
examples/macros/matchInFile.tsx
Normal file
23
examples/macros/matchInFile.tsx
Normal file
@@ -0,0 +1,23 @@
|
||||
// macro code
|
||||
export async function matchInFile(callExpression: BunAST.CallExpression) {
|
||||
const [filePathNode, matcherNode] = callExpression.arguments;
|
||||
let filePath: string;
|
||||
filePath = filePathNode.get();
|
||||
|
||||
let matcher: RegExp;
|
||||
matcher = matcherNode.get();
|
||||
const file: string = await Bun.file(Bun.cwd + filePath).text();
|
||||
|
||||
return (
|
||||
<array>
|
||||
{file
|
||||
.split("\n")
|
||||
.map(line => line.match(matcher))
|
||||
.filter(Boolean)
|
||||
.reverse()
|
||||
.map(line => (
|
||||
<string value={line[0]} />
|
||||
))}
|
||||
</array>
|
||||
);
|
||||
}
|
||||
13
examples/macros/mystery-box.ts
Normal file
13
examples/macros/mystery-box.ts
Normal file
@@ -0,0 +1,13 @@
|
||||
export function mysteryBox(callExpression) {
|
||||
console.log(callExpression.log);
|
||||
// get arguments
|
||||
const [countNode] = callExpression.arguments;
|
||||
const countString: string = countNode.get();
|
||||
const count: number = parseInt(countString, 10);
|
||||
|
||||
// validate
|
||||
if (!(count >= 1 && count <= 1000)) return new Error(`Argument ${countString} is expected to be between 1 and 1000`);
|
||||
|
||||
// return a value
|
||||
return (Math.random() * count) | 0;
|
||||
}
|
||||
10
examples/macros/now.tsx
Normal file
10
examples/macros/now.tsx
Normal file
@@ -0,0 +1,10 @@
|
||||
import moment from "moment";
|
||||
export function now(node) {
|
||||
var fmt = "HH:mm:ss";
|
||||
const args = node.arguments;
|
||||
if (args[0] instanceof <string />) {
|
||||
fmt = args[0].get();
|
||||
}
|
||||
const time = moment().format(fmt);
|
||||
return <string value={time}></string>;
|
||||
}
|
||||
17
examples/macros/package.json
Normal file
@@ -0,0 +1,17 @@
{
  "name": "macros",
  "version": "1.0.0",
  "main": "index.js",
  "license": "MIT",
  "dependencies": {
    "moment": "^2.29.1",
    "papaparse": "^5.3.1",
    "react": "^17.0.2",
    "react-dom": "^17.0.2",
    "react-refresh": "^0.10.0"
  },
  "devDependencies": {
    "@types/react": "^17.0.24",
    "@types/react-dom": "^17.0.9"
  }
}
14
examples/macros/public/index.html
Normal file
@@ -0,0 +1,14 @@
<!DOCTYPE html>
<html>
  <head>
    <title>Macro test</title>
    <meta charset="utf-8" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />

    <link rel="stylesheet" href="/styles.css" type="text/css" />
  </head>

  <body>
    <script async type="module" src="/components/index.tsx"></script>
  </body>
</html>
47
examples/macros/styles.css
Normal file
@@ -0,0 +1,47 @@
html {
  font-size: 4rem;
  margin: 0;
  padding: 0;
  background-color: black;

  color: rgb(0, 255, 0);
  font-family: "Courier";
}

body {
  margin: 48px auto;
  text-align: center;
}

.Line {
  font-size: 0.5rem;
  font-family: monospace;
}

.Table {
  display: grid;
  width: fit-content;
}

.Row,
.Header {
  display: grid;
  grid-template-columns: 2fr 1fr 1fr;
  text-align: right;

  column-gap: 2rem;
}

.Heading {
  text-align: right;
}

.Header {
  border-bottom: 1px solid rgb(0, 255, 0);
  margin-bottom: 20px;
  padding-bottom: 20px;
}

.Heading:nth-of-type(2) {
  text-align: left;
}
7
examples/macros/tsconfig.json
Normal file
@@ -0,0 +1,7 @@
{
  "compilerOptions": {
    "baseUrl": ".",
    "paths": {},
    "jsx": "preserve"
  }
}
11
examples/mmap/1.js
Normal file
@@ -0,0 +1,11 @@
const map = Bun.mmap("./mmap.txt", { shared: true });
const utf8decoder = new TextDecoder("utf-8");

let old = new TextEncoder().encode("12345");

setInterval(() => {
  old = old.sort((a, b) => (Math.random() > 0.5 ? -1 : 1));
  console.log(`changing mmap to ~> ${utf8decoder.decode(old)}`);

  map.set(old);
}, 4);
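// Note (added for context): 1.js is the writer half of the mmap demo. Every 4 ms it
// shuffles the bytes of "12345" and copies them into the mapping with map.set(); because
// the file is mapped with { shared: true }, the reader in 2.js below, which maps the same
// mmap.txt, should observe each update without re-reading the file from disk.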
22
examples/mmap/2.js
Normal file
@@ -0,0 +1,22 @@
const map = Bun.mmap("./mmap.txt");

function buffer_hash(buffer) {
  let hash = 0;
  for (let i = 0; i < buffer.length; i++) {
    hash = (hash << 5) - hash + buffer[i];
    hash |= 0; // Convert to 32bit integer
  }
  return hash;
}

const decoder = new TextDecoder();

let hash = buffer_hash(map);
console.log(decoder.decode(map));

while (true) {
  if (buffer_hash(map) !== hash) {
    hash = buffer_hash(map);
    console.log(`mmap changed to ~> ${decoder.decode(map)}`);
  }
}
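// Note (added for context): the reader busy-polls in a tight while (true) loop, recomputing
// a cheap 32-bit rolling hash of the mapped bytes and printing the contents whenever the
// hash changes. There is no sleep or file watcher here, so it pegs a CPU core; that is fine
// for demonstrating mmap visibility but not something to copy into real code.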
1
examples/mmap/mmap.txt
Normal file
@@ -0,0 +1 @@
43521
23
examples/openInEditor.js
Normal file
@@ -0,0 +1,23 @@
import { resolve } from "path";
import { parse } from "querystring";

export default {
  fetch(req) {
    const url = new URL(req.url);
    if (url.pathname === "/favicon.ico") return new Response("nooo dont open favicon in editor", { status: 404 });

    var pathname = req.url.substring(1);
    const q = pathname.indexOf("?");
    var { editor } = parse(pathname.substring(q + 1)) || {};

    if (q > 0) {
      pathname = pathname.substring(0, q);
    }

    Bun.openInEditor(resolve(pathname), {
      editor,
    });

    return new Response(`Opened ${req.url}`);
  },
};
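// Note (added for context): with this server running, requesting a path such as
//   GET /examples/openInEditor.js?editor=vscode
// resolves that path against the working directory and forwards the optional `editor` query
// value to Bun.openInEditor; leaving it off presumably falls back to whatever editor Bun is
// configured to use (the example itself does not show the fallback).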
2566
examples/react-fast-refresh-test/bun.lock
Normal file
File diff suppressed because it is too large
33
examples/react-fast-refresh-test/package.json
Normal file
@@ -0,0 +1,33 @@
{
  "name": "simple-react",
  "version": "1.0.0",
  "license": "MIT",
  "dependencies": {
    "@emotion/css": "^11.1.3",
    "@vitejs/plugin-react-refresh": "^1.3.3",
    "antd": "^4.16.1",
    "left-pad": "^1.3.0",
    "next": "^11.0.0",
    "parcel": "2.0.0-beta.3",
    "react": "^17.0.2",
    "react-bootstrap": "^1.6.1",
    "react-dom": "^17.0.2",
    "react-form": "^4.0.1",
    "react-hook-form": "^7.8.3"
  },
  "parcel": "parceldist/index.js",
  "targets": {
    "parcel": {
      "outputFormat": "esmodule",
      "sourceMap": false,
      "optimize": false,
      "engines": {
        "chrome": "last 1 version"
      }
    }
  },
  "devDependencies": {
    "@snowpack/plugin-react-refresh": "^2.5.0",
    "typescript": "^4.3.4"
  }
}
15
examples/react-fast-refresh-test/public/index.html
Normal file
@@ -0,0 +1,15 @@
<!DOCTYPE html>
<html>
  <head>
    <link
      rel="stylesheet"
      crossorigin="anonymous"
      href="https://fonts.googleapis.com/css2?family=IBM+Plex+Sans:wght@400;700&family=Space+Mono:wght@400;700&display=swap"
    />
  </head>
  <body>
    <div id="reactroot"></div>
    <link rel="stylesheet" href="./src/index.css" />
    <script src="./src/index.tsx" async type="module"></script>
  </body>
</html>
11758
examples/react-fast-refresh-test/src/button.css
Normal file
File diff suppressed because it is too large
14
examples/react-fast-refresh-test/src/colors.css
Normal file
@@ -0,0 +1,14 @@
:root {
  --timestamp: "0";
  --interval: "8";
  --progress-bar: 11.83299999999997%;
  --spinner-1-muted: rgb(142, 6, 182);
  --spinner-1-primary: rgb(177, 8, 227);
  --spinner-2-muted: rgb(110, 148, 190);
  --spinner-2-primary: rgb(138, 185, 238);
  --spinner-3-muted: rgb(75, 45, 64);
  --spinner-3-primary: rgb(94, 56, 80);
  --spinner-4-muted: rgb(155, 129, 108);
  --spinner-4-primary: rgb(194, 161, 135);
  --spinner-rotate: 213deg;
}
@@ -0,0 +1,21 @@
import React from "react";

export function RenderCounter({ name, children }) {
  const counter = React.useRef(1);
  return (
    <div className="RenderCounter">
      <div className="RenderCounter-meta">
        <div className="RenderCounter-title">
          {name} rendered <strong>{counter.current++} times</strong>
        </div>
        <div className="RenderCounter-lastRender">
          LAST RENDER:{" "}
          {new Intl.DateTimeFormat([], {
            timeStyle: "long",
          }).format(new Date())}
        </div>
      </div>
      <div className="RenderCounter-children">{children}</div>
    </div>
  );
}
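// Note (added for context): the render count lives in a ref rather than state, so under
// React Fast Refresh the "rendered N times" figure keeps climbing across hot updates instead
// of resetting; that appears to be how this test app checks that a refresh re-renders
// components without remounting them.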
14
examples/react-fast-refresh-test/src/components/app.tsx
Normal file
@@ -0,0 +1,14 @@
import * as React from "react";
import { Button } from "./Button";
import { RenderCounter } from "./RenderCounter";
export function App() {
  return (
    <RenderCounter name="App">
      <div className="AppRoot">
        <h1>This is the root element</h1>

        <Button>Click</Button>
      </div>
    </RenderCounter>
  );
}
@@ -0,0 +1,9 @@
import { RenderCounter } from "./RenderCounter";

export const Button = ({ children }) => {
  return (
    <RenderCounter name="Button">
      <div className="Button">{children}</div>
    </RenderCounter>
  );
};
Some files were not shown because too many files have changed in this diff.