mirror of
https://github.com/oven-sh/bun
synced 2026-02-02 15:08:46 +00:00
Compare commits
149 Commits
claude/fix
...
claude/htt
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
7e752b645a | ||
|
|
6edfdb6589 | ||
|
|
66bce3d6da | ||
|
|
069a8d0b5d | ||
|
|
9907c2e9fa | ||
|
|
3976fd83ee | ||
|
|
2ac835f764 | ||
|
|
52b82cbe40 | ||
|
|
7ddb527573 | ||
|
|
bac13201ae | ||
|
|
0a3b9ce701 | ||
|
|
7d5f5ad772 | ||
|
|
a3d3d49c7f | ||
|
|
d9551dda1a | ||
|
|
ee7608f7cf | ||
|
|
e329316d44 | ||
|
|
9479bb8a5b | ||
|
|
88a0002f7e | ||
|
|
6e9d57a953 | ||
|
|
3b7d1f7be2 | ||
|
|
1f517499ef | ||
|
|
b3f5dd73da | ||
|
|
a37b858993 | ||
|
|
09c56c8ba8 | ||
|
|
6e3349b55c | ||
|
|
2162837416 | ||
|
|
b9f6a908f7 | ||
|
|
4b5551d230 | ||
|
|
e1505b7143 | ||
|
|
6611983038 | ||
|
|
d7ca10e22f | ||
|
|
dc3c8f79c4 | ||
|
|
3ee477fc5b | ||
|
|
25834afe9a | ||
|
|
7caaf434e9 | ||
|
|
edf13bd91d | ||
|
|
20dddd1819 | ||
|
|
8ec4c0abb3 | ||
|
|
ab45d20630 | ||
|
|
21841af612 | ||
|
|
98da9b943c | ||
|
|
6a1bc7d780 | ||
|
|
bdfdcebafb | ||
|
|
1e4935cf3e | ||
|
|
e63608fced | ||
|
|
d6c1b54289 | ||
|
|
594b03c275 | ||
|
|
18e4da1903 | ||
|
|
7a199276fb | ||
|
|
63c4d8f68f | ||
|
|
afcdd90b77 | ||
|
|
ae6ad1c04a | ||
|
|
301ec28a65 | ||
|
|
5b842ade1d | ||
|
|
cf947fee17 | ||
|
|
73f0594704 | ||
|
|
2daf7ed02e | ||
|
|
38e8fea828 | ||
|
|
536dc8653b | ||
|
|
a705dfc63a | ||
|
|
9fba9de0b5 | ||
|
|
6c3005e412 | ||
|
|
40b310c208 | ||
|
|
edb7214e6c | ||
|
|
48b0b7fe6d | ||
|
|
e0cbef0dce | ||
|
|
14832c5547 | ||
|
|
d919a76dd6 | ||
|
|
973fa98796 | ||
|
|
b7a6087d71 | ||
|
|
55230c16e6 | ||
|
|
e2161e7e13 | ||
|
|
d5431fcfe6 | ||
|
|
b04f98885f | ||
|
|
1779ee807c | ||
|
|
42cec2f0e2 | ||
|
|
5b7fd9ed0e | ||
|
|
ed9353f95e | ||
|
|
4573b5b844 | ||
|
|
5a75bcde13 | ||
|
|
afc5f50237 | ||
|
|
ca8d8065ec | ||
|
|
0bcb3137d3 | ||
|
|
b79bbfe289 | ||
|
|
72490281e5 | ||
|
|
60ab798991 | ||
|
|
e1de7563e1 | ||
|
|
3d361c8b49 | ||
|
|
0759da233f | ||
|
|
9978424177 | ||
|
|
d42f536a74 | ||
|
|
f78d197523 | ||
|
|
80fb7c7375 | ||
|
|
e2bfeefc9d | ||
|
|
cff2c2690b | ||
|
|
d0272d4a98 | ||
|
|
48ebc15e63 | ||
|
|
2e8e7a000c | ||
|
|
c1584b8a35 | ||
|
|
a0f13ea5bb | ||
|
|
c2bd4095eb | ||
|
|
0a7313e66c | ||
|
|
83293ea50c | ||
|
|
de7c947161 | ||
|
|
033c977fea | ||
|
|
d957a81c0a | ||
|
|
0b98086c3d | ||
|
|
f6c5318560 | ||
|
|
ad1fa514ed | ||
|
|
24c43c8f4d | ||
|
|
d69eb3ca00 | ||
|
|
3f53add5f1 | ||
|
|
fcaff77ed7 | ||
|
|
25c61fcd5a | ||
|
|
d0b5f9b587 | ||
|
|
1400e05e11 | ||
|
|
c8e3a91602 | ||
|
|
2e5f7f10ae | ||
|
|
559c95ee2c | ||
|
|
262f8863cb | ||
|
|
05f5ea0070 | ||
|
|
46ce975175 | ||
|
|
fa4822f8b8 | ||
|
|
8881e671d4 | ||
|
|
97d55411de | ||
|
|
f247277375 | ||
|
|
b93468ca48 | ||
|
|
35e9f3d4a2 | ||
|
|
9142cdcb1a | ||
|
|
822445d922 | ||
|
|
a34e10db53 | ||
|
|
684f7ecd09 | ||
|
|
d189759576 | ||
|
|
e395dec309 | ||
|
|
ff6af0e2f7 | ||
|
|
1085908386 | ||
|
|
a56488f221 | ||
|
|
fe8f8242fd | ||
|
|
c69ed120e9 | ||
|
|
edea077947 | ||
|
|
669b34ff6c | ||
|
|
eb7727819a | ||
|
|
84604888e9 | ||
|
|
6286824e28 | ||
|
|
dcb51bda60 | ||
|
|
36e2870fc8 | ||
|
|
5ac0a9a95c | ||
|
|
448fad8213 | ||
|
|
0315c97e7b |
4
.github/CODEOWNERS
vendored
4
.github/CODEOWNERS
vendored
@@ -3,3 +3,7 @@
|
||||
|
||||
# Tests
|
||||
/test/expectations.txt @Jarred-Sumner
|
||||
|
||||
# Types
|
||||
*.d.ts @alii
|
||||
/packages/bun-types/ @alii
|
||||
|
||||
19
.github/workflows/auto-assign-types.yml
vendored
Normal file
19
.github/workflows/auto-assign-types.yml
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
name: Auto Assign Types Issues
|
||||
|
||||
on:
|
||||
issues:
|
||||
types: [labeled]
|
||||
|
||||
jobs:
|
||||
auto-assign:
|
||||
runs-on: ubuntu-latest
|
||||
if: github.event.label.name == 'types'
|
||||
permissions:
|
||||
issues: write
|
||||
steps:
|
||||
- name: Assign to alii
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GH_REPO: ${{ github.repository }}
|
||||
run: |
|
||||
gh issue edit ${{ github.event.issue.number }} --add-assignee alii
|
||||
24
.github/workflows/format.yml
vendored
24
.github/workflows/format.yml
vendored
@@ -8,10 +8,8 @@ on:
|
||||
workflow_dispatch:
|
||||
pull_request:
|
||||
merge_group:
|
||||
push:
|
||||
branches: ["main"]
|
||||
env:
|
||||
BUN_VERSION: "1.2.11"
|
||||
BUN_VERSION: "1.2.20"
|
||||
LLVM_VERSION: "19.1.7"
|
||||
LLVM_VERSION_MAJOR: "19"
|
||||
|
||||
@@ -37,13 +35,14 @@ jobs:
|
||||
- name: Setup Dependencies
|
||||
run: |
|
||||
bun install
|
||||
bun scripts/glob-sources.mjs
|
||||
- name: Format Code
|
||||
run: |
|
||||
# Start prettier in background with prefixed output
|
||||
echo "::group::Prettier"
|
||||
(bun run prettier 2>&1 | sed 's/^/[prettier] /' || echo "[prettier] Failed with exit code $?") &
|
||||
PRETTIER_PID=$!
|
||||
|
||||
|
||||
# Start clang-format installation and formatting in background with prefixed output
|
||||
echo "::group::Clang-format"
|
||||
(
|
||||
@@ -56,13 +55,13 @@ jobs:
|
||||
LLVM_VERSION_MAJOR=${{ env.LLVM_VERSION_MAJOR }} ./scripts/run-clang-format.sh format 2>&1 | sed 's/^/[clang-format] /'
|
||||
) &
|
||||
CLANG_PID=$!
|
||||
|
||||
|
||||
# Setup Zig in temp directory and run zig fmt in background with prefixed output
|
||||
echo "::group::Zig fmt"
|
||||
(
|
||||
ZIG_TEMP=$(mktemp -d)
|
||||
echo "[zig] Downloading Zig (musl build)..."
|
||||
wget -q -O "$ZIG_TEMP/zig.zip" https://github.com/oven-sh/zig/releases/download/autobuild-d1a4e0b0ddc75f37c6a090b97eef0cbb6335556e/bootstrap-x86_64-linux-musl.zip
|
||||
wget -q -O "$ZIG_TEMP/zig.zip" https://github.com/oven-sh/zig/releases/download/autobuild-e0b7c318f318196c5f81fdf3423816a7b5bb3112/bootstrap-x86_64-linux-musl.zip
|
||||
unzip -q -d "$ZIG_TEMP" "$ZIG_TEMP/zig.zip"
|
||||
export PATH="$ZIG_TEMP/bootstrap-x86_64-linux-musl:$PATH"
|
||||
echo "[zig] Running zig fmt..."
|
||||
@@ -72,38 +71,39 @@ jobs:
|
||||
rm -rf "$ZIG_TEMP"
|
||||
) &
|
||||
ZIG_PID=$!
|
||||
|
||||
|
||||
# Wait for all formatting tasks to complete
|
||||
echo ""
|
||||
echo "Running formatters in parallel..."
|
||||
FAILED=0
|
||||
|
||||
|
||||
if ! wait $PRETTIER_PID; then
|
||||
echo "::error::Prettier failed"
|
||||
FAILED=1
|
||||
fi
|
||||
echo "::endgroup::"
|
||||
|
||||
|
||||
if ! wait $CLANG_PID; then
|
||||
echo "::error::Clang-format failed"
|
||||
FAILED=1
|
||||
fi
|
||||
echo "::endgroup::"
|
||||
|
||||
|
||||
if ! wait $ZIG_PID; then
|
||||
echo "::error::Zig fmt failed"
|
||||
FAILED=1
|
||||
fi
|
||||
echo "::endgroup::"
|
||||
|
||||
|
||||
# Exit with error if any formatter failed
|
||||
if [ $FAILED -eq 1 ]; then
|
||||
echo "::error::One or more formatters failed"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
|
||||
echo "✅ All formatters completed successfully"
|
||||
- name: Ban Words
|
||||
run: |
|
||||
bun ./test/internal/ban-words.test.ts
|
||||
git rm -f cmake/sources/*.txt || true
|
||||
- uses: autofix-ci/action@635ffb0c9798bd160680f18fd73371e355b85f27
|
||||
|
||||
41
.github/workflows/glob-sources.yml
vendored
41
.github/workflows/glob-sources.yml
vendored
@@ -1,41 +0,0 @@
|
||||
name: Glob Sources
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
workflow_dispatch:
|
||||
pull_request:
|
||||
|
||||
env:
|
||||
BUN_VERSION: "1.2.11"
|
||||
|
||||
jobs:
|
||||
glob-sources:
|
||||
name: Glob Sources
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Configure Git
|
||||
run: |
|
||||
git config --global core.autocrlf true
|
||||
git config --global core.ignorecase true
|
||||
git config --global core.precomposeUnicode true
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: ${{ env.BUN_VERSION }}
|
||||
- name: Setup Dependencies
|
||||
run: |
|
||||
bun install
|
||||
- name: Glob sources
|
||||
run: bun scripts/glob-sources.mjs
|
||||
- name: Commit
|
||||
uses: stefanzweifel/git-auto-commit-action@v5
|
||||
with:
|
||||
commit_message: "`bun scripts/glob-sources.mjs`"
|
||||
|
||||
104
.github/workflows/labeled.yml
vendored
104
.github/workflows/labeled.yml
vendored
@@ -5,6 +5,8 @@ env:
|
||||
on:
|
||||
issues:
|
||||
types: [labeled]
|
||||
pull_request_target:
|
||||
types: [labeled, opened, reopened, synchronize, unlabeled]
|
||||
|
||||
jobs:
|
||||
# on-bug:
|
||||
@@ -43,9 +45,46 @@ jobs:
|
||||
# token: ${{ secrets.GITHUB_TOKEN }}
|
||||
# issue-number: ${{ github.event.issue.number }}
|
||||
# labels: ${{ steps.add-labels.outputs.labels }}
|
||||
on-slop:
|
||||
runs-on: ubuntu-latest
|
||||
if: github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'slop')
|
||||
permissions:
|
||||
issues: write
|
||||
pull-requests: write
|
||||
contents: write
|
||||
steps:
|
||||
- name: Update PR title and body for slop and close
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
const pr = await github.rest.pulls.get({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
pull_number: context.issue.number
|
||||
});
|
||||
|
||||
await github.rest.pulls.update({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
pull_number: context.issue.number,
|
||||
title: 'ai slop',
|
||||
body: 'This PR has been marked as AI slop and the description has been updated to avoid confusion or misleading reviewers.\n\nMany AI PRs are fine, but sometimes they submit a PR too early, fail to test if the problem is real, fail to reproduce the problem, or fail to test that the problem is fixed. If you think this PR is not AI slop, please leave a comment.',
|
||||
state: 'closed'
|
||||
});
|
||||
|
||||
// Delete the branch if it's from a fork or if it's not a protected branch
|
||||
try {
|
||||
await github.rest.git.deleteRef({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
ref: `heads/${pr.data.head.ref}`
|
||||
});
|
||||
} catch (error) {
|
||||
console.log('Could not delete branch:', error.message);
|
||||
}
|
||||
on-labeled:
|
||||
runs-on: ubuntu-latest
|
||||
if: github.event.label.name == 'crash' || github.event.label.name == 'needs repro'
|
||||
if: github.event_name == 'issues' && (github.event.label.name == 'crash' || github.event.label.name == 'needs repro')
|
||||
permissions:
|
||||
issues: write
|
||||
steps:
|
||||
@@ -66,11 +105,16 @@ jobs:
|
||||
env:
|
||||
GITHUB_ISSUE_BODY: ${{ github.event.issue.body }}
|
||||
GITHUB_ISSUE_TITLE: ${{ github.event.issue.title }}
|
||||
GITHUB_ISSUE_NUMBER: ${{ github.event.issue.number }}
|
||||
shell: bash
|
||||
run: |
|
||||
LABELS=$(bun scripts/read-issue.ts)
|
||||
bun scripts/is-outdated.ts
|
||||
|
||||
# Check for patterns that should close the issue
|
||||
CLOSE_ACTION=$(bun scripts/handle-crash-patterns.ts)
|
||||
echo "close-action=$CLOSE_ACTION" >> $GITHUB_OUTPUT
|
||||
|
||||
if [[ -f "is-outdated.txt" ]]; then
|
||||
echo "is-outdated=true" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
@@ -79,6 +123,10 @@ jobs:
|
||||
echo "outdated=$(cat outdated.txt)" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
if [[ -f "is-standalone.txt" ]]; then
|
||||
echo "is-standalone=true" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
if [[ -f "is-very-outdated.txt" ]]; then
|
||||
echo "is-very-outdated=true" >> $GITHUB_OUTPUT
|
||||
LABELS="$LABELS,old-version"
|
||||
@@ -88,9 +136,32 @@ jobs:
|
||||
|
||||
echo "latest=$(cat LATEST)" >> $GITHUB_OUTPUT
|
||||
echo "labels=$LABELS" >> $GITHUB_OUTPUT
|
||||
rm -rf is-outdated.txt outdated.txt latest.txt is-very-outdated.txt
|
||||
rm -rf is-outdated.txt outdated.txt latest.txt is-very-outdated.txt is-standalone.txt
|
||||
- name: Close issue if pattern detected
|
||||
if: github.event.label.name == 'crash' && fromJson(steps.add-labels.outputs.close-action).close == true
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
const closeAction = JSON.parse('${{ steps.add-labels.outputs.close-action }}');
|
||||
|
||||
// Comment with the reason
|
||||
await github.rest.issues.createComment({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: context.issue.number,
|
||||
body: closeAction.comment
|
||||
});
|
||||
|
||||
// Close the issue
|
||||
await github.rest.issues.update({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: context.issue.number,
|
||||
state: 'closed',
|
||||
state_reason: closeAction.reason
|
||||
});
|
||||
- name: Generate comment text with Sentry Link
|
||||
if: github.event.label.name == 'crash'
|
||||
if: github.event.label.name == 'crash' && fromJson(steps.add-labels.outputs.close-action).close != true
|
||||
# ignore if fail
|
||||
continue-on-error: true
|
||||
id: generate-comment-text
|
||||
@@ -124,8 +195,17 @@ jobs:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.issue.number }}
|
||||
labels: ${{ steps.add-labels.outputs.labels }}
|
||||
- name: Comment outdated (standalone executable)
|
||||
if: steps.add-labels.outputs.is-outdated == 'true' && steps.add-labels.outputs.is-standalone == 'true' && github.event.label.name == 'crash' && steps.generate-comment-text.outputs.sentry-link == ''
|
||||
uses: actions-cool/issues-helper@v3
|
||||
with:
|
||||
actions: "create-comment"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.issue.number }}
|
||||
body: |
|
||||
@${{ github.event.issue.user.login }}, the latest version of Bun is v${{ steps.add-labels.outputs.latest }}, but the standalone executable is running Bun v${{ steps.add-labels.outputs.outdated }}. When the CLI using Bun's single-file executable next updates it might be fixed.
|
||||
- name: Comment outdated
|
||||
if: steps.add-labels.outputs.is-outdated == 'true' && github.event.label.name == 'crash' && steps.generate-comment-text.outputs.sentry-link == ''
|
||||
if: steps.add-labels.outputs.is-outdated == 'true' && steps.add-labels.outputs.is-standalone != 'true' && github.event.label.name == 'crash' && steps.generate-comment-text.outputs.sentry-link == ''
|
||||
uses: actions-cool/issues-helper@v3
|
||||
with:
|
||||
actions: "create-comment"
|
||||
@@ -139,8 +219,22 @@ jobs:
|
||||
```sh
|
||||
bun upgrade
|
||||
```
|
||||
- name: Comment with Sentry Link and outdated version (standalone executable)
|
||||
if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated == 'true' && steps.add-labels.outputs.is-standalone == 'true'
|
||||
uses: actions-cool/issues-helper@v3
|
||||
with:
|
||||
actions: "create-comment"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.issue.number }}
|
||||
body: |
|
||||
@${{ github.event.issue.user.login }}, thank you for reporting this crash. The latest version of Bun is v${{ steps.add-labels.outputs.latest }}, but the standalone executable is running Bun v${{ steps.add-labels.outputs.outdated }}. When the CLI using Bun's single-file executable next updates it might be fixed.
|
||||
|
||||
For Bun's internal tracking, this issue is [${{ steps.generate-comment-text.outputs.sentry-id }}](${{ steps.generate-comment-text.outputs.sentry-link }}).
|
||||
|
||||
<!-- sentry-id: ${{ steps.generate-comment-text.outputs.sentry-id }} -->
|
||||
<!-- sentry-link: ${{ steps.generate-comment-text.outputs.sentry-link }} -->
|
||||
- name: Comment with Sentry Link and outdated version
|
||||
if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated == 'true'
|
||||
if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated == 'true' && steps.add-labels.outputs.is-standalone != 'true'
|
||||
uses: actions-cool/issues-helper@v3
|
||||
with:
|
||||
actions: "create-comment"
|
||||
|
||||
89
.github/workflows/on-submodule-update.yml
vendored
89
.github/workflows/on-submodule-update.yml
vendored
@@ -1,89 +0,0 @@
|
||||
name: Comment on updated submodule
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
paths:
|
||||
- "src/generated_versions_list.zig"
|
||||
- ".github/workflows/on-submodule-update.yml"
|
||||
|
||||
jobs:
|
||||
comment:
|
||||
name: Comment
|
||||
runs-on: ubuntu-latest
|
||||
if: ${{ github.repository_owner == 'oven-sh' }}
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
issues: write
|
||||
steps:
|
||||
- name: Checkout current
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
sparse-checkout: |
|
||||
src
|
||||
- name: Hash generated versions list
|
||||
id: hash
|
||||
run: |
|
||||
echo "hash=$(sha256sum src/generated_versions_list.zig | cut -d ' ' -f 1)" >> $GITHUB_OUTPUT
|
||||
- name: Checkout base
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ github.base_ref }}
|
||||
sparse-checkout: |
|
||||
src
|
||||
- name: Hash base
|
||||
id: base
|
||||
run: |
|
||||
echo "base=$(sha256sum src/generated_versions_list.zig | cut -d ' ' -f 1)" >> $GITHUB_OUTPUT
|
||||
- name: Compare
|
||||
id: compare
|
||||
run: |
|
||||
if [ "${{ steps.hash.outputs.hash }}" != "${{ steps.base.outputs.base }}" ]; then
|
||||
echo "changed=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "changed=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
- name: Find Comment
|
||||
id: comment
|
||||
uses: peter-evans/find-comment@v3
|
||||
with:
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
comment-author: github-actions[bot]
|
||||
body-includes: <!-- generated-comment submodule-updated -->
|
||||
- name: Write Warning Comment
|
||||
uses: peter-evans/create-or-update-comment@v4
|
||||
if: steps.compare.outputs.changed == 'true'
|
||||
with:
|
||||
comment-id: ${{ steps.comment.outputs.comment-id }}
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
edit-mode: replace
|
||||
body: |
|
||||
⚠️ **Warning:** @${{ github.actor }}, this PR has changes to submodule versions.
|
||||
|
||||
If this change was intentional, please ignore this message. If not, please undo changes to submodules and rebase your branch.
|
||||
|
||||
<!-- generated-comment submodule-updated -->
|
||||
- name: Add labels
|
||||
uses: actions-cool/issues-helper@v3
|
||||
if: steps.compare.outputs.changed == 'true'
|
||||
with:
|
||||
actions: "add-labels"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
labels: "changed-submodules"
|
||||
- name: Remove labels
|
||||
uses: actions-cool/issues-helper@v3
|
||||
if: steps.compare.outputs.changed == 'false'
|
||||
with:
|
||||
actions: "remove-labels"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
labels: "changed-submodules"
|
||||
- name: Delete outdated comment
|
||||
uses: actions-cool/issues-helper@v3
|
||||
if: steps.compare.outputs.changed == 'false' && steps.comment.outputs.comment-id != ''
|
||||
with:
|
||||
actions: "delete-comment"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
comment-id: ${{ steps.comment.outputs.comment-id }}
|
||||
5
.gitignore
vendored
5
.gitignore
vendored
@@ -186,4 +186,7 @@ scratch*.{js,ts,tsx,cjs,mjs}
|
||||
|
||||
*.bun-build
|
||||
|
||||
scripts/lldb-inline
|
||||
scripts/lldb-inline
|
||||
|
||||
# We regenerate these in all the build scripts
|
||||
cmake/sources/*.txt
|
||||
21
CLAUDE.md
21
CLAUDE.md
@@ -4,18 +4,14 @@ This is the Bun repository - an all-in-one JavaScript runtime & toolkit designed
|
||||
|
||||
### Build Commands
|
||||
|
||||
- **Build debug version**: `bun bd`
|
||||
- **Build Bun**: `bun bd`
|
||||
- Creates a debug build at `./build/debug/bun-debug`
|
||||
- **CRITICAL**: DO NOT set a build timeout. Compilation takes ~5 minutes. Be patient.
|
||||
- **CRITICAL**: no need for a timeout, the build is really fast!
|
||||
- **Run tests with your debug build**: `bun bd test <test-file>`
|
||||
- **CRITICAL**: Never use `bun test` directly - it won't include your changes
|
||||
- **Run any command with debug build**: `bun bd <command>`
|
||||
|
||||
### Other Build Variants
|
||||
|
||||
- `bun run build:release` - Release build
|
||||
|
||||
Address sanitizer is enabled by default in debug builds of Bun.
|
||||
Tip: Bun is already installed and in $PATH. The `bd` subcommand is a package.json script.
|
||||
|
||||
## Testing
|
||||
|
||||
@@ -43,16 +39,11 @@ Tests use Bun's Jest-compatible test runner with proper test fixtures:
|
||||
|
||||
```typescript
|
||||
import { test, expect } from "bun:test";
|
||||
import {
|
||||
bunEnv,
|
||||
bunExe,
|
||||
normalizeBunSnapshot,
|
||||
tempDirWithFiles,
|
||||
} from "harness";
|
||||
import { bunEnv, bunExe, normalizeBunSnapshot, tempDir } from "harness";
|
||||
|
||||
test("my feature", async () => {
|
||||
// Create temp directory with test files
|
||||
const dir = tempDirWithFiles("test-prefix", {
|
||||
using dir = tempDir("test-prefix", {
|
||||
"index.js": `console.log("hello");`,
|
||||
});
|
||||
|
||||
@@ -60,7 +51,7 @@ test("my feature", async () => {
|
||||
await using proc = Bun.spawn({
|
||||
cmd: [bunExe(), "index.js"],
|
||||
env: bunEnv,
|
||||
cwd: dir,
|
||||
cwd: String(dir),
|
||||
stderr: "pipe",
|
||||
});
|
||||
|
||||
|
||||
@@ -31,6 +31,11 @@ include(SetupCcache)
|
||||
parse_package_json(VERSION_VARIABLE DEFAULT_VERSION)
|
||||
optionx(VERSION STRING "The version of Bun" DEFAULT ${DEFAULT_VERSION})
|
||||
project(Bun VERSION ${VERSION})
|
||||
|
||||
# Bun uses C++23, which is compatible with BoringSSL's C++17 requirement
|
||||
set(CMAKE_CXX_STANDARD 23)
|
||||
set(CMAKE_CXX_STANDARD_REQUIRED ON)
|
||||
|
||||
include(Options)
|
||||
include(CompilerFlags)
|
||||
|
||||
@@ -43,6 +48,9 @@ include(SetupEsbuild)
|
||||
include(SetupZig)
|
||||
include(SetupRust)
|
||||
|
||||
# Generate dependency versions header
|
||||
include(GenerateDependencyVersions)
|
||||
|
||||
# --- Targets ---
|
||||
|
||||
include(BuildBun)
|
||||
|
||||
116
bench/postMessage/postMessage-object.mjs
Normal file
116
bench/postMessage/postMessage-object.mjs
Normal file
@@ -0,0 +1,116 @@
|
||||
// Benchmark for object fast path optimization in postMessage with Workers
|
||||
|
||||
import { bench, run } from "mitata";
|
||||
import { Worker } from "node:worker_threads";
|
||||
|
||||
const extraProperties = {
|
||||
a: "a!",
|
||||
b: "b!",
|
||||
"second": "c!",
|
||||
bool: true,
|
||||
nully: null,
|
||||
undef: undefined,
|
||||
int: 0,
|
||||
double: 1.234,
|
||||
falsy: false,
|
||||
};
|
||||
|
||||
const objects = {
|
||||
small: { property: "Hello world", ...extraProperties },
|
||||
medium: {
|
||||
property: Buffer.alloc("Hello World!!!".length * 1024, "Hello World!!!").toString(),
|
||||
...extraProperties,
|
||||
},
|
||||
large: {
|
||||
property: Buffer.alloc("Hello World!!!".length * 1024 * 256, "Hello World!!!").toString(),
|
||||
...extraProperties,
|
||||
},
|
||||
};
|
||||
|
||||
let worker;
|
||||
let receivedCount = new Int32Array(new SharedArrayBuffer(4));
|
||||
let sentCount = 0;
|
||||
|
||||
function createWorker() {
|
||||
const workerCode = `
|
||||
import { parentPort, workerData } from "node:worker_threads";
|
||||
|
||||
let int = workerData;
|
||||
|
||||
parentPort?.on("message", data => {
|
||||
switch (data.property.length) {
|
||||
case ${objects.small.property.length}:
|
||||
case ${objects.medium.property.length}:
|
||||
case ${objects.large.property.length}: {
|
||||
if (
|
||||
data.a === "a!" &&
|
||||
data.b === "b!" &&
|
||||
data.second === "c!" &&
|
||||
data.bool === true &&
|
||||
data.nully === null &&
|
||||
data.undef === undefined &&
|
||||
data.int === 0 &&
|
||||
data.double === 1.234 &&
|
||||
data.falsy === false) {
|
||||
Atomics.add(int, 0, 1);
|
||||
break;
|
||||
}
|
||||
}
|
||||
default: {
|
||||
throw new Error("Invalid data object: " + JSON.stringify(data));
|
||||
}
|
||||
}
|
||||
|
||||
});
|
||||
`;
|
||||
|
||||
worker = new Worker(workerCode, { eval: true, workerData: receivedCount });
|
||||
|
||||
worker.on("message", confirmationId => {});
|
||||
|
||||
worker.on("error", error => {
|
||||
console.error("Worker error:", error);
|
||||
});
|
||||
}
|
||||
|
||||
// Initialize worker before running benchmarks
|
||||
createWorker();
|
||||
|
||||
function fmt(int) {
|
||||
if (int < 1000) {
|
||||
return `${int} chars`;
|
||||
}
|
||||
|
||||
if (int < 100000) {
|
||||
return `${(int / 1024) | 0} KB`;
|
||||
}
|
||||
|
||||
return `${(int / 1024 / 1024) | 0} MB`;
|
||||
}
|
||||
|
||||
// Benchmark postMessage with pure strings (uses fast path)
|
||||
bench("postMessage({ prop: " + fmt(objects.small.property.length) + " string, ...9 more props })", async () => {
|
||||
sentCount++;
|
||||
worker.postMessage(objects.small);
|
||||
});
|
||||
|
||||
bench("postMessage({ prop: " + fmt(objects.medium.property.length) + " string, ...9 more props })", async () => {
|
||||
sentCount++;
|
||||
worker.postMessage(objects.medium);
|
||||
});
|
||||
|
||||
bench("postMessage({ prop: " + fmt(objects.large.property.length) + " string, ...9 more props })", async () => {
|
||||
sentCount++;
|
||||
worker.postMessage(objects.large);
|
||||
});
|
||||
|
||||
await run();
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 5000));
|
||||
|
||||
if (receivedCount[0] !== sentCount) {
|
||||
throw new Error("Expected " + receivedCount[0] + " to equal " + sentCount);
|
||||
}
|
||||
|
||||
// Cleanup worker
|
||||
worker?.terminate();
|
||||
Binary file not shown.
58
bench/postgres/mysql.mjs
Normal file
58
bench/postgres/mysql.mjs
Normal file
@@ -0,0 +1,58 @@
|
||||
const isBun = typeof globalThis?.Bun?.sql !== "undefined";
|
||||
let conn;
|
||||
let sql;
|
||||
import * as mariadb from "mariadb";
|
||||
import * as mysql2 from "mysql2/promise";
|
||||
let useMYSQL2 = false;
|
||||
if (process.argv.includes("--mysql2")) {
|
||||
useMYSQL2 = true;
|
||||
}
|
||||
if (isBun) {
|
||||
sql = new Bun.SQL({
|
||||
adapter: "mysql",
|
||||
database: "test",
|
||||
username: "root",
|
||||
});
|
||||
} else {
|
||||
const pool = (useMYSQL2 ? mysql2 : mariadb).createPool({
|
||||
// Add your MariaDB connection details here
|
||||
user: "root",
|
||||
database: "test",
|
||||
});
|
||||
conn = await pool.getConnection();
|
||||
}
|
||||
|
||||
if (isBun) {
|
||||
// Initialize the benchmark table (equivalent to initFct)
|
||||
await sql`DROP TABLE IF EXISTS test100`;
|
||||
await sql`CREATE TABLE test100 (i1 int,i2 int,i3 int,i4 int,i5 int,i6 int,i7 int,i8 int,i9 int,i10 int,i11 int,i12 int,i13 int,i14 int,i15 int,i16 int,i17 int,i18 int,i19 int,i20 int,i21 int,i22 int,i23 int,i24 int,i25 int,i26 int,i27 int,i28 int,i29 int,i30 int,i31 int,i32 int,i33 int,i34 int,i35 int,i36 int,i37 int,i38 int,i39 int,i40 int,i41 int,i42 int,i43 int,i44 int,i45 int,i46 int,i47 int,i48 int,i49 int,i50 int,i51 int,i52 int,i53 int,i54 int,i55 int,i56 int,i57 int,i58 int,i59 int,i60 int,i61 int,i62 int,i63 int,i64 int,i65 int,i66 int,i67 int,i68 int,i69 int,i70 int,i71 int,i72 int,i73 int,i74 int,i75 int,i76 int,i77 int,i78 int,i79 int,i80 int,i81 int,i82 int,i83 int,i84 int,i85 int,i86 int,i87 int,i88 int,i89 int,i90 int,i91 int,i92 int,i93 int,i94 int,i95 int,i96 int,i97 int,i98 int,i99 int,i100 int)`;
|
||||
await sql`INSERT INTO test100 value (1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100)`;
|
||||
} else {
|
||||
// Initialize the benchmark table (equivalent to initFct)
|
||||
await conn.query("DROP TABLE IF EXISTS test100");
|
||||
await conn.query(
|
||||
"CREATE TABLE test100 (i1 int,i2 int,i3 int,i4 int,i5 int,i6 int,i7 int,i8 int,i9 int,i10 int,i11 int,i12 int,i13 int,i14 int,i15 int,i16 int,i17 int,i18 int,i19 int,i20 int,i21 int,i22 int,i23 int,i24 int,i25 int,i26 int,i27 int,i28 int,i29 int,i30 int,i31 int,i32 int,i33 int,i34 int,i35 int,i36 int,i37 int,i38 int,i39 int,i40 int,i41 int,i42 int,i43 int,i44 int,i45 int,i46 int,i47 int,i48 int,i49 int,i50 int,i51 int,i52 int,i53 int,i54 int,i55 int,i56 int,i57 int,i58 int,i59 int,i60 int,i61 int,i62 int,i63 int,i64 int,i65 int,i66 int,i67 int,i68 int,i69 int,i70 int,i71 int,i72 int,i73 int,i74 int,i75 int,i76 int,i77 int,i78 int,i79 int,i80 int,i81 int,i82 int,i83 int,i84 int,i85 int,i86 int,i87 int,i88 int,i89 int,i90 int,i91 int,i92 int,i93 int,i94 int,i95 int,i96 int,i97 int,i98 int,i99 int,i100 int)",
|
||||
);
|
||||
await conn.query(
|
||||
"INSERT INTO test100 value (1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100)",
|
||||
);
|
||||
}
|
||||
// Run the benchmark (equivalent to benchFct)
|
||||
const type = isBun ? "Bun.SQL" : useMYSQL2 ? "mysql2" : "mariadb";
|
||||
console.time(type);
|
||||
let promises = [];
|
||||
|
||||
for (let i = 0; i < 100_000; i++) {
|
||||
if (isBun) {
|
||||
promises.push(sql`select * FROM test100`);
|
||||
} else {
|
||||
promises.push(conn.query("select * FROM test100"));
|
||||
}
|
||||
}
|
||||
await Promise.all(promises);
|
||||
console.timeEnd(type);
|
||||
|
||||
// Clean up connection
|
||||
if (!isBun && conn.release) {
|
||||
conn.release();
|
||||
}
|
||||
@@ -9,6 +9,8 @@
|
||||
"typescript": "^5.0.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"mariadb": "^3.4.5",
|
||||
"mysql2": "^3.14.3",
|
||||
"postgres": "^3.4.7"
|
||||
}
|
||||
}
|
||||
@@ -12,6 +12,9 @@ const scenarios = [
|
||||
{ alg: "sha1", digest: "base64" },
|
||||
{ alg: "sha256", digest: "hex" },
|
||||
{ alg: "sha256", digest: "base64" },
|
||||
{ alg: "blake2b512", digest: "hex" },
|
||||
{ alg: "sha512-224", digest: "hex" },
|
||||
{ alg: "sha512-256", digest: "hex" },
|
||||
];
|
||||
|
||||
for (const { alg, digest } of scenarios) {
|
||||
@@ -23,6 +26,10 @@ for (const { alg, digest } of scenarios) {
|
||||
bench(`${alg}-${digest} (Bun.CryptoHasher)`, () => {
|
||||
new Bun.CryptoHasher(alg).update(data).digest(digest);
|
||||
});
|
||||
|
||||
bench(`${alg}-${digest} (Bun.CryptoHasher.hash)`, () => {
|
||||
return Bun.CryptoHasher.hash(alg, data, digest);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
407
bench/yaml/yaml-stringify.mjs
Normal file
407
bench/yaml/yaml-stringify.mjs
Normal file
@@ -0,0 +1,407 @@
|
||||
import { bench, group, run } from "../runner.mjs";
|
||||
import jsYaml from "js-yaml";
|
||||
import yaml from "yaml";
|
||||
|
||||
// Small object
|
||||
const smallObject = {
|
||||
name: "John Doe",
|
||||
age: 30,
|
||||
email: "john@example.com",
|
||||
active: true,
|
||||
};
|
||||
|
||||
// Medium object with nested structures
|
||||
const mediumObject = {
|
||||
company: "Acme Corp",
|
||||
employees: [
|
||||
{
|
||||
name: "John Doe",
|
||||
age: 30,
|
||||
position: "Developer",
|
||||
skills: ["JavaScript", "TypeScript", "Node.js"],
|
||||
},
|
||||
{
|
||||
name: "Jane Smith",
|
||||
age: 28,
|
||||
position: "Designer",
|
||||
skills: ["Figma", "Photoshop", "Illustrator"],
|
||||
},
|
||||
{
|
||||
name: "Bob Johnson",
|
||||
age: 35,
|
||||
position: "Manager",
|
||||
skills: ["Leadership", "Communication", "Planning"],
|
||||
},
|
||||
],
|
||||
settings: {
|
||||
database: {
|
||||
host: "localhost",
|
||||
port: 5432,
|
||||
name: "mydb",
|
||||
},
|
||||
cache: {
|
||||
enabled: true,
|
||||
ttl: 3600,
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
// Large object with complex structures
|
||||
const largeObject = {
|
||||
apiVersion: "apps/v1",
|
||||
kind: "Deployment",
|
||||
metadata: {
|
||||
name: "nginx-deployment",
|
||||
labels: {
|
||||
app: "nginx",
|
||||
},
|
||||
},
|
||||
spec: {
|
||||
replicas: 3,
|
||||
selector: {
|
||||
matchLabels: {
|
||||
app: "nginx",
|
||||
},
|
||||
},
|
||||
template: {
|
||||
metadata: {
|
||||
labels: {
|
||||
app: "nginx",
|
||||
},
|
||||
},
|
||||
spec: {
|
||||
containers: [
|
||||
{
|
||||
name: "nginx",
|
||||
image: "nginx:1.14.2",
|
||||
ports: [
|
||||
{
|
||||
containerPort: 80,
|
||||
},
|
||||
],
|
||||
env: [
|
||||
{
|
||||
name: "ENV_VAR_1",
|
||||
value: "value1",
|
||||
},
|
||||
{
|
||||
name: "ENV_VAR_2",
|
||||
value: "value2",
|
||||
},
|
||||
],
|
||||
volumeMounts: [
|
||||
{
|
||||
name: "config",
|
||||
mountPath: "/etc/nginx",
|
||||
},
|
||||
],
|
||||
resources: {
|
||||
limits: {
|
||||
cpu: "1",
|
||||
memory: "1Gi",
|
||||
},
|
||||
requests: {
|
||||
cpu: "0.5",
|
||||
memory: "512Mi",
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
volumes: [
|
||||
{
|
||||
name: "config",
|
||||
configMap: {
|
||||
name: "nginx-config",
|
||||
items: [
|
||||
{
|
||||
key: "nginx.conf",
|
||||
path: "nginx.conf",
|
||||
},
|
||||
{
|
||||
key: "mime.types",
|
||||
path: "mime.types",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
nodeSelector: {
|
||||
disktype: "ssd",
|
||||
},
|
||||
tolerations: [
|
||||
{
|
||||
key: "key1",
|
||||
operator: "Equal",
|
||||
value: "value1",
|
||||
effect: "NoSchedule",
|
||||
},
|
||||
{
|
||||
key: "key2",
|
||||
operator: "Exists",
|
||||
effect: "NoExecute",
|
||||
},
|
||||
],
|
||||
affinity: {
|
||||
nodeAffinity: {
|
||||
requiredDuringSchedulingIgnoredDuringExecution: {
|
||||
nodeSelectorTerms: [
|
||||
{
|
||||
matchExpressions: [
|
||||
{
|
||||
key: "kubernetes.io/e2e-az-name",
|
||||
operator: "In",
|
||||
values: ["e2e-az1", "e2e-az2"],
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
podAntiAffinity: {
|
||||
preferredDuringSchedulingIgnoredDuringExecution: [
|
||||
{
|
||||
weight: 100,
|
||||
podAffinityTerm: {
|
||||
labelSelector: {
|
||||
matchExpressions: [
|
||||
{
|
||||
key: "app",
|
||||
operator: "In",
|
||||
values: ["web-store"],
|
||||
},
|
||||
],
|
||||
},
|
||||
topologyKey: "kubernetes.io/hostname",
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
// Object with anchors and references (after resolution)
|
||||
const objectWithAnchors = {
|
||||
defaults: {
|
||||
adapter: "postgresql",
|
||||
host: "localhost",
|
||||
port: 5432,
|
||||
},
|
||||
development: {
|
||||
adapter: "postgresql",
|
||||
host: "localhost",
|
||||
port: 5432,
|
||||
database: "dev_db",
|
||||
},
|
||||
test: {
|
||||
adapter: "postgresql",
|
||||
host: "localhost",
|
||||
port: 5432,
|
||||
database: "test_db",
|
||||
},
|
||||
production: {
|
||||
adapter: "postgresql",
|
||||
host: "prod.example.com",
|
||||
port: 5432,
|
||||
database: "prod_db",
|
||||
},
|
||||
};
|
||||
|
||||
// Array of items
|
||||
const arrayObject = [
|
||||
{
|
||||
id: 1,
|
||||
name: "Item 1",
|
||||
price: 10.99,
|
||||
tags: ["electronics", "gadgets"],
|
||||
},
|
||||
{
|
||||
id: 2,
|
||||
name: "Item 2",
|
||||
price: 25.5,
|
||||
tags: ["books", "education"],
|
||||
},
|
||||
{
|
||||
id: 3,
|
||||
name: "Item 3",
|
||||
price: 5.0,
|
||||
tags: ["food", "snacks"],
|
||||
},
|
||||
{
|
||||
id: 4,
|
||||
name: "Item 4",
|
||||
price: 100.0,
|
||||
tags: ["electronics", "computers"],
|
||||
},
|
||||
{
|
||||
id: 5,
|
||||
name: "Item 5",
|
||||
price: 15.75,
|
||||
tags: ["clothing", "accessories"],
|
||||
},
|
||||
];
|
||||
|
||||
// Multiline strings
|
||||
const multilineObject = {
|
||||
description:
|
||||
"This is a multiline string\nthat preserves line breaks\nand indentation.\n\nIt can contain multiple paragraphs\nand special characters: !@#$%^&*()\n",
|
||||
folded: "This is a folded string where line breaks are converted to spaces unless there are\nempty lines like above.",
|
||||
plain: "This is a plain string",
|
||||
quoted: 'This is a quoted string with "escapes"',
|
||||
literal: "This is a literal string with 'quotes'",
|
||||
};
|
||||
|
||||
// Numbers and special values
|
||||
const numbersObject = {
|
||||
integer: 42,
|
||||
negative: -17,
|
||||
float: 3.14159,
|
||||
scientific: 0.000123,
|
||||
infinity: Infinity,
|
||||
negativeInfinity: -Infinity,
|
||||
notANumber: NaN,
|
||||
octal: 493, // 0o755
|
||||
hex: 255, // 0xFF
|
||||
binary: 10, // 0b1010
|
||||
};
|
||||
|
||||
// Dates and timestamps
|
||||
const datesObject = {
|
||||
date: new Date("2024-01-15"),
|
||||
datetime: new Date("2024-01-15T10:30:00Z"),
|
||||
timestamp: new Date("2024-01-15T15:30:00.123456789Z"), // Adjusted for UTC-5
|
||||
canonical: new Date("2024-01-15T10:30:00.123456789Z"),
|
||||
};
|
||||
|
||||
// Stringify benchmarks
|
||||
group("stringify small object", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.stringify", () => {
|
||||
return Bun.YAML.stringify(smallObject);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.dump", () => {
|
||||
return jsYaml.dump(smallObject);
|
||||
});
|
||||
|
||||
bench("yaml.stringify", () => {
|
||||
return yaml.stringify(smallObject);
|
||||
});
|
||||
});
|
||||
|
||||
group("stringify medium object", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.stringify", () => {
|
||||
return Bun.YAML.stringify(mediumObject);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.dump", () => {
|
||||
return jsYaml.dump(mediumObject);
|
||||
});
|
||||
|
||||
bench("yaml.stringify", () => {
|
||||
return yaml.stringify(mediumObject);
|
||||
});
|
||||
});
|
||||
|
||||
group("stringify large object", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.stringify", () => {
|
||||
return Bun.YAML.stringify(largeObject);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.dump", () => {
|
||||
return jsYaml.dump(largeObject);
|
||||
});
|
||||
|
||||
bench("yaml.stringify", () => {
|
||||
return yaml.stringify(largeObject);
|
||||
});
|
||||
});
|
||||
|
||||
group("stringify object with anchors", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.stringify", () => {
|
||||
return Bun.YAML.stringify(objectWithAnchors);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.dump", () => {
|
||||
return jsYaml.dump(objectWithAnchors);
|
||||
});
|
||||
|
||||
bench("yaml.stringify", () => {
|
||||
return yaml.stringify(objectWithAnchors);
|
||||
});
|
||||
});
|
||||
|
||||
group("stringify array", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.stringify", () => {
|
||||
return Bun.YAML.stringify(arrayObject);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.dump", () => {
|
||||
return jsYaml.dump(arrayObject);
|
||||
});
|
||||
|
||||
bench("yaml.stringify", () => {
|
||||
return yaml.stringify(arrayObject);
|
||||
});
|
||||
});
|
||||
|
||||
group("stringify object with multiline strings", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.stringify", () => {
|
||||
return Bun.YAML.stringify(multilineObject);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.dump", () => {
|
||||
return jsYaml.dump(multilineObject);
|
||||
});
|
||||
|
||||
bench("yaml.stringify", () => {
|
||||
return yaml.stringify(multilineObject);
|
||||
});
|
||||
});
|
||||
|
||||
group("stringify object with numbers", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.stringify", () => {
|
||||
return Bun.YAML.stringify(numbersObject);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.dump", () => {
|
||||
return jsYaml.dump(numbersObject);
|
||||
});
|
||||
|
||||
bench("yaml.stringify", () => {
|
||||
return yaml.stringify(numbersObject);
|
||||
});
|
||||
});
|
||||
|
||||
group("stringify object with dates", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.stringify", () => {
|
||||
return Bun.YAML.stringify(datesObject);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.dump", () => {
|
||||
return jsYaml.dump(datesObject);
|
||||
});
|
||||
|
||||
bench("yaml.stringify", () => {
|
||||
return yaml.stringify(datesObject);
|
||||
});
|
||||
});
|
||||
|
||||
await run();
|
||||
2
bun.lock
2
bun.lock
@@ -40,8 +40,8 @@
|
||||
},
|
||||
},
|
||||
"overrides": {
|
||||
"bun-types": "workspace:packages/bun-types",
|
||||
"@types/bun": "workspace:packages/@types/bun",
|
||||
"bun-types": "workspace:packages/bun-types",
|
||||
},
|
||||
"packages": {
|
||||
"@esbuild/aix-ppc64": ["@esbuild/aix-ppc64@0.21.5", "", { "os": "aix", "cpu": "ppc64" }, "sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ=="],
|
||||
|
||||
@@ -13,7 +13,10 @@
|
||||
},
|
||||
{
|
||||
"output": "JavaScriptSources.txt",
|
||||
"paths": ["src/js/**/*.{js,ts}"]
|
||||
"paths": [
|
||||
"src/js/**/*.{js,ts}",
|
||||
"src/install/PackageManager/scanner-entry.ts"
|
||||
]
|
||||
},
|
||||
{
|
||||
"output": "JavaScriptCodegenSources.txt",
|
||||
|
||||
@@ -1,22 +0,0 @@
|
||||
src/bake/bake.d.ts
|
||||
src/bake/bake.private.d.ts
|
||||
src/bake/bun-framework-react/index.ts
|
||||
src/bake/client/css-reloader.ts
|
||||
src/bake/client/data-view.ts
|
||||
src/bake/client/error-serialization.ts
|
||||
src/bake/client/inspect.ts
|
||||
src/bake/client/JavaScriptSyntaxHighlighter.css
|
||||
src/bake/client/JavaScriptSyntaxHighlighter.ts
|
||||
src/bake/client/overlay.css
|
||||
src/bake/client/overlay.ts
|
||||
src/bake/client/stack-trace.ts
|
||||
src/bake/client/websocket.ts
|
||||
src/bake/debug.ts
|
||||
src/bake/DevServer.bind.ts
|
||||
src/bake/enums.ts
|
||||
src/bake/hmr-module.ts
|
||||
src/bake/hmr-runtime-client.ts
|
||||
src/bake/hmr-runtime-error.ts
|
||||
src/bake/hmr-runtime-server.ts
|
||||
src/bake/server/stack-trace-stub.ts
|
||||
src/bake/shared.ts
|
||||
@@ -1,7 +0,0 @@
|
||||
src/bake.bind.ts
|
||||
src/bake/DevServer.bind.ts
|
||||
src/bun.js/api/BunObject.bind.ts
|
||||
src/bun.js/bindgen_test.bind.ts
|
||||
src/bun.js/bindings/NodeModuleModule.bind.ts
|
||||
src/bun.js/node/node_os.bind.ts
|
||||
src/fmt.bind.ts
|
||||
@@ -1,12 +0,0 @@
|
||||
packages/bun-error/bun-error.css
|
||||
packages/bun-error/img/close.png
|
||||
packages/bun-error/img/error.png
|
||||
packages/bun-error/img/powered-by.png
|
||||
packages/bun-error/img/powered-by.webp
|
||||
packages/bun-error/index.tsx
|
||||
packages/bun-error/markdown.ts
|
||||
packages/bun-error/package.json
|
||||
packages/bun-error/runtime-error.ts
|
||||
packages/bun-error/sourcemap.ts
|
||||
packages/bun-error/stack-trace-parser.ts
|
||||
packages/bun-error/tsconfig.json
|
||||
@@ -1,15 +0,0 @@
|
||||
packages/bun-usockets/src/bsd.c
|
||||
packages/bun-usockets/src/context.c
|
||||
packages/bun-usockets/src/crypto/openssl.c
|
||||
packages/bun-usockets/src/eventing/epoll_kqueue.c
|
||||
packages/bun-usockets/src/eventing/libuv.c
|
||||
packages/bun-usockets/src/loop.c
|
||||
packages/bun-usockets/src/quic.c
|
||||
packages/bun-usockets/src/socket.c
|
||||
packages/bun-usockets/src/udp.c
|
||||
src/asan-config.c
|
||||
src/bun.js/bindings/node/http/llhttp/api.c
|
||||
src/bun.js/bindings/node/http/llhttp/http.c
|
||||
src/bun.js/bindings/node/http/llhttp/llhttp.c
|
||||
src/bun.js/bindings/uv-posix-polyfills.c
|
||||
src/bun.js/bindings/uv-posix-stubs.c
|
||||
@@ -1,505 +0,0 @@
|
||||
packages/bun-usockets/src/crypto/root_certs.cpp
|
||||
packages/bun-usockets/src/crypto/sni_tree.cpp
|
||||
src/bake/BakeGlobalObject.cpp
|
||||
src/bake/BakeProduction.cpp
|
||||
src/bake/BakeSourceProvider.cpp
|
||||
src/bun.js/bindings/ActiveDOMCallback.cpp
|
||||
src/bun.js/bindings/AsymmetricKeyValue.cpp
|
||||
src/bun.js/bindings/AsyncContextFrame.cpp
|
||||
src/bun.js/bindings/Base64Helpers.cpp
|
||||
src/bun.js/bindings/bindings.cpp
|
||||
src/bun.js/bindings/blob.cpp
|
||||
src/bun.js/bindings/bun-simdutf.cpp
|
||||
src/bun.js/bindings/bun-spawn.cpp
|
||||
src/bun.js/bindings/BunClientData.cpp
|
||||
src/bun.js/bindings/BunCommonStrings.cpp
|
||||
src/bun.js/bindings/BunDebugger.cpp
|
||||
src/bun.js/bindings/BunGCOutputConstraint.cpp
|
||||
src/bun.js/bindings/BunGlobalScope.cpp
|
||||
src/bun.js/bindings/BunHttp2CommonStrings.cpp
|
||||
src/bun.js/bindings/BunInjectedScriptHost.cpp
|
||||
src/bun.js/bindings/BunInspector.cpp
|
||||
src/bun.js/bindings/BunJSCEventLoop.cpp
|
||||
src/bun.js/bindings/BunObject.cpp
|
||||
src/bun.js/bindings/BunPlugin.cpp
|
||||
src/bun.js/bindings/BunProcess.cpp
|
||||
src/bun.js/bindings/BunString.cpp
|
||||
src/bun.js/bindings/BunWorkerGlobalScope.cpp
|
||||
src/bun.js/bindings/c-bindings.cpp
|
||||
src/bun.js/bindings/CallSite.cpp
|
||||
src/bun.js/bindings/CallSitePrototype.cpp
|
||||
src/bun.js/bindings/CatchScopeBinding.cpp
|
||||
src/bun.js/bindings/CodeCoverage.cpp
|
||||
src/bun.js/bindings/ConsoleObject.cpp
|
||||
src/bun.js/bindings/Cookie.cpp
|
||||
src/bun.js/bindings/CookieMap.cpp
|
||||
src/bun.js/bindings/coroutine.cpp
|
||||
src/bun.js/bindings/CPUFeatures.cpp
|
||||
src/bun.js/bindings/decodeURIComponentSIMD.cpp
|
||||
src/bun.js/bindings/DOMException.cpp
|
||||
src/bun.js/bindings/DOMFormData.cpp
|
||||
src/bun.js/bindings/DOMURL.cpp
|
||||
src/bun.js/bindings/DOMWrapperWorld.cpp
|
||||
src/bun.js/bindings/DoubleFormatter.cpp
|
||||
src/bun.js/bindings/EncodeURIComponent.cpp
|
||||
src/bun.js/bindings/EncodingTables.cpp
|
||||
src/bun.js/bindings/ErrorCode.cpp
|
||||
src/bun.js/bindings/ErrorStackFrame.cpp
|
||||
src/bun.js/bindings/ErrorStackTrace.cpp
|
||||
src/bun.js/bindings/EventLoopTaskNoContext.cpp
|
||||
src/bun.js/bindings/ExposeNodeModuleGlobals.cpp
|
||||
src/bun.js/bindings/ffi.cpp
|
||||
src/bun.js/bindings/helpers.cpp
|
||||
src/bun.js/bindings/highway_strings.cpp
|
||||
src/bun.js/bindings/HTMLEntryPoint.cpp
|
||||
src/bun.js/bindings/ImportMetaObject.cpp
|
||||
src/bun.js/bindings/inlines.cpp
|
||||
src/bun.js/bindings/InspectorBunFrontendDevServerAgent.cpp
|
||||
src/bun.js/bindings/InspectorHTTPServerAgent.cpp
|
||||
src/bun.js/bindings/InspectorLifecycleAgent.cpp
|
||||
src/bun.js/bindings/InspectorTestReporterAgent.cpp
|
||||
src/bun.js/bindings/InternalForTesting.cpp
|
||||
src/bun.js/bindings/InternalModuleRegistry.cpp
|
||||
src/bun.js/bindings/IPC.cpp
|
||||
src/bun.js/bindings/isBuiltinModule.cpp
|
||||
src/bun.js/bindings/JS2Native.cpp
|
||||
src/bun.js/bindings/JSBigIntBinding.cpp
|
||||
src/bun.js/bindings/JSBuffer.cpp
|
||||
src/bun.js/bindings/JSBufferEncodingType.cpp
|
||||
src/bun.js/bindings/JSBufferList.cpp
|
||||
src/bun.js/bindings/JSBundlerPlugin.cpp
|
||||
src/bun.js/bindings/JSBunRequest.cpp
|
||||
src/bun.js/bindings/JSCommonJSExtensions.cpp
|
||||
src/bun.js/bindings/JSCommonJSModule.cpp
|
||||
src/bun.js/bindings/JSCTaskScheduler.cpp
|
||||
src/bun.js/bindings/JSCTestingHelpers.cpp
|
||||
src/bun.js/bindings/JSDOMExceptionHandling.cpp
|
||||
src/bun.js/bindings/JSDOMFile.cpp
|
||||
src/bun.js/bindings/JSDOMGlobalObject.cpp
|
||||
src/bun.js/bindings/JSDOMWrapper.cpp
|
||||
src/bun.js/bindings/JSDOMWrapperCache.cpp
|
||||
src/bun.js/bindings/JSEnvironmentVariableMap.cpp
|
||||
src/bun.js/bindings/JSFFIFunction.cpp
|
||||
src/bun.js/bindings/JSMockFunction.cpp
|
||||
src/bun.js/bindings/JSNextTickQueue.cpp
|
||||
src/bun.js/bindings/JSNodePerformanceHooksHistogram.cpp
|
||||
src/bun.js/bindings/JSNodePerformanceHooksHistogramConstructor.cpp
|
||||
src/bun.js/bindings/JSNodePerformanceHooksHistogramPrototype.cpp
|
||||
src/bun.js/bindings/JSPropertyIterator.cpp
|
||||
src/bun.js/bindings/JSS3File.cpp
|
||||
src/bun.js/bindings/JSSecrets.cpp
|
||||
src/bun.js/bindings/JSSocketAddressDTO.cpp
|
||||
src/bun.js/bindings/JSStringDecoder.cpp
|
||||
src/bun.js/bindings/JSWrappingFunction.cpp
|
||||
src/bun.js/bindings/JSX509Certificate.cpp
|
||||
src/bun.js/bindings/JSX509CertificateConstructor.cpp
|
||||
src/bun.js/bindings/JSX509CertificatePrototype.cpp
|
||||
src/bun.js/bindings/linux_perf_tracing.cpp
|
||||
src/bun.js/bindings/MarkedArgumentBufferBinding.cpp
|
||||
src/bun.js/bindings/MarkingConstraint.cpp
|
||||
src/bun.js/bindings/ModuleLoader.cpp
|
||||
src/bun.js/bindings/napi_external.cpp
|
||||
src/bun.js/bindings/napi_finalizer.cpp
|
||||
src/bun.js/bindings/napi_handle_scope.cpp
|
||||
src/bun.js/bindings/napi_type_tag.cpp
|
||||
src/bun.js/bindings/napi.cpp
|
||||
src/bun.js/bindings/NapiClass.cpp
|
||||
src/bun.js/bindings/NapiRef.cpp
|
||||
src/bun.js/bindings/NapiWeakValue.cpp
|
||||
src/bun.js/bindings/ncrpyto_engine.cpp
|
||||
src/bun.js/bindings/ncrypto.cpp
|
||||
src/bun.js/bindings/node/crypto/CryptoDhJob.cpp
|
||||
src/bun.js/bindings/node/crypto/CryptoGenDhKeyPair.cpp
|
||||
src/bun.js/bindings/node/crypto/CryptoGenDsaKeyPair.cpp
|
||||
src/bun.js/bindings/node/crypto/CryptoGenEcKeyPair.cpp
|
||||
src/bun.js/bindings/node/crypto/CryptoGenKeyPair.cpp
|
||||
src/bun.js/bindings/node/crypto/CryptoGenNidKeyPair.cpp
|
||||
src/bun.js/bindings/node/crypto/CryptoGenRsaKeyPair.cpp
|
||||
src/bun.js/bindings/node/crypto/CryptoHkdf.cpp
|
||||
src/bun.js/bindings/node/crypto/CryptoKeygen.cpp
|
||||
src/bun.js/bindings/node/crypto/CryptoKeys.cpp
|
||||
src/bun.js/bindings/node/crypto/CryptoPrimes.cpp
|
||||
src/bun.js/bindings/node/crypto/CryptoSignJob.cpp
|
||||
src/bun.js/bindings/node/crypto/CryptoUtil.cpp
|
||||
src/bun.js/bindings/node/crypto/JSCipher.cpp
|
||||
src/bun.js/bindings/node/crypto/JSCipherConstructor.cpp
|
||||
src/bun.js/bindings/node/crypto/JSCipherPrototype.cpp
|
||||
src/bun.js/bindings/node/crypto/JSDiffieHellman.cpp
|
||||
src/bun.js/bindings/node/crypto/JSDiffieHellmanConstructor.cpp
|
||||
src/bun.js/bindings/node/crypto/JSDiffieHellmanGroup.cpp
|
||||
src/bun.js/bindings/node/crypto/JSDiffieHellmanGroupConstructor.cpp
|
||||
src/bun.js/bindings/node/crypto/JSDiffieHellmanGroupPrototype.cpp
|
||||
src/bun.js/bindings/node/crypto/JSDiffieHellmanPrototype.cpp
|
||||
src/bun.js/bindings/node/crypto/JSECDH.cpp
|
||||
src/bun.js/bindings/node/crypto/JSECDHConstructor.cpp
|
||||
src/bun.js/bindings/node/crypto/JSECDHPrototype.cpp
|
||||
src/bun.js/bindings/node/crypto/JSHash.cpp
|
||||
src/bun.js/bindings/node/crypto/JSHmac.cpp
|
||||
src/bun.js/bindings/node/crypto/JSKeyObject.cpp
|
||||
src/bun.js/bindings/node/crypto/JSKeyObjectConstructor.cpp
|
||||
src/bun.js/bindings/node/crypto/JSKeyObjectPrototype.cpp
|
||||
src/bun.js/bindings/node/crypto/JSPrivateKeyObject.cpp
|
||||
src/bun.js/bindings/node/crypto/JSPrivateKeyObjectConstructor.cpp
|
||||
src/bun.js/bindings/node/crypto/JSPrivateKeyObjectPrototype.cpp
|
||||
src/bun.js/bindings/node/crypto/JSPublicKeyObject.cpp
|
||||
src/bun.js/bindings/node/crypto/JSPublicKeyObjectConstructor.cpp
|
||||
src/bun.js/bindings/node/crypto/JSPublicKeyObjectPrototype.cpp
|
||||
src/bun.js/bindings/node/crypto/JSSecretKeyObject.cpp
|
||||
src/bun.js/bindings/node/crypto/JSSecretKeyObjectConstructor.cpp
|
||||
src/bun.js/bindings/node/crypto/JSSecretKeyObjectPrototype.cpp
|
||||
src/bun.js/bindings/node/crypto/JSSign.cpp
|
||||
src/bun.js/bindings/node/crypto/JSVerify.cpp
|
||||
src/bun.js/bindings/node/crypto/KeyObject.cpp
|
||||
src/bun.js/bindings/node/crypto/node_crypto_binding.cpp
|
||||
src/bun.js/bindings/node/http/JSConnectionsList.cpp
|
||||
src/bun.js/bindings/node/http/JSConnectionsListConstructor.cpp
|
||||
src/bun.js/bindings/node/http/JSConnectionsListPrototype.cpp
|
||||
src/bun.js/bindings/node/http/JSHTTPParser.cpp
|
||||
src/bun.js/bindings/node/http/JSHTTPParserConstructor.cpp
|
||||
src/bun.js/bindings/node/http/JSHTTPParserPrototype.cpp
|
||||
src/bun.js/bindings/node/http/NodeHTTPParser.cpp
|
||||
src/bun.js/bindings/node/NodeTimers.cpp
|
||||
src/bun.js/bindings/NodeAsyncHooks.cpp
|
||||
src/bun.js/bindings/NodeDirent.cpp
|
||||
src/bun.js/bindings/NodeFetch.cpp
|
||||
src/bun.js/bindings/NodeFSStatBinding.cpp
|
||||
src/bun.js/bindings/NodeFSStatFSBinding.cpp
|
||||
src/bun.js/bindings/NodeHTTP.cpp
|
||||
src/bun.js/bindings/NodeTimerObject.cpp
|
||||
src/bun.js/bindings/NodeTLS.cpp
|
||||
src/bun.js/bindings/NodeURL.cpp
|
||||
src/bun.js/bindings/NodeValidator.cpp
|
||||
src/bun.js/bindings/NodeVM.cpp
|
||||
src/bun.js/bindings/NodeVMModule.cpp
|
||||
src/bun.js/bindings/NodeVMScript.cpp
|
||||
src/bun.js/bindings/NodeVMSourceTextModule.cpp
|
||||
src/bun.js/bindings/NodeVMSyntheticModule.cpp
|
||||
src/bun.js/bindings/NoOpForTesting.cpp
|
||||
src/bun.js/bindings/ObjectBindings.cpp
|
||||
src/bun.js/bindings/objects.cpp
|
||||
src/bun.js/bindings/OsBinding.cpp
|
||||
src/bun.js/bindings/Path.cpp
|
||||
src/bun.js/bindings/ProcessBindingBuffer.cpp
|
||||
src/bun.js/bindings/ProcessBindingConstants.cpp
|
||||
src/bun.js/bindings/ProcessBindingFs.cpp
|
||||
src/bun.js/bindings/ProcessBindingHTTPParser.cpp
|
||||
src/bun.js/bindings/ProcessBindingNatives.cpp
|
||||
src/bun.js/bindings/ProcessBindingTTYWrap.cpp
|
||||
src/bun.js/bindings/ProcessBindingUV.cpp
|
||||
src/bun.js/bindings/ProcessIdentifier.cpp
|
||||
src/bun.js/bindings/RegularExpression.cpp
|
||||
src/bun.js/bindings/S3Error.cpp
|
||||
src/bun.js/bindings/ScriptExecutionContext.cpp
|
||||
src/bun.js/bindings/SecretsDarwin.cpp
|
||||
src/bun.js/bindings/SecretsLinux.cpp
|
||||
src/bun.js/bindings/SecretsWindows.cpp
|
||||
src/bun.js/bindings/Serialization.cpp
|
||||
src/bun.js/bindings/ServerRouteList.cpp
|
||||
src/bun.js/bindings/spawn.cpp
|
||||
src/bun.js/bindings/SQLClient.cpp
|
||||
src/bun.js/bindings/sqlite/JSSQLStatement.cpp
|
||||
src/bun.js/bindings/stripANSI.cpp
|
||||
src/bun.js/bindings/Strong.cpp
|
||||
src/bun.js/bindings/TextCodec.cpp
|
||||
src/bun.js/bindings/TextCodecCJK.cpp
|
||||
src/bun.js/bindings/TextCodecReplacement.cpp
|
||||
src/bun.js/bindings/TextCodecSingleByte.cpp
|
||||
src/bun.js/bindings/TextCodecUserDefined.cpp
|
||||
src/bun.js/bindings/TextCodecWrapper.cpp
|
||||
src/bun.js/bindings/TextEncoding.cpp
|
||||
src/bun.js/bindings/TextEncodingRegistry.cpp
|
||||
src/bun.js/bindings/Uint8Array.cpp
|
||||
src/bun.js/bindings/Undici.cpp
|
||||
src/bun.js/bindings/URLDecomposition.cpp
|
||||
src/bun.js/bindings/URLSearchParams.cpp
|
||||
src/bun.js/bindings/UtilInspect.cpp
|
||||
src/bun.js/bindings/v8/node.cpp
|
||||
src/bun.js/bindings/v8/shim/Function.cpp
|
||||
src/bun.js/bindings/v8/shim/FunctionTemplate.cpp
|
||||
src/bun.js/bindings/v8/shim/GlobalInternals.cpp
|
||||
src/bun.js/bindings/v8/shim/Handle.cpp
|
||||
src/bun.js/bindings/v8/shim/HandleScopeBuffer.cpp
|
||||
src/bun.js/bindings/v8/shim/InternalFieldObject.cpp
|
||||
src/bun.js/bindings/v8/shim/Map.cpp
|
||||
src/bun.js/bindings/v8/shim/ObjectTemplate.cpp
|
||||
src/bun.js/bindings/v8/shim/Oddball.cpp
|
||||
src/bun.js/bindings/v8/shim/TaggedPointer.cpp
|
||||
src/bun.js/bindings/v8/v8_api_internal.cpp
|
||||
src/bun.js/bindings/v8/v8_internal.cpp
|
||||
src/bun.js/bindings/v8/V8Array.cpp
|
||||
src/bun.js/bindings/v8/V8Boolean.cpp
|
||||
src/bun.js/bindings/v8/V8Context.cpp
|
||||
src/bun.js/bindings/v8/V8EscapableHandleScope.cpp
|
||||
src/bun.js/bindings/v8/V8EscapableHandleScopeBase.cpp
|
||||
src/bun.js/bindings/v8/V8External.cpp
|
||||
src/bun.js/bindings/v8/V8Function.cpp
|
||||
src/bun.js/bindings/v8/V8FunctionCallbackInfo.cpp
|
||||
src/bun.js/bindings/v8/V8FunctionTemplate.cpp
|
||||
src/bun.js/bindings/v8/V8HandleScope.cpp
|
||||
src/bun.js/bindings/v8/V8Isolate.cpp
|
||||
src/bun.js/bindings/v8/V8Local.cpp
|
||||
src/bun.js/bindings/v8/V8Maybe.cpp
|
||||
src/bun.js/bindings/v8/V8Number.cpp
|
||||
src/bun.js/bindings/v8/V8Object.cpp
|
||||
src/bun.js/bindings/v8/V8ObjectTemplate.cpp
|
||||
src/bun.js/bindings/v8/V8String.cpp
|
||||
src/bun.js/bindings/v8/V8Template.cpp
|
||||
src/bun.js/bindings/v8/V8Value.cpp
|
||||
src/bun.js/bindings/Weak.cpp
|
||||
src/bun.js/bindings/webcore/AbortController.cpp
|
||||
src/bun.js/bindings/webcore/AbortSignal.cpp
|
||||
src/bun.js/bindings/webcore/ActiveDOMObject.cpp
|
||||
src/bun.js/bindings/webcore/BroadcastChannel.cpp
|
||||
src/bun.js/bindings/webcore/BunBroadcastChannelRegistry.cpp
|
||||
src/bun.js/bindings/webcore/CloseEvent.cpp
|
||||
src/bun.js/bindings/webcore/CommonAtomStrings.cpp
|
||||
src/bun.js/bindings/webcore/ContextDestructionObserver.cpp
|
||||
src/bun.js/bindings/webcore/CustomEvent.cpp
|
||||
src/bun.js/bindings/webcore/CustomEventCustom.cpp
|
||||
src/bun.js/bindings/webcore/DOMJITHelpers.cpp
|
||||
src/bun.js/bindings/webcore/ErrorCallback.cpp
|
||||
src/bun.js/bindings/webcore/ErrorEvent.cpp
|
||||
src/bun.js/bindings/webcore/Event.cpp
|
||||
src/bun.js/bindings/webcore/EventContext.cpp
|
||||
src/bun.js/bindings/webcore/EventDispatcher.cpp
|
||||
src/bun.js/bindings/webcore/EventEmitter.cpp
|
||||
src/bun.js/bindings/webcore/EventFactory.cpp
|
||||
src/bun.js/bindings/webcore/EventListenerMap.cpp
|
||||
src/bun.js/bindings/webcore/EventNames.cpp
|
||||
src/bun.js/bindings/webcore/EventPath.cpp
|
||||
src/bun.js/bindings/webcore/EventTarget.cpp
|
||||
src/bun.js/bindings/webcore/EventTargetConcrete.cpp
|
||||
src/bun.js/bindings/webcore/EventTargetFactory.cpp
|
||||
src/bun.js/bindings/webcore/FetchHeaders.cpp
|
||||
src/bun.js/bindings/webcore/HeaderFieldTokenizer.cpp
|
||||
src/bun.js/bindings/webcore/HTTPHeaderField.cpp
|
||||
src/bun.js/bindings/webcore/HTTPHeaderIdentifiers.cpp
|
||||
src/bun.js/bindings/webcore/HTTPHeaderMap.cpp
|
||||
src/bun.js/bindings/webcore/HTTPHeaderNames.cpp
|
||||
src/bun.js/bindings/webcore/HTTPHeaderStrings.cpp
|
||||
src/bun.js/bindings/webcore/HTTPHeaderValues.cpp
|
||||
src/bun.js/bindings/webcore/HTTPParsers.cpp
|
||||
src/bun.js/bindings/webcore/IdentifierEventListenerMap.cpp
|
||||
src/bun.js/bindings/webcore/InternalWritableStream.cpp
|
||||
src/bun.js/bindings/webcore/JSAbortAlgorithm.cpp
|
||||
src/bun.js/bindings/webcore/JSAbortController.cpp
|
||||
src/bun.js/bindings/webcore/JSAbortSignal.cpp
|
||||
src/bun.js/bindings/webcore/JSAbortSignalCustom.cpp
|
||||
src/bun.js/bindings/webcore/JSAddEventListenerOptions.cpp
|
||||
src/bun.js/bindings/webcore/JSBroadcastChannel.cpp
|
||||
src/bun.js/bindings/webcore/JSByteLengthQueuingStrategy.cpp
|
||||
src/bun.js/bindings/webcore/JSCallbackData.cpp
|
||||
src/bun.js/bindings/webcore/JSCloseEvent.cpp
|
||||
src/bun.js/bindings/webcore/JSCookie.cpp
|
||||
src/bun.js/bindings/webcore/JSCookieMap.cpp
|
||||
src/bun.js/bindings/webcore/JSCountQueuingStrategy.cpp
|
||||
src/bun.js/bindings/webcore/JSCustomEvent.cpp
|
||||
src/bun.js/bindings/webcore/JSDOMBindingInternalsBuiltins.cpp
|
||||
src/bun.js/bindings/webcore/JSDOMBuiltinConstructorBase.cpp
|
||||
src/bun.js/bindings/webcore/JSDOMConstructorBase.cpp
|
||||
src/bun.js/bindings/webcore/JSDOMConvertDate.cpp
|
||||
src/bun.js/bindings/webcore/JSDOMConvertNumbers.cpp
|
||||
src/bun.js/bindings/webcore/JSDOMConvertStrings.cpp
|
||||
src/bun.js/bindings/webcore/JSDOMConvertWebGL.cpp
|
||||
src/bun.js/bindings/webcore/JSDOMException.cpp
|
||||
src/bun.js/bindings/webcore/JSDOMFormData.cpp
|
||||
src/bun.js/bindings/webcore/JSDOMGuardedObject.cpp
|
||||
src/bun.js/bindings/webcore/JSDOMIterator.cpp
|
||||
src/bun.js/bindings/webcore/JSDOMOperation.cpp
|
||||
src/bun.js/bindings/webcore/JSDOMPromise.cpp
|
||||
src/bun.js/bindings/webcore/JSDOMPromiseDeferred.cpp
|
||||
src/bun.js/bindings/webcore/JSDOMURL.cpp
|
||||
src/bun.js/bindings/webcore/JSErrorCallback.cpp
|
||||
src/bun.js/bindings/webcore/JSErrorEvent.cpp
|
||||
src/bun.js/bindings/webcore/JSErrorEventCustom.cpp
|
||||
src/bun.js/bindings/webcore/JSErrorHandler.cpp
|
||||
src/bun.js/bindings/webcore/JSEvent.cpp
|
||||
src/bun.js/bindings/webcore/JSEventCustom.cpp
|
||||
src/bun.js/bindings/webcore/JSEventDOMJIT.cpp
|
||||
src/bun.js/bindings/webcore/JSEventEmitter.cpp
|
||||
src/bun.js/bindings/webcore/JSEventEmitterCustom.cpp
|
||||
src/bun.js/bindings/webcore/JSEventInit.cpp
|
||||
src/bun.js/bindings/webcore/JSEventListener.cpp
|
||||
src/bun.js/bindings/webcore/JSEventListenerOptions.cpp
|
||||
src/bun.js/bindings/webcore/JSEventModifierInit.cpp
|
||||
src/bun.js/bindings/webcore/JSEventTarget.cpp
|
||||
src/bun.js/bindings/webcore/JSEventTargetCustom.cpp
|
||||
src/bun.js/bindings/webcore/JSEventTargetNode.cpp
|
||||
src/bun.js/bindings/webcore/JSFetchHeaders.cpp
|
||||
src/bun.js/bindings/webcore/JSMessageChannel.cpp
|
||||
src/bun.js/bindings/webcore/JSMessageChannelCustom.cpp
|
||||
src/bun.js/bindings/webcore/JSMessageEvent.cpp
|
||||
src/bun.js/bindings/webcore/JSMessageEventCustom.cpp
|
||||
src/bun.js/bindings/webcore/JSMessagePort.cpp
|
||||
src/bun.js/bindings/webcore/JSMessagePortCustom.cpp
|
||||
src/bun.js/bindings/webcore/JSMIMEBindings.cpp
|
||||
src/bun.js/bindings/webcore/JSMIMEParams.cpp
|
||||
src/bun.js/bindings/webcore/JSMIMEType.cpp
|
||||
src/bun.js/bindings/webcore/JSPerformance.cpp
|
||||
src/bun.js/bindings/webcore/JSPerformanceEntry.cpp
|
||||
src/bun.js/bindings/webcore/JSPerformanceEntryCustom.cpp
|
||||
src/bun.js/bindings/webcore/JSPerformanceMark.cpp
|
||||
src/bun.js/bindings/webcore/JSPerformanceMarkOptions.cpp
|
||||
src/bun.js/bindings/webcore/JSPerformanceMeasure.cpp
|
||||
src/bun.js/bindings/webcore/JSPerformanceMeasureOptions.cpp
|
||||
src/bun.js/bindings/webcore/JSPerformanceObserver.cpp
|
||||
src/bun.js/bindings/webcore/JSPerformanceObserverCallback.cpp
|
||||
src/bun.js/bindings/webcore/JSPerformanceObserverCustom.cpp
|
||||
src/bun.js/bindings/webcore/JSPerformanceObserverEntryList.cpp
|
||||
src/bun.js/bindings/webcore/JSPerformanceResourceTiming.cpp
|
||||
src/bun.js/bindings/webcore/JSPerformanceServerTiming.cpp
|
||||
src/bun.js/bindings/webcore/JSPerformanceTiming.cpp
|
||||
src/bun.js/bindings/webcore/JSReadableByteStreamController.cpp
|
||||
src/bun.js/bindings/webcore/JSReadableStream.cpp
|
||||
src/bun.js/bindings/webcore/JSReadableStreamBYOBReader.cpp
|
||||
src/bun.js/bindings/webcore/JSReadableStreamBYOBRequest.cpp
|
||||
src/bun.js/bindings/webcore/JSReadableStreamDefaultController.cpp
|
||||
src/bun.js/bindings/webcore/JSReadableStreamDefaultReader.cpp
|
||||
src/bun.js/bindings/webcore/JSReadableStreamSink.cpp
|
||||
src/bun.js/bindings/webcore/JSReadableStreamSource.cpp
|
||||
src/bun.js/bindings/webcore/JSReadableStreamSourceCustom.cpp
|
||||
src/bun.js/bindings/webcore/JSStructuredSerializeOptions.cpp
|
||||
src/bun.js/bindings/webcore/JSTextDecoderStream.cpp
|
||||
src/bun.js/bindings/webcore/JSTextEncoder.cpp
|
||||
src/bun.js/bindings/webcore/JSTextEncoderStream.cpp
|
||||
src/bun.js/bindings/webcore/JSTransformStream.cpp
|
||||
src/bun.js/bindings/webcore/JSTransformStreamDefaultController.cpp
|
||||
src/bun.js/bindings/webcore/JSURLSearchParams.cpp
|
||||
src/bun.js/bindings/webcore/JSWasmStreamingCompiler.cpp
|
||||
src/bun.js/bindings/webcore/JSWebSocket.cpp
|
||||
src/bun.js/bindings/webcore/JSWorker.cpp
|
||||
src/bun.js/bindings/webcore/JSWorkerOptions.cpp
|
||||
src/bun.js/bindings/webcore/JSWritableStream.cpp
|
||||
src/bun.js/bindings/webcore/JSWritableStreamDefaultController.cpp
|
||||
src/bun.js/bindings/webcore/JSWritableStreamDefaultWriter.cpp
|
||||
src/bun.js/bindings/webcore/JSWritableStreamSink.cpp
|
||||
src/bun.js/bindings/webcore/MessageChannel.cpp
|
||||
src/bun.js/bindings/webcore/MessageEvent.cpp
|
||||
src/bun.js/bindings/webcore/MessagePort.cpp
|
||||
src/bun.js/bindings/webcore/MessagePortChannel.cpp
|
||||
src/bun.js/bindings/webcore/MessagePortChannelProvider.cpp
|
||||
src/bun.js/bindings/webcore/MessagePortChannelProviderImpl.cpp
|
||||
src/bun.js/bindings/webcore/MessagePortChannelRegistry.cpp
|
||||
src/bun.js/bindings/webcore/NetworkLoadMetrics.cpp
|
||||
src/bun.js/bindings/webcore/Performance.cpp
|
||||
src/bun.js/bindings/webcore/PerformanceEntry.cpp
|
||||
src/bun.js/bindings/webcore/PerformanceMark.cpp
|
||||
src/bun.js/bindings/webcore/PerformanceMeasure.cpp
|
||||
src/bun.js/bindings/webcore/PerformanceObserver.cpp
|
||||
src/bun.js/bindings/webcore/PerformanceObserverEntryList.cpp
|
||||
src/bun.js/bindings/webcore/PerformanceResourceTiming.cpp
|
||||
src/bun.js/bindings/webcore/PerformanceServerTiming.cpp
|
||||
src/bun.js/bindings/webcore/PerformanceTiming.cpp
|
||||
src/bun.js/bindings/webcore/PerformanceUserTiming.cpp
|
||||
src/bun.js/bindings/webcore/ReadableStream.cpp
|
||||
src/bun.js/bindings/webcore/ReadableStreamDefaultController.cpp
|
||||
src/bun.js/bindings/webcore/ReadableStreamSink.cpp
|
||||
src/bun.js/bindings/webcore/ReadableStreamSource.cpp
|
||||
src/bun.js/bindings/webcore/ResourceTiming.cpp
|
||||
src/bun.js/bindings/webcore/RFC7230.cpp
|
||||
src/bun.js/bindings/webcore/SerializedScriptValue.cpp
|
||||
src/bun.js/bindings/webcore/ServerTiming.cpp
|
||||
src/bun.js/bindings/webcore/ServerTimingParser.cpp
|
||||
src/bun.js/bindings/webcore/StructuredClone.cpp
|
||||
src/bun.js/bindings/webcore/TextEncoder.cpp
|
||||
src/bun.js/bindings/webcore/WebCoreTypedArrayController.cpp
|
||||
src/bun.js/bindings/webcore/WebSocket.cpp
|
||||
src/bun.js/bindings/webcore/Worker.cpp
|
||||
src/bun.js/bindings/webcore/WritableStream.cpp
|
||||
src/bun.js/bindings/webcrypto/CommonCryptoDERUtilities.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithm.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_CBC.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_CBCOpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_CFB.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_CFBOpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_CTR.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_CTROpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_GCM.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_GCMOpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_KW.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_KWOpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmECDH.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmECDHOpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmECDSA.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmECDSAOpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmEd25519.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmHKDF.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmHKDFOpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmHMAC.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmHMACOpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmPBKDF2.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmPBKDF2OpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmRegistry.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmRegistryOpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmRSA_OAEP.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmRSA_OAEPOpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmRSA_PSS.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmRSA_PSSOpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmRSAES_PKCS1_v1_5.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmRSAES_PKCS1_v1_5OpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmRSASSA_PKCS1_v1_5.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmRSASSA_PKCS1_v1_5OpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA1.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA224.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA256.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA384.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA512.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmX25519.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoDigest.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoKey.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoKeyAES.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoKeyEC.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoKeyECOpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoKeyHMAC.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoKeyOKP.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoKeyOKPOpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoKeyRaw.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoKeyRSA.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoKeyRSAComponents.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoKeyRSAOpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/JSAesCbcCfbParams.cpp
|
||||
src/bun.js/bindings/webcrypto/JSAesCtrParams.cpp
|
||||
src/bun.js/bindings/webcrypto/JSAesGcmParams.cpp
|
||||
src/bun.js/bindings/webcrypto/JSAesKeyParams.cpp
|
||||
src/bun.js/bindings/webcrypto/JSCryptoAesKeyAlgorithm.cpp
|
||||
src/bun.js/bindings/webcrypto/JSCryptoAlgorithmParameters.cpp
|
||||
src/bun.js/bindings/webcrypto/JSCryptoEcKeyAlgorithm.cpp
|
||||
src/bun.js/bindings/webcrypto/JSCryptoHmacKeyAlgorithm.cpp
|
||||
src/bun.js/bindings/webcrypto/JSCryptoKey.cpp
|
||||
src/bun.js/bindings/webcrypto/JSCryptoKeyAlgorithm.cpp
|
||||
src/bun.js/bindings/webcrypto/JSCryptoKeyPair.cpp
|
||||
src/bun.js/bindings/webcrypto/JSCryptoKeyUsage.cpp
|
||||
src/bun.js/bindings/webcrypto/JSCryptoRsaHashedKeyAlgorithm.cpp
|
||||
src/bun.js/bindings/webcrypto/JSCryptoRsaKeyAlgorithm.cpp
|
||||
src/bun.js/bindings/webcrypto/JSEcdhKeyDeriveParams.cpp
|
||||
src/bun.js/bindings/webcrypto/JSEcdsaParams.cpp
|
||||
src/bun.js/bindings/webcrypto/JSEcKeyParams.cpp
|
||||
src/bun.js/bindings/webcrypto/JSHkdfParams.cpp
|
||||
src/bun.js/bindings/webcrypto/JSHmacKeyParams.cpp
|
||||
src/bun.js/bindings/webcrypto/JSJsonWebKey.cpp
|
||||
src/bun.js/bindings/webcrypto/JSPbkdf2Params.cpp
|
||||
src/bun.js/bindings/webcrypto/JSRsaHashedImportParams.cpp
|
||||
src/bun.js/bindings/webcrypto/JSRsaHashedKeyGenParams.cpp
|
||||
src/bun.js/bindings/webcrypto/JSRsaKeyGenParams.cpp
|
||||
src/bun.js/bindings/webcrypto/JSRsaOaepParams.cpp
|
||||
src/bun.js/bindings/webcrypto/JSRsaOtherPrimesInfo.cpp
|
||||
src/bun.js/bindings/webcrypto/JSRsaPssParams.cpp
|
||||
src/bun.js/bindings/webcrypto/JSSubtleCrypto.cpp
|
||||
src/bun.js/bindings/webcrypto/JSX25519Params.cpp
|
||||
src/bun.js/bindings/webcrypto/OpenSSLUtilities.cpp
|
||||
src/bun.js/bindings/webcrypto/PhonyWorkQueue.cpp
|
||||
src/bun.js/bindings/webcrypto/SerializedCryptoKeyWrapOpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/SubtleCrypto.cpp
|
||||
src/bun.js/bindings/workaround-missing-symbols.cpp
|
||||
src/bun.js/bindings/wtf-bindings.cpp
|
||||
src/bun.js/bindings/ZigGeneratedCode.cpp
|
||||
src/bun.js/bindings/ZigGlobalObject.cpp
|
||||
src/bun.js/bindings/ZigSourceProvider.cpp
|
||||
src/bun.js/modules/NodeModuleModule.cpp
|
||||
src/bun.js/modules/NodeTTYModule.cpp
|
||||
src/bun.js/modules/NodeUtilTypesModule.cpp
|
||||
src/bun.js/modules/ObjectModule.cpp
|
||||
src/deps/libuwsockets.cpp
|
||||
src/io/io_darwin.cpp
|
||||
src/vm/Semaphore.cpp
|
||||
src/vm/SigintWatcher.cpp
|
||||
@@ -1,21 +0,0 @@
|
||||
src/codegen/bake-codegen.ts
|
||||
src/codegen/bindgen-lib-internal.ts
|
||||
src/codegen/bindgen-lib.ts
|
||||
src/codegen/bindgen.ts
|
||||
src/codegen/buildTypeFlag.ts
|
||||
src/codegen/builtin-parser.ts
|
||||
src/codegen/bundle-functions.ts
|
||||
src/codegen/bundle-modules.ts
|
||||
src/codegen/class-definitions.ts
|
||||
src/codegen/client-js.ts
|
||||
src/codegen/cppbind.ts
|
||||
src/codegen/create-hash-table.ts
|
||||
src/codegen/generate-classes.ts
|
||||
src/codegen/generate-compact-string-table.ts
|
||||
src/codegen/generate-js2native.ts
|
||||
src/codegen/generate-jssink.ts
|
||||
src/codegen/generate-node-errors.ts
|
||||
src/codegen/helpers.ts
|
||||
src/codegen/internal-module-registry-scanner.ts
|
||||
src/codegen/replacements.ts
|
||||
src/codegen/shared-types.ts
|
||||
@@ -1,171 +0,0 @@
|
||||
src/js/builtins.d.ts
|
||||
src/js/builtins/Bake.ts
|
||||
src/js/builtins/BundlerPlugin.ts
|
||||
src/js/builtins/ByteLengthQueuingStrategy.ts
|
||||
src/js/builtins/CommonJS.ts
|
||||
src/js/builtins/ConsoleObject.ts
|
||||
src/js/builtins/CountQueuingStrategy.ts
|
||||
src/js/builtins/Glob.ts
|
||||
src/js/builtins/ImportMetaObject.ts
|
||||
src/js/builtins/Ipc.ts
|
||||
src/js/builtins/JSBufferConstructor.ts
|
||||
src/js/builtins/JSBufferPrototype.ts
|
||||
src/js/builtins/NodeModuleObject.ts
|
||||
src/js/builtins/Peek.ts
|
||||
src/js/builtins/ProcessObjectInternals.ts
|
||||
src/js/builtins/ReadableByteStreamController.ts
|
||||
src/js/builtins/ReadableByteStreamInternals.ts
|
||||
src/js/builtins/ReadableStream.ts
|
||||
src/js/builtins/ReadableStreamBYOBReader.ts
|
||||
src/js/builtins/ReadableStreamBYOBRequest.ts
|
||||
src/js/builtins/ReadableStreamDefaultController.ts
|
||||
src/js/builtins/ReadableStreamDefaultReader.ts
|
||||
src/js/builtins/ReadableStreamInternals.ts
|
||||
src/js/builtins/shell.ts
|
||||
src/js/builtins/StreamInternals.ts
|
||||
src/js/builtins/TextDecoderStream.ts
|
||||
src/js/builtins/TextEncoderStream.ts
|
||||
src/js/builtins/TransformStream.ts
|
||||
src/js/builtins/TransformStreamDefaultController.ts
|
||||
src/js/builtins/TransformStreamInternals.ts
|
||||
src/js/builtins/UtilInspect.ts
|
||||
src/js/builtins/WasmStreaming.ts
|
||||
src/js/builtins/WritableStreamDefaultController.ts
|
||||
src/js/builtins/WritableStreamDefaultWriter.ts
|
||||
src/js/builtins/WritableStreamInternals.ts
|
||||
src/js/bun/ffi.ts
|
||||
src/js/bun/sql.ts
|
||||
src/js/bun/sqlite.ts
|
||||
src/js/internal-for-testing.ts
|
||||
src/js/internal/abort_listener.ts
|
||||
src/js/internal/assert/assertion_error.ts
|
||||
src/js/internal/assert/calltracker.ts
|
||||
src/js/internal/assert/myers_diff.ts
|
||||
src/js/internal/assert/utils.ts
|
||||
src/js/internal/buffer.ts
|
||||
src/js/internal/cluster/child.ts
|
||||
src/js/internal/cluster/isPrimary.ts
|
||||
src/js/internal/cluster/primary.ts
|
||||
src/js/internal/cluster/RoundRobinHandle.ts
|
||||
src/js/internal/cluster/Worker.ts
|
||||
src/js/internal/crypto/x509.ts
|
||||
src/js/internal/debugger.ts
|
||||
src/js/internal/errors.ts
|
||||
src/js/internal/fifo.ts
|
||||
src/js/internal/fixed_queue.ts
|
||||
src/js/internal/freelist.ts
|
||||
src/js/internal/fs/cp-sync.ts
|
||||
src/js/internal/fs/cp.ts
|
||||
src/js/internal/fs/glob.ts
|
||||
src/js/internal/fs/streams.ts
|
||||
src/js/internal/html.ts
|
||||
src/js/internal/http.ts
|
||||
src/js/internal/http/FakeSocket.ts
|
||||
src/js/internal/linkedlist.ts
|
||||
src/js/internal/primordials.js
|
||||
src/js/internal/promisify.ts
|
||||
src/js/internal/shared.ts
|
||||
src/js/internal/sql/errors.ts
|
||||
src/js/internal/sql/mysql.ts
|
||||
src/js/internal/sql/postgres.ts
|
||||
src/js/internal/sql/query.ts
|
||||
src/js/internal/sql/shared.ts
|
||||
src/js/internal/sql/sqlite.ts
|
||||
src/js/internal/stream.promises.ts
|
||||
src/js/internal/stream.ts
|
||||
src/js/internal/streams/add-abort-signal.ts
|
||||
src/js/internal/streams/compose.ts
|
||||
src/js/internal/streams/destroy.ts
|
||||
src/js/internal/streams/duplex.ts
|
||||
src/js/internal/streams/duplexify.ts
|
||||
src/js/internal/streams/duplexpair.ts
|
||||
src/js/internal/streams/end-of-stream.ts
|
||||
src/js/internal/streams/from.ts
|
||||
src/js/internal/streams/lazy_transform.ts
|
||||
src/js/internal/streams/legacy.ts
|
||||
src/js/internal/streams/native-readable.ts
|
||||
src/js/internal/streams/operators.ts
|
||||
src/js/internal/streams/passthrough.ts
|
||||
src/js/internal/streams/pipeline.ts
|
||||
src/js/internal/streams/readable.ts
|
||||
src/js/internal/streams/state.ts
|
||||
src/js/internal/streams/transform.ts
|
||||
src/js/internal/streams/utils.ts
|
||||
src/js/internal/streams/writable.ts
|
||||
src/js/internal/timers.ts
|
||||
src/js/internal/tls.ts
|
||||
src/js/internal/tty.ts
|
||||
src/js/internal/url.ts
|
||||
src/js/internal/util/colors.ts
|
||||
src/js/internal/util/inspect.d.ts
|
||||
src/js/internal/util/inspect.js
|
||||
src/js/internal/util/mime.ts
|
||||
src/js/internal/validators.ts
|
||||
src/js/internal/webstreams_adapters.ts
|
||||
src/js/node/_http_agent.ts
|
||||
src/js/node/_http_client.ts
|
||||
src/js/node/_http_common.ts
|
||||
src/js/node/_http_incoming.ts
|
||||
src/js/node/_http_outgoing.ts
|
||||
src/js/node/_http_server.ts
|
||||
src/js/node/_stream_duplex.ts
|
||||
src/js/node/_stream_passthrough.ts
|
||||
src/js/node/_stream_readable.ts
|
||||
src/js/node/_stream_transform.ts
|
||||
src/js/node/_stream_wrap.ts
|
||||
src/js/node/_stream_writable.ts
|
||||
src/js/node/_tls_common.ts
|
||||
src/js/node/assert.strict.ts
|
||||
src/js/node/assert.ts
|
||||
src/js/node/async_hooks.ts
|
||||
src/js/node/child_process.ts
|
||||
src/js/node/cluster.ts
|
||||
src/js/node/console.ts
|
||||
src/js/node/crypto.ts
|
||||
src/js/node/dgram.ts
|
||||
src/js/node/diagnostics_channel.ts
|
||||
src/js/node/dns.promises.ts
|
||||
src/js/node/dns.ts
|
||||
src/js/node/domain.ts
|
||||
src/js/node/events.ts
|
||||
src/js/node/fs.promises.ts
|
||||
src/js/node/fs.ts
|
||||
src/js/node/http.ts
|
||||
src/js/node/http2.ts
|
||||
src/js/node/https.ts
|
||||
src/js/node/inspector.ts
|
||||
src/js/node/net.ts
|
||||
src/js/node/os.ts
|
||||
src/js/node/path.posix.ts
|
||||
src/js/node/path.ts
|
||||
src/js/node/path.win32.ts
|
||||
src/js/node/perf_hooks.ts
|
||||
src/js/node/punycode.ts
|
||||
src/js/node/querystring.ts
|
||||
src/js/node/readline.promises.ts
|
||||
src/js/node/readline.ts
|
||||
src/js/node/repl.ts
|
||||
src/js/node/stream.consumers.ts
|
||||
src/js/node/stream.promises.ts
|
||||
src/js/node/stream.ts
|
||||
src/js/node/stream.web.ts
|
||||
src/js/node/test.ts
|
||||
src/js/node/timers.promises.ts
|
||||
src/js/node/timers.ts
|
||||
src/js/node/tls.ts
|
||||
src/js/node/trace_events.ts
|
||||
src/js/node/tty.ts
|
||||
src/js/node/url.ts
|
||||
src/js/node/util.ts
|
||||
src/js/node/v8.ts
|
||||
src/js/node/vm.ts
|
||||
src/js/node/wasi.ts
|
||||
src/js/node/worker_threads.ts
|
||||
src/js/node/zlib.ts
|
||||
src/js/private.d.ts
|
||||
src/js/thirdparty/isomorphic-fetch.ts
|
||||
src/js/thirdparty/node-fetch.ts
|
||||
src/js/thirdparty/undici.js
|
||||
src/js/thirdparty/vercel_fetch.js
|
||||
src/js/thirdparty/ws.js
|
||||
src/js/wasi-runner.js
|
||||
@@ -1,24 +0,0 @@
|
||||
src/node-fallbacks/assert.js
|
||||
src/node-fallbacks/buffer.js
|
||||
src/node-fallbacks/console.js
|
||||
src/node-fallbacks/constants.js
|
||||
src/node-fallbacks/crypto.js
|
||||
src/node-fallbacks/domain.js
|
||||
src/node-fallbacks/events.js
|
||||
src/node-fallbacks/http.js
|
||||
src/node-fallbacks/https.js
|
||||
src/node-fallbacks/net.js
|
||||
src/node-fallbacks/os.js
|
||||
src/node-fallbacks/path.js
|
||||
src/node-fallbacks/process.js
|
||||
src/node-fallbacks/punycode.js
|
||||
src/node-fallbacks/querystring.js
|
||||
src/node-fallbacks/stream.js
|
||||
src/node-fallbacks/string_decoder.js
|
||||
src/node-fallbacks/sys.js
|
||||
src/node-fallbacks/timers.js
|
||||
src/node-fallbacks/timers.promises.js
|
||||
src/node-fallbacks/tty.js
|
||||
src/node-fallbacks/url.js
|
||||
src/node-fallbacks/util.js
|
||||
src/node-fallbacks/zlib.js
|
||||
@@ -1,25 +0,0 @@
|
||||
src/bun.js/api/BunObject.classes.ts
|
||||
src/bun.js/api/crypto.classes.ts
|
||||
src/bun.js/api/ffi.classes.ts
|
||||
src/bun.js/api/filesystem_router.classes.ts
|
||||
src/bun.js/api/Glob.classes.ts
|
||||
src/bun.js/api/h2.classes.ts
|
||||
src/bun.js/api/html_rewriter.classes.ts
|
||||
src/bun.js/api/JSBundler.classes.ts
|
||||
src/bun.js/api/ResumableSink.classes.ts
|
||||
src/bun.js/api/S3Client.classes.ts
|
||||
src/bun.js/api/S3Stat.classes.ts
|
||||
src/bun.js/api/server.classes.ts
|
||||
src/bun.js/api/Shell.classes.ts
|
||||
src/bun.js/api/ShellArgs.classes.ts
|
||||
src/bun.js/api/sockets.classes.ts
|
||||
src/bun.js/api/sourcemap.classes.ts
|
||||
src/bun.js/api/sql.classes.ts
|
||||
src/bun.js/api/streams.classes.ts
|
||||
src/bun.js/api/valkey.classes.ts
|
||||
src/bun.js/api/zlib.classes.ts
|
||||
src/bun.js/node/node.classes.ts
|
||||
src/bun.js/resolve_message.classes.ts
|
||||
src/bun.js/test/jest.classes.ts
|
||||
src/bun.js/webcore/encoding.classes.ts
|
||||
src/bun.js/webcore/response.classes.ts
|
||||
File diff suppressed because it is too large
Load Diff
@@ -4,7 +4,7 @@ register_repository(
|
||||
REPOSITORY
|
||||
oven-sh/boringssl
|
||||
COMMIT
|
||||
7a5d984c69b0c34c4cbb56c6812eaa5b9bef485c
|
||||
f1ffd9e83d4f5c28a9c70d73f9a4e6fcf310062f
|
||||
)
|
||||
|
||||
register_cmake_command(
|
||||
|
||||
@@ -636,6 +636,7 @@ register_command(
|
||||
SOURCES
|
||||
${BUN_ZIG_SOURCES}
|
||||
${BUN_ZIG_GENERATED_SOURCES}
|
||||
${CWD}/src/install/PackageManager/scanner-entry.ts # Is there a better way to do this?
|
||||
)
|
||||
|
||||
set_property(TARGET bun-zig PROPERTY JOB_POOL compile_pool)
|
||||
@@ -1125,6 +1126,9 @@ endif()
|
||||
|
||||
include_directories(${WEBKIT_INCLUDE_PATH})
|
||||
|
||||
# Include the generated dependency versions header
|
||||
include_directories(${CMAKE_BINARY_DIR})
|
||||
|
||||
if(NOT WEBKIT_LOCAL AND NOT APPLE)
|
||||
include_directories(${WEBKIT_INCLUDE_PATH}/wtf/unicode)
|
||||
endif()
|
||||
|
||||
@@ -4,7 +4,7 @@ register_repository(
|
||||
REPOSITORY
|
||||
HdrHistogram/HdrHistogram_c
|
||||
COMMIT
|
||||
8dcce8f68512fca460b171bccc3a5afce0048779
|
||||
be60a9987ee48d0abf0d7b6a175bad8d6c1585d1
|
||||
)
|
||||
|
||||
register_cmake_command(
|
||||
|
||||
209
cmake/tools/GenerateDependencyVersions.cmake
Normal file
209
cmake/tools/GenerateDependencyVersions.cmake
Normal file
@@ -0,0 +1,209 @@
|
||||
# GenerateDependencyVersions.cmake
|
||||
# Generates a header file with all dependency versions
|
||||
|
||||
# Function to extract version from git tree object
|
||||
function(get_git_tree_hash dep_name output_var)
|
||||
execute_process(
|
||||
COMMAND git rev-parse HEAD:./src/deps/${dep_name}
|
||||
WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}"
|
||||
OUTPUT_VARIABLE commit_hash
|
||||
OUTPUT_STRIP_TRAILING_WHITESPACE
|
||||
ERROR_QUIET
|
||||
RESULT_VARIABLE result
|
||||
)
|
||||
if(result EQUAL 0 AND commit_hash)
|
||||
set(${output_var} "${commit_hash}" PARENT_SCOPE)
|
||||
else()
|
||||
set(${output_var} "unknown" PARENT_SCOPE)
|
||||
endif()
|
||||
endfunction()
|
||||
|
||||
# Function to extract version from header file using regex
|
||||
function(extract_version_from_header header_file regex_pattern output_var)
|
||||
if(EXISTS "${header_file}")
|
||||
file(STRINGS "${header_file}" version_line REGEX "${regex_pattern}")
|
||||
if(version_line)
|
||||
string(REGEX MATCH "${regex_pattern}" _match "${version_line}")
|
||||
if(CMAKE_MATCH_1)
|
||||
set(${output_var} "${CMAKE_MATCH_1}" PARENT_SCOPE)
|
||||
else()
|
||||
set(${output_var} "unknown" PARENT_SCOPE)
|
||||
endif()
|
||||
else()
|
||||
set(${output_var} "unknown" PARENT_SCOPE)
|
||||
endif()
|
||||
else()
|
||||
set(${output_var} "unknown" PARENT_SCOPE)
|
||||
endif()
|
||||
endfunction()
|
||||
|
||||
# Main function to generate the header file
|
||||
function(generate_dependency_versions_header)
|
||||
set(DEPS_PATH "${CMAKE_SOURCE_DIR}/src/deps")
|
||||
set(VENDOR_PATH "${CMAKE_SOURCE_DIR}/vendor")
|
||||
|
||||
# Initialize version variables
|
||||
set(DEPENDENCY_VERSIONS "")
|
||||
|
||||
# WebKit version (from SetupWebKit.cmake or command line)
|
||||
if(WEBKIT_VERSION)
|
||||
set(WEBKIT_VERSION_STR "${WEBKIT_VERSION}")
|
||||
else()
|
||||
set(WEBKIT_VERSION_STR "0ddf6f47af0a9782a354f61e06d7f83d097d9f84")
|
||||
endif()
|
||||
list(APPEND DEPENDENCY_VERSIONS "WEBKIT" "${WEBKIT_VERSION_STR}")
|
||||
|
||||
# Track input files so CMake reconfigures when they change
|
||||
set_property(DIRECTORY APPEND PROPERTY CMAKE_CONFIGURE_DEPENDS
|
||||
"${CMAKE_SOURCE_DIR}/package.json"
|
||||
"${VENDOR_PATH}/libdeflate/libdeflate.h"
|
||||
"${VENDOR_PATH}/zlib/zlib.h"
|
||||
"${DEPS_PATH}/zstd/lib/zstd.h"
|
||||
)
|
||||
|
||||
# Hardcoded dependency versions (previously from generated_versions_list.zig)
|
||||
# These are the commit hashes/tree objects for each dependency
|
||||
list(APPEND DEPENDENCY_VERSIONS "BORINGSSL" "29a2cd359458c9384694b75456026e4b57e3e567")
|
||||
list(APPEND DEPENDENCY_VERSIONS "C_ARES" "d1722e6e8acaf10eb73fa995798a9cd421d9f85e")
|
||||
list(APPEND DEPENDENCY_VERSIONS "LIBARCHIVE" "898dc8319355b7e985f68a9819f182aaed61b53a")
|
||||
list(APPEND DEPENDENCY_VERSIONS "LIBDEFLATE_HASH" "dc76454a39e7e83b68c3704b6e3784654f8d5ac5")
|
||||
list(APPEND DEPENDENCY_VERSIONS "LOLHTML" "8d4c273ded322193d017042d1f48df2766b0f88b")
|
||||
list(APPEND DEPENDENCY_VERSIONS "LSHPACK" "3d0f1fc1d6e66a642e7a98c55deb38aa986eb4b0")
|
||||
list(APPEND DEPENDENCY_VERSIONS "MIMALLOC" "4c283af60cdae205df5a872530c77e2a6a307d43")
|
||||
list(APPEND DEPENDENCY_VERSIONS "PICOHTTPPARSER" "066d2b1e9ab820703db0837a7255d92d30f0c9f5")
|
||||
list(APPEND DEPENDENCY_VERSIONS "TINYCC" "ab631362d839333660a265d3084d8ff060b96753")
|
||||
list(APPEND DEPENDENCY_VERSIONS "ZLIB_HASH" "886098f3f339617b4243b286f5ed364b9989e245")
|
||||
list(APPEND DEPENDENCY_VERSIONS "ZSTD_HASH" "794ea1b0afca0f020f4e57b6732332231fb23c70")
|
||||
|
||||
# Extract semantic versions from header files where available
|
||||
extract_version_from_header(
|
||||
"${VENDOR_PATH}/libdeflate/libdeflate.h"
|
||||
"#define LIBDEFLATE_VERSION_STRING[ \t]+\"([0-9\\.]+)\""
|
||||
LIBDEFLATE_VERSION_STRING
|
||||
)
|
||||
list(APPEND DEPENDENCY_VERSIONS "LIBDEFLATE_VERSION" "${LIBDEFLATE_VERSION_STRING}")
|
||||
|
||||
extract_version_from_header(
|
||||
"${VENDOR_PATH}/zlib/zlib.h"
|
||||
"#define[ \t]+ZLIB_VERSION[ \t]+\"([^\"]+)\""
|
||||
ZLIB_VERSION_STRING
|
||||
)
|
||||
list(APPEND DEPENDENCY_VERSIONS "ZLIB_VERSION" "${ZLIB_VERSION_STRING}")
|
||||
|
||||
extract_version_from_header(
|
||||
"${DEPS_PATH}/zstd/lib/zstd.h"
|
||||
"#define[ \t]+ZSTD_VERSION_STRING[ \t]+\"([^\"]+)\""
|
||||
ZSTD_VERSION_STRING
|
||||
)
|
||||
list(APPEND DEPENDENCY_VERSIONS "ZSTD_VERSION" "${ZSTD_VERSION_STRING}")
|
||||
|
||||
# Bun version from package.json
|
||||
if(EXISTS "${CMAKE_SOURCE_DIR}/package.json")
|
||||
file(READ "${CMAKE_SOURCE_DIR}/package.json" PACKAGE_JSON)
|
||||
string(REGEX MATCH "\"version\"[ \t]*:[ \t]*\"([^\"]+)\"" _ ${PACKAGE_JSON})
|
||||
if(CMAKE_MATCH_1)
|
||||
set(BUN_VERSION_STRING "${CMAKE_MATCH_1}")
|
||||
else()
|
||||
set(BUN_VERSION_STRING "unknown")
|
||||
endif()
|
||||
else()
|
||||
set(BUN_VERSION_STRING "${VERSION}")
|
||||
endif()
|
||||
list(APPEND DEPENDENCY_VERSIONS "BUN_VERSION" "${BUN_VERSION_STRING}")
|
||||
|
||||
# Node.js compatibility version (hardcoded as in the current implementation)
|
||||
set(NODEJS_COMPAT_VERSION "22.12.0")
|
||||
list(APPEND DEPENDENCY_VERSIONS "NODEJS_COMPAT_VERSION" "${NODEJS_COMPAT_VERSION}")
|
||||
|
||||
# Get Bun's git SHA for uws/usockets versions (they use Bun's own SHA)
|
||||
execute_process(
|
||||
COMMAND git rev-parse HEAD
|
||||
WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}"
|
||||
OUTPUT_VARIABLE BUN_GIT_SHA
|
||||
OUTPUT_STRIP_TRAILING_WHITESPACE
|
||||
ERROR_QUIET
|
||||
)
|
||||
if(NOT BUN_GIT_SHA)
|
||||
set(BUN_GIT_SHA "unknown")
|
||||
endif()
|
||||
list(APPEND DEPENDENCY_VERSIONS "UWS" "${BUN_GIT_SHA}")
|
||||
list(APPEND DEPENDENCY_VERSIONS "USOCKETS" "${BUN_GIT_SHA}")
|
||||
|
||||
# Zig version - hardcoded for now, can be updated as needed
|
||||
# This should match the version of Zig used to build Bun
|
||||
list(APPEND DEPENDENCY_VERSIONS "ZIG" "0.14.1")
|
||||
|
||||
# Generate the header file content
|
||||
set(HEADER_CONTENT "// This file is auto-generated by CMake. Do not edit manually.\n")
|
||||
string(APPEND HEADER_CONTENT "#ifndef BUN_DEPENDENCY_VERSIONS_H\n")
|
||||
string(APPEND HEADER_CONTENT "#define BUN_DEPENDENCY_VERSIONS_H\n\n")
|
||||
string(APPEND HEADER_CONTENT "#ifdef __cplusplus\n")
|
||||
string(APPEND HEADER_CONTENT "extern \"C\" {\n")
|
||||
string(APPEND HEADER_CONTENT "#endif\n\n")
|
||||
string(APPEND HEADER_CONTENT "// Dependency versions\n")
|
||||
|
||||
# Process the version list
|
||||
list(LENGTH DEPENDENCY_VERSIONS num_versions)
|
||||
math(EXPR last_idx "${num_versions} - 1")
|
||||
set(i 0)
|
||||
while(i LESS num_versions)
|
||||
list(GET DEPENDENCY_VERSIONS ${i} name)
|
||||
math(EXPR value_idx "${i} + 1")
|
||||
if(value_idx LESS num_versions)
|
||||
list(GET DEPENDENCY_VERSIONS ${value_idx} value)
|
||||
# Only emit #define if value is not "unknown"
|
||||
if(NOT "${value}" STREQUAL "unknown")
|
||||
string(APPEND HEADER_CONTENT "#define BUN_DEP_${name} \"${value}\"\n")
|
||||
endif()
|
||||
endif()
|
||||
math(EXPR i "${i} + 2")
|
||||
endwhile()
|
||||
|
||||
string(APPEND HEADER_CONTENT "\n")
|
||||
string(APPEND HEADER_CONTENT "// C string constants for easy access\n")
|
||||
|
||||
# Create C string constants
|
||||
set(i 0)
|
||||
while(i LESS num_versions)
|
||||
list(GET DEPENDENCY_VERSIONS ${i} name)
|
||||
math(EXPR value_idx "${i} + 1")
|
||||
if(value_idx LESS num_versions)
|
||||
list(GET DEPENDENCY_VERSIONS ${value_idx} value)
|
||||
# Only emit constant if value is not "unknown"
|
||||
if(NOT "${value}" STREQUAL "unknown")
|
||||
string(APPEND HEADER_CONTENT "static const char* const BUN_VERSION_${name} = \"${value}\";\n")
|
||||
endif()
|
||||
endif()
|
||||
math(EXPR i "${i} + 2")
|
||||
endwhile()
|
||||
|
||||
string(APPEND HEADER_CONTENT "\n#ifdef __cplusplus\n")
|
||||
string(APPEND HEADER_CONTENT "}\n")
|
||||
string(APPEND HEADER_CONTENT "#endif\n\n")
|
||||
string(APPEND HEADER_CONTENT "#endif // BUN_DEPENDENCY_VERSIONS_H\n")
|
||||
|
||||
# Write the header file
|
||||
set(OUTPUT_FILE "${CMAKE_BINARY_DIR}/bun_dependency_versions.h")
|
||||
file(WRITE "${OUTPUT_FILE}" "${HEADER_CONTENT}")
|
||||
|
||||
message(STATUS "Generated dependency versions header: ${OUTPUT_FILE}")
|
||||
|
||||
# Also create a more detailed version for debugging
|
||||
set(DEBUG_OUTPUT_FILE "${CMAKE_BINARY_DIR}/bun_dependency_versions_debug.txt")
|
||||
set(DEBUG_CONTENT "Bun Dependency Versions\n")
|
||||
string(APPEND DEBUG_CONTENT "=======================\n\n")
|
||||
set(i 0)
|
||||
while(i LESS num_versions)
|
||||
list(GET DEPENDENCY_VERSIONS ${i} name)
|
||||
math(EXPR value_idx "${i} + 1")
|
||||
if(value_idx LESS num_versions)
|
||||
list(GET DEPENDENCY_VERSIONS ${value_idx} value)
|
||||
string(APPEND DEBUG_CONTENT "${name}: ${value}\n")
|
||||
endif()
|
||||
math(EXPR i "${i} + 2")
|
||||
endwhile()
|
||||
file(WRITE "${DEBUG_OUTPUT_FILE}" "${DEBUG_CONTENT}")
|
||||
endfunction()
|
||||
|
||||
# Call the function to generate the header
|
||||
generate_dependency_versions_header()
|
||||
@@ -2,7 +2,7 @@ option(WEBKIT_VERSION "The version of WebKit to use")
|
||||
option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading")
|
||||
|
||||
if(NOT WEBKIT_VERSION)
|
||||
set(WEBKIT_VERSION f9e86fe8dc0aa2fc1f137cc94777cb10637c23a4)
|
||||
set(WEBKIT_VERSION 2d2e8dd5b020cc165e2bc1d284461b4504d624e5)
|
||||
endif()
|
||||
|
||||
string(SUBSTRING ${WEBKIT_VERSION} 0 16 WEBKIT_VERSION_PREFIX)
|
||||
|
||||
@@ -20,7 +20,7 @@ else()
|
||||
unsupported(CMAKE_SYSTEM_NAME)
|
||||
endif()
|
||||
|
||||
set(ZIG_COMMIT "edc6229b1fafb1701a25fb4e17114cc756991546")
|
||||
set(ZIG_COMMIT "e0b7c318f318196c5f81fdf3423816a7b5bb3112")
|
||||
optionx(ZIG_TARGET STRING "The zig target to use" DEFAULT ${DEFAULT_ZIG_TARGET})
|
||||
|
||||
if(CMAKE_BUILD_TYPE STREQUAL "Release")
|
||||
|
||||
366
docs/api/sql.md
366
docs/api/sql.md
@@ -1,4 +1,4 @@
|
||||
Bun provides native bindings for working with SQL databases through a unified Promise-based API that supports both PostgreSQL and SQLite. The interface is designed to be simple and performant, using tagged template literals for queries and offering features like connection pooling, transactions, and prepared statements.
|
||||
Bun provides native bindings for working with SQL databases through a unified Promise-based API that supports PostgreSQL, MySQL, and SQLite. The interface is designed to be simple and performant, using tagged template literals for queries and offering features like connection pooling, transactions, and prepared statements.
|
||||
|
||||
```ts
|
||||
import { sql, SQL } from "bun";
|
||||
@@ -10,9 +10,16 @@ const users = await sql`
|
||||
LIMIT ${10}
|
||||
`;
|
||||
|
||||
// With a a SQLite db
|
||||
// With MySQL
|
||||
const mysql = new SQL("mysql://user:pass@localhost:3306/mydb");
|
||||
const mysqlResults = await mysql`
|
||||
SELECT * FROM users
|
||||
WHERE active = ${true}
|
||||
`;
|
||||
|
||||
// With SQLite
|
||||
const sqlite = new SQL("sqlite://myapp.db");
|
||||
const results = await sqlite`
|
||||
const sqliteResults = await sqlite`
|
||||
SELECT * FROM users
|
||||
WHERE active = ${1}
|
||||
`;
|
||||
@@ -52,7 +59,7 @@ Bun.SQL provides a unified API for multiple database systems:
|
||||
|
||||
PostgreSQL is used when:
|
||||
|
||||
- The connection string doesn't match SQLite patterns (it's the fallback adapter)
|
||||
- The connection string doesn't match SQLite or MySQL patterns (it's the fallback adapter)
|
||||
- The connection string explicitly uses `postgres://` or `postgresql://` protocols
|
||||
- No connection string is provided and environment variables point to PostgreSQL
|
||||
|
||||
@@ -66,9 +73,82 @@ const pg = new SQL("postgres://user:pass@localhost:5432/mydb");
|
||||
await pg`SELECT ...`;
|
||||
```
|
||||
|
||||
### MySQL
|
||||
|
||||
MySQL support is built into Bun.SQL, providing the same tagged template literal interface with full compatibility for MySQL 5.7+ and MySQL 8.0+:
|
||||
|
||||
```ts
|
||||
import { SQL } from "bun";
|
||||
|
||||
// MySQL connection
|
||||
const mysql = new SQL("mysql://user:password@localhost:3306/database");
|
||||
const mysql2 = new SQL("mysql2://user:password@localhost:3306/database"); // mysql2 protocol also works
|
||||
|
||||
// Using options object
|
||||
const mysql3 = new SQL({
|
||||
adapter: "mysql",
|
||||
hostname: "localhost",
|
||||
port: 3306,
|
||||
database: "myapp",
|
||||
username: "dbuser",
|
||||
password: "secretpass",
|
||||
});
|
||||
|
||||
// Works with parameters - automatically uses prepared statements
|
||||
const users = await mysql`SELECT * FROM users WHERE id = ${userId}`;
|
||||
|
||||
// Transactions work the same as PostgreSQL
|
||||
await mysql.begin(async tx => {
|
||||
await tx`INSERT INTO users (name) VALUES (${"Alice"})`;
|
||||
await tx`UPDATE accounts SET balance = balance - 100 WHERE user_id = ${userId}`;
|
||||
});
|
||||
|
||||
// Bulk inserts
|
||||
const newUsers = [
|
||||
{ name: "Alice", email: "alice@example.com" },
|
||||
{ name: "Bob", email: "bob@example.com" },
|
||||
];
|
||||
await mysql`INSERT INTO users ${mysql(newUsers)}`;
|
||||
```
|
||||
|
||||
{% details summary="MySQL Connection String Formats" %}
|
||||
|
||||
MySQL accepts various URL formats for connection strings:
|
||||
|
||||
```ts
|
||||
// Standard mysql:// protocol
|
||||
new SQL("mysql://user:pass@localhost:3306/database");
|
||||
new SQL("mysql://user:pass@localhost/database"); // Default port 3306
|
||||
|
||||
// mysql2:// protocol (compatibility with mysql2 npm package)
|
||||
new SQL("mysql2://user:pass@localhost:3306/database");
|
||||
|
||||
// With query parameters
|
||||
new SQL("mysql://user:pass@localhost/db?ssl=true");
|
||||
|
||||
// Unix socket connection
|
||||
new SQL("mysql://user:pass@/database?socket=/var/run/mysqld/mysqld.sock");
|
||||
```
|
||||
|
||||
{% /details %}
|
||||
|
||||
{% details summary="MySQL-Specific Features" %}
|
||||
|
||||
MySQL databases support:
|
||||
|
||||
- **Prepared statements**: Automatically created for parameterized queries with statement caching
|
||||
- **Binary protocol**: For better performance with prepared statements and accurate type handling
|
||||
- **Multiple result sets**: Support for stored procedures returning multiple result sets
|
||||
- **Authentication plugins**: Support for mysql_native_password, caching_sha2_password (MySQL 8.0 default), and sha256_password
|
||||
- **SSL/TLS connections**: Configurable SSL modes similar to PostgreSQL
|
||||
- **Connection attributes**: Client information sent to server for monitoring
|
||||
- **Query pipelining**: Execute multiple prepared statements without waiting for responses
|
||||
|
||||
{% /details %}
|
||||
|
||||
### SQLite
|
||||
|
||||
SQLite support is now built into Bun.SQL, providing the same tagged template literal interface as PostgreSQL:
|
||||
SQLite support is built into Bun.SQL, providing the same tagged template literal interface:
|
||||
|
||||
```ts
|
||||
import { SQL } from "bun";
|
||||
@@ -362,7 +442,24 @@ await query;
|
||||
|
||||
### Automatic Database Detection
|
||||
|
||||
When using `Bun.sql()` without arguments or `new SQL()` with a connection string, the adapter is automatically detected based on the URL format. SQLite becomes the default adapter in these cases:
|
||||
When using `Bun.sql()` without arguments or `new SQL()` with a connection string, the adapter is automatically detected based on the URL format:
|
||||
|
||||
#### MySQL Auto-Detection
|
||||
|
||||
MySQL is automatically selected when the connection string matches these patterns:
|
||||
|
||||
- `mysql://...` - MySQL protocol URLs
|
||||
- `mysql2://...` - MySQL2 protocol URLs (compatibility alias)
|
||||
|
||||
```ts
|
||||
// These all use MySQL automatically (no adapter needed)
|
||||
const sql1 = new SQL("mysql://user:pass@localhost/mydb");
|
||||
const sql2 = new SQL("mysql2://user:pass@localhost:3306/mydb");
|
||||
|
||||
// Works with DATABASE_URL environment variable
|
||||
DATABASE_URL="mysql://user:pass@localhost/mydb" bun run app.js
|
||||
DATABASE_URL="mysql2://user:pass@localhost:3306/mydb" bun run app.js
|
||||
```
|
||||
|
||||
#### SQLite Auto-Detection
|
||||
|
||||
@@ -388,17 +485,42 @@ DATABASE_URL="file://./data/app.db" bun run app.js
|
||||
|
||||
#### PostgreSQL Auto-Detection
|
||||
|
||||
PostgreSQL is the default for all other connection strings:
|
||||
PostgreSQL is the default for connection strings that don't match MySQL or SQLite patterns:
|
||||
|
||||
```bash
|
||||
# PostgreSQL is detected for these patterns
|
||||
DATABASE_URL="postgres://user:pass@localhost:5432/mydb" bun run app.js
|
||||
DATABASE_URL="postgresql://user:pass@localhost:5432/mydb" bun run app.js
|
||||
|
||||
# Or any URL that doesn't match SQLite patterns
|
||||
# Or any URL that doesn't match MySQL or SQLite patterns
|
||||
DATABASE_URL="localhost:5432/mydb" bun run app.js
|
||||
```
|
||||
|
||||
### MySQL Environment Variables
|
||||
|
||||
MySQL connections can be configured via environment variables:
|
||||
|
||||
```bash
|
||||
# Primary connection URL (checked first)
|
||||
MYSQL_URL="mysql://user:pass@localhost:3306/mydb"
|
||||
|
||||
# Alternative: DATABASE_URL with MySQL protocol
|
||||
DATABASE_URL="mysql://user:pass@localhost:3306/mydb"
|
||||
DATABASE_URL="mysql2://user:pass@localhost:3306/mydb"
|
||||
```
|
||||
|
||||
If no connection URL is provided, MySQL checks these individual parameters:
|
||||
|
||||
| Environment Variable | Default Value | Description |
|
||||
| ------------------------ | ------------- | -------------------------------- |
|
||||
| `MYSQL_HOST` | `localhost` | Database host |
|
||||
| `MYSQL_PORT` | `3306` | Database port |
|
||||
| `MYSQL_USER` | `root` | Database user |
|
||||
| `MYSQL_PASSWORD` | (empty) | Database password |
|
||||
| `MYSQL_DATABASE` | `mysql` | Database name |
|
||||
| `MYSQL_URL` | (empty) | Primary connection URL for MySQL |
|
||||
| `TLS_MYSQL_DATABASE_URL` | (empty) | SSL/TLS-enabled connection URL |
|
||||
|
||||
### PostgreSQL Environment Variables
|
||||
|
||||
The following environment variables can be used to define the PostgreSQL connection:
|
||||
@@ -456,6 +578,53 @@ The `--sql-preconnect` flag will automatically establish a PostgreSQL connection
|
||||
|
||||
You can configure your database connection manually by passing options to the SQL constructor. Options vary depending on the database adapter:
|
||||
|
||||
### MySQL Options
|
||||
|
||||
```ts
|
||||
import { SQL } from "bun";
|
||||
|
||||
const db = new SQL({
|
||||
// Required for MySQL when using options object
|
||||
adapter: "mysql",
|
||||
|
||||
// Connection details
|
||||
hostname: "localhost",
|
||||
port: 3306,
|
||||
database: "myapp",
|
||||
username: "dbuser",
|
||||
password: "secretpass",
|
||||
|
||||
// Unix socket connection (alternative to hostname/port)
|
||||
// socket: "/var/run/mysqld/mysqld.sock",
|
||||
|
||||
// Connection pool settings
|
||||
max: 20, // Maximum connections in pool (default: 10)
|
||||
idleTimeout: 30, // Close idle connections after 30s
|
||||
maxLifetime: 0, // Connection lifetime in seconds (0 = forever)
|
||||
connectionTimeout: 30, // Timeout when establishing new connections
|
||||
|
||||
// SSL/TLS options
|
||||
tls: {
|
||||
rejectUnauthorized: true,
|
||||
ca: "path/to/ca.pem",
|
||||
key: "path/to/key.pem",
|
||||
cert: "path/to/cert.pem",
|
||||
},
|
||||
|
||||
// Callbacks
|
||||
onconnect: client => {
|
||||
console.log("Connected to MySQL");
|
||||
},
|
||||
onclose: (client, err) => {
|
||||
if (err) {
|
||||
console.error("MySQL connection error:", err);
|
||||
} else {
|
||||
console.log("MySQL connection closed");
|
||||
}
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### PostgreSQL Options
|
||||
|
||||
```ts
|
||||
@@ -979,11 +1148,106 @@ console.log(typeof x, x); // "bigint" 9223372036854777n
|
||||
There's still some things we haven't finished yet.
|
||||
|
||||
- Connection preloading via `--db-preconnect` Bun CLI flag
|
||||
- MySQL support: [we're working on it](https://github.com/oven-sh/bun/pull/15274)
|
||||
- Column name transforms (e.g. `snake_case` to `camelCase`). This is mostly blocked on a unicode-aware implementation of changing the case in C++ using WebKit's `WTF::String`.
|
||||
- Column type transforms
|
||||
|
||||
### Postgres-specific features
|
||||
## Database-Specific Features
|
||||
|
||||
#### Authentication Methods
|
||||
|
||||
MySQL supports multiple authentication plugins that are automatically negotiated:
|
||||
|
||||
- **`mysql_native_password`** - Traditional MySQL authentication, widely compatible
|
||||
- **`caching_sha2_password`** - Default in MySQL 8.0+, more secure with RSA key exchange
|
||||
- **`sha256_password`** - SHA-256 based authentication
|
||||
|
||||
The client automatically handles authentication plugin switching when requested by the server, including secure password exchange over non-SSL connections.
|
||||
|
||||
#### Prepared Statements & Performance
|
||||
|
||||
MySQL uses server-side prepared statements for all parameterized queries:
|
||||
|
||||
```ts
|
||||
// This automatically creates a prepared statement on the server
|
||||
const user = await mysql`SELECT * FROM users WHERE id = ${userId}`;
|
||||
|
||||
// Prepared statements are cached and reused for identical queries
|
||||
for (const id of userIds) {
|
||||
// Same prepared statement is reused
|
||||
await mysql`SELECT * FROM users WHERE id = ${id}`;
|
||||
}
|
||||
|
||||
// Query pipelining - multiple statements sent without waiting
|
||||
const [users, orders, products] = await Promise.all([
|
||||
mysql`SELECT * FROM users WHERE active = ${true}`,
|
||||
mysql`SELECT * FROM orders WHERE status = ${"pending"}`,
|
||||
mysql`SELECT * FROM products WHERE in_stock = ${true}`,
|
||||
]);
|
||||
```
|
||||
|
||||
#### Multiple Result Sets
|
||||
|
||||
MySQL can return multiple result sets from multi-statement queries:
|
||||
|
||||
```ts
|
||||
const mysql = new SQL("mysql://user:pass@localhost/mydb");
|
||||
|
||||
// Multi-statement queries with simple() method
|
||||
const multiResults = await mysql`
|
||||
SELECT * FROM users WHERE id = 1;
|
||||
SELECT * FROM orders WHERE user_id = 1;
|
||||
`.simple();
|
||||
```
|
||||
|
||||
#### Character Sets & Collations
|
||||
|
||||
Bun.SQL automatically uses `utf8mb4` character set for MySQL connections, ensuring full Unicode support including emojis. This is the recommended character set for modern MySQL applications.
|
||||
|
||||
#### Connection Attributes
|
||||
|
||||
Bun automatically sends client information to MySQL for better monitoring:
|
||||
|
||||
```ts
|
||||
// These attributes are sent automatically:
|
||||
// _client_name: "Bun"
|
||||
// _client_version: <bun version>
|
||||
// You can see these in MySQL's performance_schema.session_connect_attrs
|
||||
```
|
||||
|
||||
#### Type Handling
|
||||
|
||||
MySQL types are automatically converted to JavaScript types:
|
||||
|
||||
| MySQL Type | JavaScript Type | Notes |
|
||||
| --------------------------------------- | ------------------------ | ---------------------------------------------------------------------------------------------------- |
|
||||
| INT, TINYINT, MEDIUMINT | number | Within safe integer range |
|
||||
| BIGINT                                  | string, number or BigInt | If the value fits in i32/u32 it will be a number; otherwise a string or BigInt, based on the `bigint` option |
|
||||
| DECIMAL, NUMERIC | string | To preserve precision |
|
||||
| FLOAT, DOUBLE | number | |
|
||||
| DATE | Date | JavaScript Date object |
|
||||
| DATETIME, TIMESTAMP | Date | With timezone handling |
|
||||
| TIME                                    | number                   | Duration expressed as total microseconds                                                             |
|
||||
| YEAR | number | |
|
||||
| CHAR, VARCHAR, VARSTRING, STRING | string | |
|
||||
| TINY TEXT, MEDIUM TEXT, TEXT, LONG TEXT | string | |
|
||||
| TINY BLOB, MEDIUM BLOB, BLOB, LONG BLOB | string                   | BLOB types are aliases for the TEXT types                                                            |
|
||||
| JSON | object/array | Automatically parsed |
|
||||
| BIT(1) | boolean | BIT(1) in MySQL |
|
||||
| GEOMETRY | string | Geometry data |
|
||||
|
||||
#### Differences from PostgreSQL
|
||||
|
||||
While the API is unified, there are some behavioral differences:
|
||||
|
||||
1. **Parameter placeholders**: MySQL uses `?` internally but Bun converts `$1, $2` style automatically
|
||||
2. **RETURNING clause**: MySQL doesn't support RETURNING; use `result.lastInsertRowid` or a separate SELECT
|
||||
3. **Array types**: MySQL doesn't have native array types like PostgreSQL
|
||||
|
||||
### MySQL-Specific Features
|
||||
|
||||
We haven't implemented `LOAD DATA INFILE` support yet.
|
||||
|
||||
### PostgreSQL-Specific Features
|
||||
|
||||
We haven't implemented these yet:
|
||||
|
||||
@@ -998,6 +1262,88 @@ We also haven't implemented some of the more uncommon features like:
|
||||
- Point & PostGIS types
|
||||
- All the multi-dimensional integer array types (only a couple of the types are supported)
|
||||
|
||||
## Common Patterns & Best Practices
|
||||
|
||||
### Working with MySQL Result Sets
|
||||
|
||||
```ts
|
||||
// Getting insert ID after INSERT
|
||||
const result = await mysql`INSERT INTO users (name) VALUES (${"Alice"})`;
|
||||
console.log(result.lastInsertRowid); // MySQL's LAST_INSERT_ID()
|
||||
|
||||
// Handling affected rows
|
||||
const updated =
|
||||
await mysql`UPDATE users SET active = ${false} WHERE age < ${18}`;
|
||||
console.log(updated.affectedRows); // Number of rows updated
|
||||
|
||||
// Using MySQL-specific functions
|
||||
const now = await mysql`SELECT NOW() as server_time`; // note: `current_time` is a reserved word in MySQL
|
||||
const uuid = await mysql`SELECT UUID() as id`;
|
||||
```
|
||||
|
||||
### MySQL Error Handling
|
||||
|
||||
```ts
|
||||
try {
|
||||
await mysql`INSERT INTO users (email) VALUES (${"duplicate@email.com"})`;
|
||||
} catch (error) {
|
||||
if (error.code === "ER_DUP_ENTRY") {
|
||||
console.log("Duplicate entry detected");
|
||||
} else if (error.code === "ER_ACCESS_DENIED_ERROR") {
|
||||
console.log("Access denied");
|
||||
} else if (error.code === "ER_BAD_DB_ERROR") {
|
||||
console.log("Database does not exist");
|
||||
}
|
||||
// MySQL error codes are compatible with mysql/mysql2 packages
|
||||
}
|
||||
```
|
||||
|
||||
### Performance Tips for MySQL
|
||||
|
||||
1. **Use connection pooling**: Set appropriate `max` pool size based on your workload
|
||||
2. **Enable prepared statements**: They're enabled by default and improve performance
|
||||
3. **Use transactions for bulk operations**: Group related queries in transactions
|
||||
4. **Index properly**: MySQL relies heavily on indexes for query performance
|
||||
5. **Use `utf8mb4` charset**: It's set by default and handles all Unicode characters
|
||||
|
||||
## Frequently Asked Questions
|
||||
|
||||
> Why is this `Bun.sql` and not `Bun.postgres`?
|
||||
|
||||
The plan was to add more database drivers in the future. Now with MySQL support added, this unified API supports PostgreSQL, MySQL, and SQLite.
|
||||
|
||||
> How do I know which database adapter is being used?
|
||||
|
||||
The adapter is automatically detected from the connection string:
|
||||
|
||||
- URLs starting with `mysql://` or `mysql2://` use MySQL
|
||||
- URLs matching SQLite patterns (`:memory:`, `sqlite://`, `file://`) use SQLite
|
||||
- Everything else defaults to PostgreSQL
|
||||
|
||||
> Are MySQL stored procedures supported?
|
||||
|
||||
Yes, stored procedures are fully supported including OUT parameters and multiple result sets:
|
||||
|
||||
```ts
|
||||
// Call stored procedure
|
||||
const results = await mysql`CALL GetUserStats(${userId}, @total_orders)`;
|
||||
|
||||
// Get OUT parameter
|
||||
const outParam = await mysql`SELECT @total_orders as total`;
|
||||
```
|
||||
|
||||
> Can I use MySQL-specific SQL syntax?
|
||||
|
||||
Yes, you can use any MySQL-specific syntax:
|
||||
|
||||
```ts
|
||||
// MySQL-specific syntax works fine
|
||||
await mysql`SET @user_id = ${userId}`;
|
||||
await mysql`SHOW TABLES`;
|
||||
await mysql`DESCRIBE users`;
|
||||
await mysql`EXPLAIN SELECT * FROM users WHERE id = ${id}`;
|
||||
```
|
||||
|
||||
## Why not just use an existing library?
|
||||
|
||||
npm packages like postgres.js, pg, and node-postgres can be used in Bun too. They're great options.
|
||||
|
||||
@@ -122,6 +122,59 @@ Messages are automatically enqueued until the worker is ready, so there is no ne
|
||||
|
||||
To send messages, use [`worker.postMessage`](https://developer.mozilla.org/en-US/docs/Web/API/Worker/postMessage) and [`self.postMessage`](https://developer.mozilla.org/en-US/docs/Web/API/Window/postMessage). This leverages the [HTML Structured Clone Algorithm](https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Structured_clone_algorithm).
|
||||
|
||||
### Performance optimizations
|
||||
|
||||
Bun includes optimized fast paths for `postMessage` to dramatically improve performance for common data types:
|
||||
|
||||
**String fast path** - When posting pure string values, Bun bypasses the structured clone algorithm entirely, achieving significant performance gains with no serialization overhead.
|
||||
|
||||
**Simple object fast path** - For plain objects containing only primitive values (strings, numbers, booleans, null, undefined), Bun uses an optimized serialization path that stores properties directly without full structured cloning.
|
||||
|
||||
The simple object fast path activates when the object:
|
||||
|
||||
- Is a plain object with no prototype chain modifications
|
||||
- Contains only enumerable, configurable data properties
|
||||
- Has no indexed properties or getter/setter methods
|
||||
- All property values are primitives or strings
|
||||
|
||||
With these fast paths, Bun's `postMessage` performs **2-241x faster** because the message length no longer has a meaningful impact on performance.
|
||||
|
||||
**Bun (with fast paths):**
|
||||
|
||||
```
|
||||
postMessage({ prop: 11 chars string, ...9 more props }) - 648ns
|
||||
postMessage({ prop: 14 KB string, ...9 more props }) - 719ns
|
||||
postMessage({ prop: 3 MB string, ...9 more props }) - 1.26µs
|
||||
```
|
||||
|
||||
**Node.js v24.6.0 (for comparison):**
|
||||
|
||||
```
|
||||
postMessage({ prop: 11 chars string, ...9 more props }) - 1.19µs
|
||||
postMessage({ prop: 14 KB string, ...9 more props }) - 2.69µs
|
||||
postMessage({ prop: 3 MB string, ...9 more props }) - 304µs
|
||||
```
|
||||
|
||||
```js
|
||||
// String fast path - optimized
|
||||
postMessage("Hello, worker!");
|
||||
|
||||
// Simple object fast path - optimized
|
||||
postMessage({
|
||||
message: "Hello",
|
||||
count: 42,
|
||||
enabled: true,
|
||||
data: null,
|
||||
});
|
||||
|
||||
// Complex objects still work but use standard structured clone
|
||||
postMessage({
|
||||
nested: { deep: { object: true } },
|
||||
date: new Date(),
|
||||
buffer: new ArrayBuffer(8),
|
||||
});
|
||||
```
|
||||
|
||||
```js
|
||||
// On the worker thread, `postMessage` is automatically "routed" to the parent thread.
|
||||
postMessage({ hello: "world" });
|
||||
|
||||
@@ -733,6 +733,10 @@ Whether to enable minification. Default `false`.
|
||||
When targeting `bun`, identifiers will be minified by default.
|
||||
{% /callout %}
|
||||
|
||||
{% callout %}
|
||||
When `minify.syntax` is enabled, unused function and class expression names are removed unless `minify.keepNames` is set to `true` or `--keep-names` flag is used.
|
||||
{% /callout %}
|
||||
|
||||
To enable all minification options:
|
||||
|
||||
{% codetabs group="a" %}
|
||||
@@ -763,12 +767,16 @@ await Bun.build({
|
||||
whitespace: true,
|
||||
identifiers: true,
|
||||
syntax: true,
|
||||
keepNames: false, // default
|
||||
},
|
||||
})
|
||||
```
|
||||
|
||||
```bash#CLI
|
||||
$ bun build ./index.tsx --outdir ./out --minify-whitespace --minify-identifiers --minify-syntax
|
||||
|
||||
# To preserve function and class names during minification:
|
||||
$ bun build ./index.tsx --outdir ./out --minify --keep-names
|
||||
```
|
||||
|
||||
{% /codetabs %}
|
||||
@@ -1553,6 +1561,7 @@ interface BuildConfig {
|
||||
whitespace?: boolean;
|
||||
syntax?: boolean;
|
||||
identifiers?: boolean;
|
||||
keepNames?: boolean;
|
||||
};
|
||||
/**
|
||||
* Ignore dead code elimination/tree-shaking annotations such as @__PURE__ and package.json
|
||||
|
||||
@@ -245,8 +245,8 @@ In Bun's CLI, simple boolean flags like `--minify` do not accept an argument. Ot
|
||||
---
|
||||
|
||||
- `--jsx-side-effects`
|
||||
- n/a
|
||||
- JSX is always assumed to be side-effect-free
|
||||
- `--jsx-side-effects`
|
||||
- Controls whether JSX expressions are marked as `/* @__PURE__ */` for dead code elimination. Default is `false` (JSX marked as pure).
|
||||
|
||||
---
|
||||
|
||||
@@ -617,7 +617,7 @@ In Bun's CLI, simple boolean flags like `--minify` do not accept an argument. Ot
|
||||
|
||||
- `jsxSideEffects`
|
||||
- `jsxSideEffects`
|
||||
- Not supported in JS API, configure in `tsconfig.json`
|
||||
- Controls whether JSX expressions are marked as pure for dead code elimination
|
||||
|
||||
---
|
||||
|
||||
|
||||
@@ -230,16 +230,15 @@ $ bun install --backend copyfile
|
||||
|
||||
**`symlink`** is typically only used for `file:` dependencies (and eventually `link:`) internally. To prevent infinite loops, it skips symlinking the `node_modules` folder.
|
||||
|
||||
If you install with `--backend=symlink`, Node.js won't resolve node_modules of dependencies unless each dependency has its own node_modules folder or you pass `--preserve-symlinks` to `node`. See [Node.js documentation on `--preserve-symlinks`](https://nodejs.org/api/cli.html#--preserve-symlinks).
|
||||
If you install with `--backend=symlink`, Node.js won't resolve node_modules of dependencies unless each dependency has its own node_modules folder or you pass `--preserve-symlinks` to `node` or `bun`. See [Node.js documentation on `--preserve-symlinks`](https://nodejs.org/api/cli.html#--preserve-symlinks).
|
||||
|
||||
```bash
|
||||
$ rm -rf node_modules
|
||||
$ bun install --backend symlink
|
||||
$ bun --preserve-symlinks ./my-file.js
|
||||
$ node --preserve-symlinks ./my-file.js # https://nodejs.org/api/cli.html#--preserve-symlinks
|
||||
```
|
||||
|
||||
Bun's runtime does not currently expose an equivalent of `--preserve-symlinks`, though the code for it does exist.
|
||||
|
||||
## npm registry metadata
|
||||
|
||||
bun uses a binary format for caching NPM registry responses. This loads much faster than JSON and tends to be smaller on disk.
|
||||
|
||||
@@ -8,6 +8,14 @@ The `bun` CLI contains a Node.js-compatible package manager designed to be a dra
|
||||
|
||||
{% /callout %}
|
||||
|
||||
{% callout %}
|
||||
|
||||
**💾 Disk efficient** — Bun install stores all packages in a global cache (`~/.bun/install/cache/`) and creates hardlinks (Linux) or copy-on-write clones (macOS) to `node_modules`. This means duplicate packages across projects point to the same underlying data, taking up virtually no extra disk space.
|
||||
|
||||
For more details, see [Package manager > Global cache](https://bun.com/docs/install/cache).
|
||||
|
||||
{% /callout %}
|
||||
|
||||
{% details summary="For Linux users" %}
|
||||
The recommended minimum Linux Kernel version is 5.6. If you're on Linux kernel 5.1 - 5.5, `bun install` will work, but HTTP requests will be slow due to a lack of support for io_uring's `connect()` operation.
|
||||
|
||||
@@ -207,6 +215,12 @@ Isolated installs create a central package store in `node_modules/.bun/` with sy
|
||||
|
||||
For complete documentation on isolated installs, refer to [Package manager > Isolated installs](https://bun.com/docs/install/isolated).
|
||||
|
||||
## Disk efficiency
|
||||
|
||||
Bun uses a global cache at `~/.bun/install/cache/` to minimize disk usage. Packages are stored once and linked to `node_modules` using hardlinks (Linux/Windows) or copy-on-write (macOS), so duplicate packages across projects don't consume additional disk space.
|
||||
|
||||
For complete documentation refer to [Package manager > Global cache](https://bun.com/docs/install/cache).
|
||||
|
||||
## Configuration
|
||||
|
||||
The default behavior of `bun install` can be configured in `bunfig.toml`. The default values are shown below.
|
||||
|
||||
@@ -9,8 +9,9 @@ $ bun create next-app
|
||||
✔ What is your project named? … my-app
|
||||
✔ Would you like to use TypeScript with this project? … No / Yes
|
||||
✔ Would you like to use ESLint with this project? … No / Yes
|
||||
✔ Would you like to use Tailwind CSS? ... No / Yes
|
||||
✔ Would you like to use `src/` directory with this project? … No / Yes
|
||||
✔ Would you like to use experimental `app/` directory with this project? … No / Yes
|
||||
✔ Would you like to use App Router? (recommended) ... No / Yes
|
||||
✔ What import alias would you like configured? … @/*
|
||||
Creating a new Next.js app in /path/to/my-app.
|
||||
```
|
||||
|
||||
@@ -48,12 +48,12 @@ This behavior is configurable with the `--backend` flag, which is respected by a
|
||||
- **`copyfile`**: The fallback used when any of the above fail. It is the slowest option. On macOS, it uses `fcopyfile()`; on Linux it uses `copy_file_range()`.
|
||||
- **`symlink`**: Currently used only `file:` (and eventually `link:`) dependencies. To prevent infinite loops, it skips symlinking the `node_modules` folder.
|
||||
|
||||
If you install with `--backend=symlink`, Node.js won't resolve node_modules of dependencies unless each dependency has its own `node_modules` folder or you pass `--preserve-symlinks` to `node`. See [Node.js documentation on `--preserve-symlinks`](https://nodejs.org/api/cli.html#--preserve-symlinks).
|
||||
If you install with `--backend=symlink`, Node.js won't resolve node_modules of dependencies unless each dependency has its own `node_modules` folder or you pass `--preserve-symlinks` to `node` or `bun`. See [Node.js documentation on `--preserve-symlinks`](https://nodejs.org/api/cli.html#--preserve-symlinks).
|
||||
|
||||
```bash
|
||||
$ bun install --backend symlink
|
||||
$ node --preserve-symlinks ./foo.js
|
||||
$ bun --preserve-symlinks ./foo.js
|
||||
```
|
||||
|
||||
Bun's runtime does not currently expose an equivalent of `--preserve-symlinks`.
|
||||
{% /details %}
|
||||
|
||||
@@ -407,6 +407,9 @@ export default {
|
||||
page("api/cc", "C Compiler", {
|
||||
description: `Build & run native C from JavaScript with Bun's native C compiler API`,
|
||||
}), // "`bun:ffi`"),
|
||||
page("api/secrets", "Secrets", {
|
||||
description: `Store and retrieve sensitive credentials securely using the operating system's native credential storage APIs.`,
|
||||
}), // "`Bun.secrets`"),
|
||||
page("cli/test", "Testing", {
|
||||
description: `Bun's built-in test runner is fast and uses Jest-compatible syntax.`,
|
||||
}), // "`bun:test`"),
|
||||
|
||||
@@ -246,6 +246,65 @@ The module from which the component factory function (`createElement`, `jsx`, `j
|
||||
|
||||
{% /table %}
|
||||
|
||||
### `jsxSideEffects`
|
||||
|
||||
By default, Bun marks JSX expressions as `/* @__PURE__ */` so they can be removed during bundling if they are unused (known as "dead code elimination" or "tree shaking"). Set `jsxSideEffects` to `true` to prevent this behavior.
|
||||
|
||||
{% table %}
|
||||
|
||||
- Compiler options
|
||||
- Transpiled output
|
||||
|
||||
---
|
||||
|
||||
- ```jsonc
|
||||
{
|
||||
"jsx": "react",
|
||||
// jsxSideEffects is false by default
|
||||
}
|
||||
```
|
||||
|
||||
- ```tsx
|
||||
// JSX expressions are marked as pure
|
||||
/* @__PURE__ */ React.createElement("div", null, "Hello");
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
- ```jsonc
|
||||
{
|
||||
"jsx": "react",
|
||||
"jsxSideEffects": true,
|
||||
}
|
||||
```
|
||||
|
||||
- ```tsx
|
||||
// JSX expressions are not marked as pure
|
||||
React.createElement("div", null, "Hello");
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
- ```jsonc
|
||||
{
|
||||
"jsx": "react-jsx",
|
||||
"jsxSideEffects": true,
|
||||
}
|
||||
```
|
||||
|
||||
- ```tsx
|
||||
// Automatic runtime also respects jsxSideEffects
|
||||
jsx("div", { children: "Hello" });
|
||||
```
|
||||
|
||||
{% /table %}
|
||||
|
||||
This option is also available as a CLI flag:
|
||||
|
||||
```bash
|
||||
$ bun build --jsx-side-effects
|
||||
```
|
||||
|
||||
### JSX pragma
|
||||
|
||||
All of these values can be set on a per-file basis using _pragmas_. A pragma is a special comment that sets a compiler option in a particular file.
|
||||
|
||||
@@ -756,3 +756,76 @@ Bun implements the following matchers. Full Jest compatibility is on the roadmap
|
||||
- [`.toThrowErrorMatchingInlineSnapshot()`](https://jestjs.io/docs/expect#tothrowerrormatchinginlinesnapshotinlinesnapshot)
|
||||
|
||||
{% /table %}
|
||||
|
||||
## TypeScript Type Safety
|
||||
|
||||
Bun's test runner provides enhanced TypeScript support with intelligent type checking for your test assertions. The type system helps catch potential bugs at compile time while still allowing flexibility when needed.
|
||||
|
||||
### Strict Type Checking by Default
|
||||
|
||||
By default, Bun's test matchers enforce strict type checking between the actual value and expected value:
|
||||
|
||||
```ts
|
||||
import { expect, test } from "bun:test";
|
||||
|
||||
test("strict typing", () => {
|
||||
const str = "hello";
|
||||
const num = 42;
|
||||
|
||||
expect(str).toBe("hello"); // ✅ OK: string to string
|
||||
expect(num).toBe(42); // ✅ OK: number to number
|
||||
expect(str).toBe(42); // ❌ TypeScript error: string vs number
|
||||
});
|
||||
```
|
||||
|
||||
This helps catch common mistakes where you might accidentally compare values of different types.
|
||||
|
||||
### Relaxed Type Checking with Type Parameters
|
||||
|
||||
Sometimes you need more flexibility in your tests, especially when working with:
|
||||
|
||||
- Dynamic data from APIs
|
||||
- Polymorphic functions that can return multiple types
|
||||
- Generic utility functions
|
||||
- Migration of existing test suites
|
||||
|
||||
For these cases, you can "opt out" of strict type checking by providing an explicit type parameter to matcher methods:
|
||||
|
||||
```ts
|
||||
import { expect, test } from "bun:test";
|
||||
|
||||
test("relaxed typing with type parameters", () => {
|
||||
const value: unknown = getSomeValue();
|
||||
|
||||
// These would normally cause TypeScript errors, but type parameters allow them:
|
||||
expect(value).toBe<number>(42); // No TS error, runtime check still works
|
||||
expect(value).toEqual<string>("hello"); // No TS error, runtime check still works
|
||||
expect(value).toStrictEqual<boolean>(true); // No TS error, runtime check still works
|
||||
});
|
||||
|
||||
test("useful for dynamic data", () => {
|
||||
const apiResponse: any = { status: "success" };
|
||||
|
||||
// Without type parameter: TypeScript error (any vs string)
|
||||
// expect(apiResponse.status).toBe("success");
|
||||
|
||||
// With type parameter: No TypeScript error, runtime assertion still enforced
|
||||
expect(apiResponse.status).toBe<string>("success"); // ✅ OK
|
||||
});
|
||||
```
|
||||
|
||||
### Migration from Looser Type Systems
|
||||
|
||||
If migrating from a test framework with looser TypeScript integration, you can use type parameters as a stepping stone:
|
||||
|
||||
```ts
|
||||
// Old Jest test that worked but wasn't type-safe
|
||||
expect(response.data).toBe(200); // No type error in some setups
|
||||
|
||||
// Bun equivalent with explicit typing during migration
|
||||
expect(response.data).toBe<number>(200); // Explicit about expected type
|
||||
|
||||
// Ideal Bun test after refactoring
|
||||
const statusCode: number = response.data;
|
||||
expect(statusCode).toBe(200); // Type-safe without explicit parameter
|
||||
```
|
||||
|
||||
@@ -32,7 +32,7 @@
|
||||
"watch-windows": "bun run zig build check-windows --watch -fincremental --prominent-compile-errors --global-cache-dir build/debug/zig-check-cache --zig-lib-dir vendor/zig/lib",
|
||||
"bd:v": "(bun run --silent build:debug &> /tmp/bun.debug.build.log || (cat /tmp/bun.debug.build.log && rm -rf /tmp/bun.debug.build.log && exit 1)) && rm -f /tmp/bun.debug.build.log && ./build/debug/bun-debug",
|
||||
"bd": "BUN_DEBUG_QUIET_LOGS=1 bun --silent bd:v",
|
||||
"build:debug": "export COMSPEC=\"C:\\Windows\\System32\\cmd.exe\" && bun scripts/glob-sources.mjs > /dev/null && bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -B build/debug --log-level=NOTICE",
|
||||
"build:debug": "export COMSPEC=\"C:\\Windows\\System32\\cmd.exe\" && bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -B build/debug --log-level=NOTICE",
|
||||
"build:debug:asan": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -DENABLE_ASAN=ON -B build/debug-asan --log-level=NOTICE",
|
||||
"build:release": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -B build/release",
|
||||
"build:ci": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -DCMAKE_VERBOSE_MAKEFILE=ON -DCI=true -B build/release-ci --verbose --fresh",
|
||||
|
||||
77
packages/bun-types/bun.d.ts
vendored
77
packages/bun-types/bun.d.ts
vendored
@@ -644,6 +644,38 @@ declare module "bun" {
|
||||
* ```
|
||||
*/
|
||||
export function parse(input: string): unknown;
|
||||
|
||||
/**
|
||||
* Convert a JavaScript value into a YAML string. Strings are double quoted if they contain keywords, non-printable or
|
||||
* escaped characters, or if a YAML parser would parse them as numbers. Anchors and aliases are inferred from objects, allowing cycles.
|
||||
*
|
||||
* @category Utilities
|
||||
*
|
||||
* @param input The JavaScript value to stringify.
|
||||
* @param replacer Currently not supported.
|
||||
* @param space A number for how many spaces each level of indentation gets, or a string used as indentation. The number is clamped between 0 and 10, and the first 10 characters of the string are used.
|
||||
* @returns A string containing the YAML document.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* import { YAML } from "bun";
|
||||
*
|
||||
* const input = {
|
||||
* abc: "def"
|
||||
* };
|
||||
* console.log(YAML.stringify(input));
|
||||
* // # output
|
||||
* // abc: def
|
||||
*
|
||||
* const cycle = {};
|
||||
* cycle.obj = cycle;
|
||||
* console.log(YAML.stringify(cycle));
|
||||
* // # output
|
||||
* // &root
|
||||
* // obj:
|
||||
* // *root
|
||||
*/
|
||||
export function stringify(input: unknown, replacer?: undefined | null, space?: string | number): string;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1673,11 +1705,16 @@ declare module "bun" {
|
||||
* @see [Bun.build API docs](https://bun.com/docs/bundler#api)
|
||||
*/
|
||||
interface BuildConfigBase {
|
||||
entrypoints: string[]; // list of file path
|
||||
/**
|
||||
* List of entrypoints, usually file paths
|
||||
*/
|
||||
entrypoints: string[];
|
||||
|
||||
/**
|
||||
* @default "browser"
|
||||
*/
|
||||
target?: Target; // default: "browser"
|
||||
|
||||
/**
|
||||
* Output module format. Top-level await is only supported for `"esm"`.
|
||||
*
|
||||
@@ -1782,6 +1819,7 @@ declare module "bun" {
|
||||
whitespace?: boolean;
|
||||
syntax?: boolean;
|
||||
identifiers?: boolean;
|
||||
keepNames?: boolean;
|
||||
};
|
||||
|
||||
/**
|
||||
@@ -1908,12 +1946,28 @@ declare module "bun" {
|
||||
* ```
|
||||
*/
|
||||
compile: boolean | Bun.Build.Target | CompileBuildOptions;
|
||||
|
||||
/**
|
||||
* Splitting is not currently supported with `.compile`
|
||||
*/
|
||||
splitting?: never;
|
||||
}
|
||||
|
||||
interface NormalBuildConfig extends BuildConfigBase {
|
||||
/**
|
||||
* Enable code splitting
|
||||
*
|
||||
* This does not currently work with {@link CompileBuildConfig.compile `compile`}
|
||||
*
|
||||
* @default true
|
||||
*/
|
||||
splitting?: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* @see [Bun.build API docs](https://bun.com/docs/bundler#api)
|
||||
*/
|
||||
type BuildConfig = BuildConfigBase | CompileBuildConfig;
|
||||
type BuildConfig = CompileBuildConfig | NormalBuildConfig;
|
||||
|
||||
/**
|
||||
* Hash and verify passwords using argon2 or bcrypt
|
||||
@@ -3793,6 +3847,11 @@ declare module "bun" {
|
||||
* @category HTTP & Networking
|
||||
*/
|
||||
interface Server extends Disposable {
|
||||
/*
|
||||
* Closes all connections connected to this server which are not sending a request or waiting for a response. Does not close the listen socket.
|
||||
*/
|
||||
closeIdleConnections(): void;
|
||||
|
||||
/**
|
||||
* Stop listening to prevent new connections from being accepted.
|
||||
*
|
||||
@@ -5514,6 +5573,11 @@ declare module "bun" {
|
||||
type OnLoadCallback = (args: OnLoadArgs) => OnLoadResult | Promise<OnLoadResult>;
|
||||
type OnStartCallback = () => void | Promise<void>;
|
||||
type OnEndCallback = (result: BuildOutput) => void | Promise<void>;
|
||||
type OnBeforeParseCallback = {
|
||||
napiModule: unknown;
|
||||
symbol: string;
|
||||
external?: unknown | undefined;
|
||||
};
|
||||
|
||||
interface OnResolveArgs {
|
||||
/**
|
||||
@@ -5610,14 +5674,7 @@ declare module "bun" {
|
||||
* @returns `this` for method chaining
|
||||
*/
|
||||
onEnd(callback: OnEndCallback): this;
|
||||
onBeforeParse(
|
||||
constraints: PluginConstraints,
|
||||
callback: {
|
||||
napiModule: unknown;
|
||||
symbol: string;
|
||||
external?: unknown | undefined;
|
||||
},
|
||||
): this;
|
||||
onBeforeParse(constraints: PluginConstraints, callback: OnBeforeParseCallback): this;
|
||||
/**
|
||||
* Register a callback to load imports with a specific import specifier
|
||||
* @param constraints The constraints to apply the plugin to
|
||||
|
||||
19
packages/bun-types/ffi.d.ts
vendored
19
packages/bun-types/ffi.d.ts
vendored
@@ -219,44 +219,39 @@ declare module "bun:ffi" {
|
||||
|
||||
/**
|
||||
* int64 is a 64-bit signed integer
|
||||
*
|
||||
* This is not implemented yet!
|
||||
*/
|
||||
int64_t = 7,
|
||||
/**
|
||||
* i64 is a 64-bit signed integer
|
||||
*
|
||||
* This is not implemented yet!
|
||||
*/
|
||||
i64 = 7,
|
||||
|
||||
/**
|
||||
* 64-bit unsigned integer
|
||||
*
|
||||
* This is not implemented yet!
|
||||
*/
|
||||
uint64_t = 8,
|
||||
/**
|
||||
* 64-bit unsigned integer
|
||||
*
|
||||
* This is not implemented yet!
|
||||
*/
|
||||
u64 = 8,
|
||||
|
||||
/**
|
||||
* Doubles are not supported yet!
|
||||
* IEEE-754 double precision float
|
||||
*/
|
||||
double = 9,
|
||||
|
||||
/**
|
||||
* Doubles are not supported yet!
|
||||
* Alias of {@link FFIType.double}
|
||||
*/
|
||||
f64 = 9,
|
||||
|
||||
/**
|
||||
* Floats are not supported yet!
|
||||
* IEEE-754 single precision float
|
||||
*/
|
||||
float = 10,
|
||||
|
||||
/**
|
||||
* Floats are not supported yet!
|
||||
* Alias of {@link FFIType.float}
|
||||
*/
|
||||
f32 = 10,
|
||||
|
||||
|
||||
33
packages/bun-types/globals.d.ts
vendored
33
packages/bun-types/globals.d.ts
vendored
@@ -1556,6 +1556,15 @@ declare var URL: Bun.__internal.UseLibDomIfAvailable<
|
||||
}
|
||||
>;
|
||||
|
||||
/**
|
||||
* The **`AbortController`** interface represents a controller object that allows you to abort one or more Web requests as and when desired.
|
||||
*
|
||||
* [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortController)
|
||||
*/
|
||||
interface AbortController {
|
||||
readonly signal: AbortSignal;
|
||||
abort(reason?: any): void;
|
||||
}
|
||||
declare var AbortController: Bun.__internal.UseLibDomIfAvailable<
|
||||
"AbortController",
|
||||
{
|
||||
@@ -1564,6 +1573,12 @@ declare var AbortController: Bun.__internal.UseLibDomIfAvailable<
|
||||
}
|
||||
>;
|
||||
|
||||
interface AbortSignal extends EventTarget {
|
||||
readonly aborted: boolean;
|
||||
onabort: ((this: AbortSignal, ev: Event) => any) | null;
|
||||
readonly reason: any;
|
||||
throwIfAborted(): void;
|
||||
}
|
||||
declare var AbortSignal: Bun.__internal.UseLibDomIfAvailable<
|
||||
"AbortSignal",
|
||||
{
|
||||
@@ -1948,3 +1963,21 @@ declare namespace fetch {
|
||||
): void;
|
||||
}
|
||||
//#endregion
|
||||
|
||||
interface RegExpConstructor {
|
||||
/**
|
||||
* Escapes any potential regex syntax characters in a string, and returns a
|
||||
* new string that can be safely used as a literal pattern for the RegExp()
|
||||
* constructor.
|
||||
*
|
||||
* [MDN Reference](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp/escape)
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const re = new RegExp(RegExp.escape("foo.bar"));
|
||||
* re.test("foo.bar"); // true
|
||||
* re.test("foo!bar"); // false
|
||||
* ```
|
||||
*/
|
||||
escape(string: string): string;
|
||||
}
|
||||
|
||||
4
packages/bun-types/index.d.ts
vendored
4
packages/bun-types/index.d.ts
vendored
@@ -26,6 +26,6 @@
|
||||
|
||||
/// <reference path="./bun.ns.d.ts" />
|
||||
|
||||
// @ts-ignore Must disable this so it doesn't conflict with the DOM onmessage type, but still
|
||||
// Must disable this so it doesn't conflict with the DOM onmessage type, but still
|
||||
// allows us to declare our own globals that Node's types can "see" and not conflict with
|
||||
declare var onmessage: never;
|
||||
declare var onmessage: Bun.__internal.UseLibDomIfAvailable<"onmessage", never>;
|
||||
|
||||
90
packages/bun-types/overrides.d.ts
vendored
90
packages/bun-types/overrides.d.ts
vendored
@@ -174,6 +174,96 @@ declare global {
|
||||
UV_ENODATA: number;
|
||||
UV_EUNATCH: number;
|
||||
};
|
||||
binding(m: "http_parser"): {
|
||||
methods: [
|
||||
"DELETE",
|
||||
"GET",
|
||||
"HEAD",
|
||||
"POST",
|
||||
"PUT",
|
||||
"CONNECT",
|
||||
"OPTIONS",
|
||||
"TRACE",
|
||||
"COPY",
|
||||
"LOCK",
|
||||
"MKCOL",
|
||||
"MOVE",
|
||||
"PROPFIND",
|
||||
"PROPPATCH",
|
||||
"SEARCH",
|
||||
"UNLOCK",
|
||||
"BIND",
|
||||
"REBIND",
|
||||
"UNBIND",
|
||||
"ACL",
|
||||
"REPORT",
|
||||
"MKACTIVITY",
|
||||
"CHECKOUT",
|
||||
"MERGE",
|
||||
"M - SEARCH",
|
||||
"NOTIFY",
|
||||
"SUBSCRIBE",
|
||||
"UNSUBSCRIBE",
|
||||
"PATCH",
|
||||
"PURGE",
|
||||
"MKCALENDAR",
|
||||
"LINK",
|
||||
"UNLINK",
|
||||
"SOURCE",
|
||||
"QUERY",
|
||||
];
|
||||
allMethods: [
|
||||
"DELETE",
|
||||
"GET",
|
||||
"HEAD",
|
||||
"POST",
|
||||
"PUT",
|
||||
"CONNECT",
|
||||
"OPTIONS",
|
||||
"TRACE",
|
||||
"COPY",
|
||||
"LOCK",
|
||||
"MKCOL",
|
||||
"MOVE",
|
||||
"PROPFIND",
|
||||
"PROPPATCH",
|
||||
"SEARCH",
|
||||
"UNLOCK",
|
||||
"BIND",
|
||||
"REBIND",
|
||||
"UNBIND",
|
||||
"ACL",
|
||||
"REPORT",
|
||||
"MKACTIVITY",
|
||||
"CHECKOUT",
|
||||
"MERGE",
|
||||
"M - SEARCH",
|
||||
"NOTIFY",
|
||||
"SUBSCRIBE",
|
||||
"UNSUBSCRIBE",
|
||||
"PATCH",
|
||||
"PURGE",
|
||||
"MKCALENDAR",
|
||||
"LINK",
|
||||
"UNLINK",
|
||||
"SOURCE",
|
||||
"PRI",
|
||||
"DESCRIBE",
|
||||
"ANNOUNCE",
|
||||
"SETUP",
|
||||
"PLAY",
|
||||
"PAUSE",
|
||||
"TEARDOWN",
|
||||
"GET_PARAMETER",
|
||||
"SET_PARAMETER",
|
||||
"REDIRECT",
|
||||
"RECORD",
|
||||
"FLUSH",
|
||||
"QUERY",
|
||||
];
|
||||
HTTPParser: unknown;
|
||||
ConnectionsList: unknown;
|
||||
};
|
||||
binding(m: string): object;
|
||||
}
|
||||
|
||||
|
||||
8
packages/bun-types/redis.d.ts
vendored
8
packages/bun-types/redis.d.ts
vendored
@@ -270,6 +270,14 @@ declare module "bun" {
|
||||
*/
|
||||
hmset(key: RedisClient.KeyLike, fieldValues: string[]): Promise<string>;
|
||||
|
||||
/**
|
||||
* Get the value of a hash field
|
||||
* @param key The hash key
|
||||
* @param field The field to get
|
||||
* @returns Promise that resolves with the field value or null if the field doesn't exist
|
||||
*/
|
||||
hget(key: RedisClient.KeyLike, field: RedisClient.KeyLike): Promise<string | null>;
|
||||
|
||||
/**
|
||||
* Get the values of all the given hash fields
|
||||
* @param key The hash key
|
||||
|
||||
4
packages/bun-types/shell.d.ts
vendored
4
packages/bun-types/shell.d.ts
vendored
@@ -58,7 +58,7 @@ declare module "bun" {
|
||||
* // "bun"
|
||||
* ```
|
||||
*/
|
||||
function env(newEnv?: Record<string, string | undefined>): $;
|
||||
function env(newEnv?: Record<string, string | undefined> | NodeJS.Dict<string> | undefined): $;
|
||||
|
||||
/**
|
||||
*
|
||||
@@ -106,7 +106,7 @@ declare module "bun" {
|
||||
* expect(stdout.toString()).toBe("LOL!");
|
||||
* ```
|
||||
*/
|
||||
env(newEnv: Record<string, string> | undefined): this;
|
||||
env(newEnv: Record<string, string | undefined> | NodeJS.Dict<string> | undefined): this;
|
||||
|
||||
/**
|
||||
* By default, the shell will write to the current process's stdout and stderr, as well as buffering that output.
|
||||
|
||||
45
packages/bun-types/sql.d.ts
vendored
45
packages/bun-types/sql.d.ts
vendored
@@ -41,22 +41,22 @@ declare module "bun" {
|
||||
|
||||
class PostgresError extends SQLError {
|
||||
public readonly code: string;
|
||||
public readonly errno: string | undefined;
|
||||
public readonly detail: string | undefined;
|
||||
public readonly hint: string | undefined;
|
||||
public readonly severity: string | undefined;
|
||||
public readonly position: string | undefined;
|
||||
public readonly internalPosition: string | undefined;
|
||||
public readonly internalQuery: string | undefined;
|
||||
public readonly where: string | undefined;
|
||||
public readonly schema: string | undefined;
|
||||
public readonly table: string | undefined;
|
||||
public readonly column: string | undefined;
|
||||
public readonly dataType: string | undefined;
|
||||
public readonly constraint: string | undefined;
|
||||
public readonly file: string | undefined;
|
||||
public readonly line: string | undefined;
|
||||
public readonly routine: string | undefined;
|
||||
public readonly errno?: string | undefined;
|
||||
public readonly detail?: string | undefined;
|
||||
public readonly hint?: string | undefined;
|
||||
public readonly severity?: string | undefined;
|
||||
public readonly position?: string | undefined;
|
||||
public readonly internalPosition?: string | undefined;
|
||||
public readonly internalQuery?: string | undefined;
|
||||
public readonly where?: string | undefined;
|
||||
public readonly schema?: string | undefined;
|
||||
public readonly table?: string | undefined;
|
||||
public readonly column?: string | undefined;
|
||||
public readonly dataType?: string | undefined;
|
||||
public readonly constraint?: string | undefined;
|
||||
public readonly file?: string | undefined;
|
||||
public readonly line?: string | undefined;
|
||||
public readonly routine?: string | undefined;
|
||||
|
||||
constructor(
|
||||
message: string,
|
||||
@@ -84,8 +84,8 @@ declare module "bun" {
|
||||
|
||||
class MySQLError extends SQLError {
|
||||
public readonly code: string;
|
||||
public readonly errno: number | undefined;
|
||||
public readonly sqlState: string | undefined;
|
||||
public readonly errno?: number | undefined;
|
||||
public readonly sqlState?: string | undefined;
|
||||
constructor(message: string, options: { code: string; errno: number | undefined; sqlState: string | undefined });
|
||||
}
|
||||
|
||||
@@ -143,13 +143,13 @@ declare module "bun" {
|
||||
|
||||
/**
|
||||
* Database server hostname
|
||||
* @deprecated Prefer {@link hostname}
|
||||
* @default "localhost"
|
||||
*/
|
||||
host?: string | undefined;
|
||||
|
||||
/**
|
||||
* Database server hostname (alias for host)
|
||||
* @deprecated Prefer {@link host}
|
||||
* Database server hostname
|
||||
* @default "localhost"
|
||||
*/
|
||||
hostname?: string | undefined;
|
||||
@@ -264,13 +264,14 @@ declare module "bun" {
|
||||
* Whether to use TLS/SSL for the connection
|
||||
* @default false
|
||||
*/
|
||||
tls?: TLSOptions | boolean | undefined;
|
||||
tls?: Bun.BunFile | TLSOptions | boolean | undefined;
|
||||
|
||||
/**
|
||||
* Whether to use TLS/SSL for the connection (alias for tls)
|
||||
* @deprecated Prefer {@link tls}
|
||||
* @default false
|
||||
*/
|
||||
ssl?: TLSOptions | boolean | undefined;
|
||||
ssl?: Bun.BunFile | TLSOptions | boolean | undefined;
|
||||
|
||||
/**
|
||||
* Unix domain socket path for connection
|
||||
|
||||
156
packages/bun-types/test.d.ts
vendored
156
packages/bun-types/test.d.ts
vendored
@@ -14,11 +14,6 @@
|
||||
* ```
|
||||
*/
|
||||
declare module "bun:test" {
|
||||
/**
|
||||
* -- Mocks --
|
||||
*
|
||||
* @category Testing
|
||||
*/
|
||||
export type Mock<T extends (...args: any[]) => any> = JestMock.Mock<T>;
|
||||
|
||||
export const mock: {
|
||||
@@ -152,11 +147,41 @@ declare module "bun:test" {
|
||||
type SpiedSetter<T> = JestMock.SpiedSetter<T>;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a spy on an object property or method
|
||||
*/
|
||||
export function spyOn<T extends object, K extends keyof T>(
|
||||
obj: T,
|
||||
methodOrPropertyValue: K,
|
||||
): Mock<Extract<T[K], (...args: any[]) => any>>;
|
||||
|
||||
/**
|
||||
* Vitest-compatible mocking utilities
|
||||
* Provides Vitest-style mocking API for easier migration from Vitest to Bun
|
||||
*/
|
||||
export const vi: {
|
||||
/**
|
||||
* Create a mock function
|
||||
*/
|
||||
fn: typeof jest.fn;
|
||||
/**
|
||||
* Create a spy on an object property or method
|
||||
*/
|
||||
spyOn: typeof spyOn;
|
||||
/**
|
||||
* Mock a module
|
||||
*/
|
||||
module: typeof mock.module;
|
||||
/**
|
||||
* Restore all mocks to their original implementation
|
||||
*/
|
||||
restoreAllMocks: typeof jest.restoreAllMocks;
|
||||
/**
|
||||
* Clear all mock state (calls, results, etc.) without restoring original implementation
|
||||
*/
|
||||
clearAllMocks: typeof jest.clearAllMocks;
|
||||
};
|
||||
|
||||
interface FunctionLike {
|
||||
readonly name: string;
|
||||
}
|
||||
@@ -558,7 +583,9 @@ declare module "bun:test" {
|
||||
* @param customFailMessage an optional custom message to display if the test fails.
|
||||
* */
|
||||
|
||||
<T = unknown>(actual?: T, customFailMessage?: string): Matchers<T>;
|
||||
(actual?: never, customFailMessage?: string): Matchers<undefined>;
|
||||
<T = unknown>(actual: T, customFailMessage?: string): Matchers<T>;
|
||||
<T = unknown>(actual?: T, customFailMessage?: string): Matchers<T | undefined>;
|
||||
|
||||
/**
|
||||
* Access to negated asymmetric matchers.
|
||||
@@ -876,6 +903,7 @@ declare module "bun:test" {
|
||||
* @param message the message to display if the test fails (optional)
|
||||
*/
|
||||
pass: (message?: string) => void;
|
||||
|
||||
/**
|
||||
* Assertion which fails.
|
||||
*
|
||||
@@ -887,6 +915,7 @@ declare module "bun:test" {
|
||||
* expect().not.fail("hi");
|
||||
*/
|
||||
fail: (message?: string) => void;
|
||||
|
||||
/**
|
||||
* Asserts that a value equals what is expected.
|
||||
*
|
||||
@@ -900,9 +929,15 @@ declare module "bun:test" {
|
||||
* expect([123]).toBe([123]); // fail, use toEqual()
|
||||
* expect(3 + 0.14).toBe(3.14); // fail, use toBeCloseTo()
|
||||
*
|
||||
* // TypeScript errors:
|
||||
* expect("hello").toBe(3.14); // typescript error + fail
|
||||
* expect("hello").toBe<number>(3.14); // no typescript error, but still fails
|
||||
*
|
||||
* @param expected the expected value
|
||||
*/
|
||||
toBe(expected: T): void;
|
||||
toBe<X = T>(expected: NoInfer<X>): void;
|
||||
|
||||
/**
|
||||
* Asserts that a number is odd.
|
||||
*
|
||||
@@ -912,6 +947,7 @@ declare module "bun:test" {
|
||||
* expect(2).not.toBeOdd();
|
||||
*/
|
||||
toBeOdd(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a number is even.
|
||||
*
|
||||
@@ -921,6 +957,7 @@ declare module "bun:test" {
|
||||
* expect(1).not.toBeEven();
|
||||
*/
|
||||
toBeEven(): void;
|
||||
|
||||
/**
|
||||
* Asserts that value is close to the expected by floating point precision.
|
||||
*
|
||||
@@ -939,6 +976,7 @@ declare module "bun:test" {
|
||||
* @param numDigits the number of digits to check after the decimal point. Default is `2`
|
||||
*/
|
||||
toBeCloseTo(expected: number, numDigits?: number): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is deeply equal to what is expected.
|
||||
*
|
||||
@@ -951,6 +989,8 @@ declare module "bun:test" {
|
||||
* @param expected the expected value
|
||||
*/
|
||||
toEqual(expected: T): void;
|
||||
toEqual<X = T>(expected: NoInfer<X>): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is deeply and strictly equal to
|
||||
* what is expected.
|
||||
@@ -975,6 +1015,8 @@ declare module "bun:test" {
|
||||
* @param expected the expected value
|
||||
*/
|
||||
toStrictEqual(expected: T): void;
|
||||
toStrictEqual<X = T>(expected: NoInfer<X>): void;
|
||||
|
||||
/**
|
||||
* Asserts that the value is deep equal to an element in the expected array.
|
||||
*
|
||||
@@ -987,7 +1029,9 @@ declare module "bun:test" {
|
||||
*
|
||||
* @param expected the expected value
|
||||
*/
|
||||
toBeOneOf(expected: Array<unknown> | Iterable<unknown>): void;
|
||||
toBeOneOf(expected: Iterable<T>): void;
|
||||
toBeOneOf<X = T>(expected: NoInfer<Iterable<X>>): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value contains what is expected.
|
||||
*
|
||||
@@ -1001,7 +1045,9 @@ declare module "bun:test" {
|
||||
*
|
||||
* @param expected the expected value
|
||||
*/
|
||||
toContain(expected: unknown): void;
|
||||
toContain(expected: T extends Iterable<infer U> ? U : T): void;
|
||||
toContain<X = T>(expected: NoInfer<X extends Iterable<infer U> ? U : X>): void;
|
||||
|
||||
/**
|
||||
* Asserts that an `object` contains a key.
|
||||
*
|
||||
@@ -1015,7 +1061,9 @@ declare module "bun:test" {
|
||||
*
|
||||
* @param expected the expected value
|
||||
*/
|
||||
toContainKey(expected: unknown): void;
|
||||
toContainKey(expected: keyof T): void;
|
||||
toContainKey<X = T>(expected: NoInfer<keyof X>): void;
|
||||
|
||||
/**
|
||||
* Asserts that an `object` contains all the provided keys.
|
||||
*
|
||||
@@ -1030,7 +1078,9 @@ declare module "bun:test" {
|
||||
*
|
||||
* @param expected the expected value
|
||||
*/
|
||||
toContainAllKeys(expected: unknown): void;
|
||||
toContainAllKeys(expected: Array<keyof T>): void;
|
||||
toContainAllKeys<X = T>(expected: NoInfer<Array<keyof X>>): void;
|
||||
|
||||
/**
|
||||
* Asserts that an `object` contains at least one of the provided keys.
|
||||
* Asserts that an `object` contains all the provided keys.
|
||||
@@ -1045,12 +1095,16 @@ declare module "bun:test" {
|
||||
*
|
||||
* @param expected the expected value
|
||||
*/
|
||||
toContainAnyKeys(expected: unknown): void;
|
||||
toContainAnyKeys(expected: Array<keyof T>): void;
|
||||
toContainAnyKeys<X = T>(expected: NoInfer<Array<keyof X>>): void;
|
||||
|
||||
/**
|
||||
* Asserts that an `object` contain the provided value.
|
||||
*
|
||||
* The value must be an object
|
||||
* This method is deep and will look through child properties to find the
|
||||
* expected value.
|
||||
*
|
||||
* The input value must be an object.
|
||||
*
|
||||
* @example
|
||||
* const shallow = { hello: "world" };
|
||||
@@ -1074,11 +1128,16 @@ declare module "bun:test" {
|
||||
*
|
||||
* @param expected the expected value
|
||||
*/
|
||||
// Contributor note: In theory we could type this better but it would be a
|
||||
// slow union to compute...
|
||||
toContainValue(expected: unknown): void;
|
||||
|
||||
/**
|
||||
* Asserts that an `object` contain the provided value.
|
||||
*
|
||||
* This is the same as {@link toContainValue}, but accepts an array of
|
||||
* values instead.
|
||||
*
|
||||
* The value must be an object
|
||||
*
|
||||
* @example
|
||||
@@ -1088,7 +1147,7 @@ declare module "bun:test" {
|
||||
* expect(o).not.toContainValues(['qux', 'foo']);
|
||||
* @param expected the expected value
|
||||
*/
|
||||
toContainValues(expected: unknown): void;
|
||||
toContainValues(expected: Array<unknown>): void;
|
||||
|
||||
/**
|
||||
* Asserts that an `object` contain all the provided values.
|
||||
@@ -1102,7 +1161,7 @@ declare module "bun:test" {
|
||||
* expect(o).not.toContainAllValues(['bar', 'foo']);
|
||||
* @param expected the expected value
|
||||
*/
|
||||
toContainAllValues(expected: unknown): void;
|
||||
toContainAllValues(expected: Array<unknown>): void;
|
||||
|
||||
/**
|
||||
* Asserts that an `object` contain any provided value.
|
||||
@@ -1117,7 +1176,7 @@ declare module "bun:test" {
|
||||
* expect(o).not.toContainAnyValues(['qux']);
|
||||
* @param expected the expected value
|
||||
*/
|
||||
toContainAnyValues(expected: unknown): void;
|
||||
toContainAnyValues(expected: Array<unknown>): void;
|
||||
|
||||
/**
|
||||
* Asserts that an `object` contains all the provided keys.
|
||||
@@ -1129,7 +1188,9 @@ declare module "bun:test" {
|
||||
*
|
||||
* @param expected the expected value
|
||||
*/
|
||||
toContainKeys(expected: unknown): void;
|
||||
toContainKeys(expected: Array<keyof T>): void;
|
||||
toContainKeys<X = T>(expected: NoInfer<Array<keyof X>>): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value contains and equals what is expected.
|
||||
*
|
||||
@@ -1142,7 +1203,9 @@ declare module "bun:test" {
|
||||
*
|
||||
* @param expected the expected value
|
||||
*/
|
||||
toContainEqual(expected: unknown): void;
|
||||
toContainEqual(expected: T extends Iterable<infer U> ? U : T): void;
|
||||
toContainEqual<X = T>(expected: NoInfer<X extends Iterable<infer U> ? U : X>): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value has a `.length` property
|
||||
* that is equal to the expected length.
|
||||
@@ -1154,6 +1217,7 @@ declare module "bun:test" {
|
||||
* @param length the expected length
|
||||
*/
|
||||
toHaveLength(length: number): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value has a property with the
|
||||
* expected name, and value if provided.
|
||||
@@ -1168,6 +1232,7 @@ declare module "bun:test" {
|
||||
* @param value the expected property value, if provided
|
||||
*/
|
||||
toHaveProperty(keyPath: string | number | Array<string | number>, value?: unknown): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is "truthy".
|
||||
*
|
||||
@@ -1180,6 +1245,7 @@ declare module "bun:test" {
|
||||
* expect({}).toBeTruthy();
|
||||
*/
|
||||
toBeTruthy(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is "falsy".
|
||||
*
|
||||
@@ -1192,6 +1258,7 @@ declare module "bun:test" {
|
||||
* expect({}).toBeTruthy();
|
||||
*/
|
||||
toBeFalsy(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is defined. (e.g. is not `undefined`)
|
||||
*
|
||||
@@ -1200,6 +1267,7 @@ declare module "bun:test" {
|
||||
* expect(undefined).toBeDefined(); // fail
|
||||
*/
|
||||
toBeDefined(): void;
|
||||
|
||||
/**
|
||||
* Asserts that the expected value is an instance of value
|
||||
*
|
||||
@@ -1208,6 +1276,7 @@ declare module "bun:test" {
|
||||
* expect(null).toBeInstanceOf(Array); // fail
|
||||
*/
|
||||
toBeInstanceOf(value: unknown): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is `undefined`.
|
||||
*
|
||||
@@ -1216,6 +1285,7 @@ declare module "bun:test" {
|
||||
* expect(null).toBeUndefined(); // fail
|
||||
*/
|
||||
toBeUndefined(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is `null`.
|
||||
*
|
||||
@@ -1224,6 +1294,7 @@ declare module "bun:test" {
|
||||
* expect(undefined).toBeNull(); // fail
|
||||
*/
|
||||
toBeNull(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is `NaN`.
|
||||
*
|
||||
@@ -1235,6 +1306,7 @@ declare module "bun:test" {
|
||||
* expect("notanumber").toBeNaN(); // fail
|
||||
*/
|
||||
toBeNaN(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is a `number` and is greater than the expected value.
|
||||
*
|
||||
@@ -1246,6 +1318,7 @@ declare module "bun:test" {
|
||||
* @param expected the expected number
|
||||
*/
|
||||
toBeGreaterThan(expected: number | bigint): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is a `number` and is greater than or equal to the expected value.
|
||||
*
|
||||
@@ -1257,6 +1330,7 @@ declare module "bun:test" {
|
||||
* @param expected the expected number
|
||||
*/
|
||||
toBeGreaterThanOrEqual(expected: number | bigint): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is a `number` and is less than the expected value.
|
||||
*
|
||||
@@ -1268,6 +1342,7 @@ declare module "bun:test" {
|
||||
* @param expected the expected number
|
||||
*/
|
||||
toBeLessThan(expected: number | bigint): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is a `number` and is less than or equal to the expected value.
|
||||
*
|
||||
@@ -1279,6 +1354,7 @@ declare module "bun:test" {
|
||||
* @param expected the expected number
|
||||
*/
|
||||
toBeLessThanOrEqual(expected: number | bigint): void;
|
||||
|
||||
/**
|
||||
* Asserts that a function throws an error.
|
||||
*
|
||||
@@ -1299,6 +1375,7 @@ declare module "bun:test" {
|
||||
* @param expected the expected error, error message, or error pattern
|
||||
*/
|
||||
toThrow(expected?: unknown): void;
|
||||
|
||||
/**
|
||||
* Asserts that a function throws an error.
|
||||
*
|
||||
@@ -1320,6 +1397,7 @@ declare module "bun:test" {
|
||||
* @alias toThrow
|
||||
*/
|
||||
toThrowError(expected?: unknown): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value matches a regular expression or includes a substring.
|
||||
*
|
||||
@@ -1330,6 +1408,7 @@ declare module "bun:test" {
|
||||
* @param expected the expected substring or pattern.
|
||||
*/
|
||||
toMatch(expected: string | RegExp): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value matches the most recent snapshot.
|
||||
*
|
||||
@@ -1338,6 +1417,7 @@ declare module "bun:test" {
|
||||
* @param hint Hint used to identify the snapshot in the snapshot file.
|
||||
*/
|
||||
toMatchSnapshot(hint?: string): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value matches the most recent snapshot.
|
||||
*
|
||||
@@ -1350,6 +1430,7 @@ declare module "bun:test" {
|
||||
* @param hint Hint used to identify the snapshot in the snapshot file.
|
||||
*/
|
||||
toMatchSnapshot(propertyMatchers?: object, hint?: string): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value matches the most recent inline snapshot.
|
||||
*
|
||||
@@ -1360,6 +1441,7 @@ declare module "bun:test" {
|
||||
* @param value The latest automatically-updated snapshot value.
|
||||
*/
|
||||
toMatchInlineSnapshot(value?: string): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value matches the most recent inline snapshot.
|
||||
*
|
||||
@@ -1375,6 +1457,7 @@ declare module "bun:test" {
|
||||
* @param value The latest automatically-updated snapshot value.
|
||||
*/
|
||||
toMatchInlineSnapshot(propertyMatchers?: object, value?: string): void;
|
||||
|
||||
/**
|
||||
* Asserts that a function throws an error matching the most recent snapshot.
|
||||
*
|
||||
@@ -1388,6 +1471,7 @@ declare module "bun:test" {
|
||||
* @param value The latest automatically-updated snapshot value.
|
||||
*/
|
||||
toThrowErrorMatchingSnapshot(hint?: string): void;
|
||||
|
||||
/**
|
||||
* Asserts that a function throws an error matching the most recent snapshot.
|
||||
*
|
||||
@@ -1401,6 +1485,7 @@ declare module "bun:test" {
|
||||
* @param value The latest automatically-updated snapshot value.
|
||||
*/
|
||||
toThrowErrorMatchingInlineSnapshot(value?: string): void;
|
||||
|
||||
/**
|
||||
* Asserts that an object matches a subset of properties.
|
||||
*
|
||||
@@ -1411,6 +1496,7 @@ declare module "bun:test" {
|
||||
* @param subset Subset of properties to match with.
|
||||
*/
|
||||
toMatchObject(subset: object): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is empty.
|
||||
*
|
||||
@@ -1421,6 +1507,7 @@ declare module "bun:test" {
|
||||
* expect(new Set()).toBeEmpty();
|
||||
*/
|
||||
toBeEmpty(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is an empty `object`.
|
||||
*
|
||||
@@ -1429,6 +1516,7 @@ declare module "bun:test" {
|
||||
* expect({ a: 'hello' }).not.toBeEmptyObject();
|
||||
*/
|
||||
toBeEmptyObject(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is `null` or `undefined`.
|
||||
*
|
||||
@@ -1437,6 +1525,7 @@ declare module "bun:test" {
|
||||
* expect(undefined).toBeNil();
|
||||
*/
|
||||
toBeNil(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is a `array`.
|
||||
*
|
||||
@@ -1447,6 +1536,7 @@ declare module "bun:test" {
|
||||
* expect({}).not.toBeArray();
|
||||
*/
|
||||
toBeArray(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is a `array` of a certain length.
|
||||
*
|
||||
@@ -1458,6 +1548,7 @@ declare module "bun:test" {
|
||||
* expect({}).not.toBeArrayOfSize(0);
|
||||
*/
|
||||
toBeArrayOfSize(size: number): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is a `boolean`.
|
||||
*
|
||||
@@ -1468,6 +1559,7 @@ declare module "bun:test" {
|
||||
* expect(0).not.toBeBoolean();
|
||||
*/
|
||||
toBeBoolean(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is `true`.
|
||||
*
|
||||
@@ -1477,6 +1569,7 @@ declare module "bun:test" {
|
||||
* expect(1).not.toBeTrue();
|
||||
*/
|
||||
toBeTrue(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value matches a specific type.
|
||||
*
|
||||
@@ -1487,6 +1580,7 @@ declare module "bun:test" {
|
||||
* expect([]).not.toBeTypeOf("boolean");
|
||||
*/
|
||||
toBeTypeOf(type: "bigint" | "boolean" | "function" | "number" | "object" | "string" | "symbol" | "undefined"): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is `false`.
|
||||
*
|
||||
@@ -1496,6 +1590,7 @@ declare module "bun:test" {
|
||||
* expect(0).not.toBeFalse();
|
||||
*/
|
||||
toBeFalse(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is a `number`.
|
||||
*
|
||||
@@ -1506,6 +1601,7 @@ declare module "bun:test" {
|
||||
* expect(BigInt(1)).not.toBeNumber();
|
||||
*/
|
||||
toBeNumber(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is a `number`, and is an integer.
|
||||
*
|
||||
@@ -1515,6 +1611,7 @@ declare module "bun:test" {
|
||||
* expect(NaN).not.toBeInteger();
|
||||
*/
|
||||
toBeInteger(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is an `object`.
|
||||
*
|
||||
@@ -1524,6 +1621,7 @@ declare module "bun:test" {
|
||||
* expect(NaN).not.toBeObject();
|
||||
*/
|
||||
toBeObject(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is a `number`, and is not `NaN` or `Infinity`.
|
||||
*
|
||||
@@ -1534,6 +1632,7 @@ declare module "bun:test" {
|
||||
* expect(Infinity).not.toBeFinite();
|
||||
*/
|
||||
toBeFinite(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is a positive `number`.
|
||||
*
|
||||
@@ -1543,6 +1642,7 @@ declare module "bun:test" {
|
||||
* expect(NaN).not.toBePositive();
|
||||
*/
|
||||
toBePositive(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is a negative `number`.
|
||||
*
|
||||
@@ -1552,6 +1652,7 @@ declare module "bun:test" {
|
||||
* expect(NaN).not.toBeNegative();
|
||||
*/
|
||||
toBeNegative(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is a number between a start and end value.
|
||||
*
|
||||
@@ -1559,6 +1660,7 @@ declare module "bun:test" {
|
||||
* @param end the end number (exclusive)
|
||||
*/
|
||||
toBeWithin(start: number, end: number): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is equal to the expected string, ignoring any whitespace.
|
||||
*
|
||||
@@ -1569,6 +1671,7 @@ declare module "bun:test" {
|
||||
* @param expected the expected string
|
||||
*/
|
||||
toEqualIgnoringWhitespace(expected: string): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is a `symbol`.
|
||||
*
|
||||
@@ -1577,6 +1680,7 @@ declare module "bun:test" {
|
||||
* expect("foo").not.toBeSymbol();
|
||||
*/
|
||||
toBeSymbol(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is a `function`.
|
||||
*
|
||||
@@ -1584,6 +1688,7 @@ declare module "bun:test" {
|
||||
* expect(() => {}).toBeFunction();
|
||||
*/
|
||||
toBeFunction(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is a `Date` object.
|
||||
*
|
||||
@@ -1595,6 +1700,7 @@ declare module "bun:test" {
|
||||
* expect("2020-03-01").not.toBeDate();
|
||||
*/
|
||||
toBeDate(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is a valid `Date` object.
|
||||
*
|
||||
@@ -1604,6 +1710,7 @@ declare module "bun:test" {
|
||||
* expect("2020-03-01").not.toBeValidDate();
|
||||
*/
|
||||
toBeValidDate(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is a `string`.
|
||||
*
|
||||
@@ -1613,6 +1720,7 @@ declare module "bun:test" {
|
||||
* expect(123).not.toBeString();
|
||||
*/
|
||||
toBeString(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value includes a `string`.
|
||||
*
|
||||
@@ -1621,12 +1729,14 @@ declare module "bun:test" {
|
||||
* @param expected the expected substring
|
||||
*/
|
||||
toInclude(expected: string): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value includes a `string` {times} times.
|
||||
* @param expected the expected substring
|
||||
* @param times the number of times the substring should occur
|
||||
*/
|
||||
toIncludeRepeated(expected: string, times: number): void;
|
||||
|
||||
/**
|
||||
* Checks whether a value satisfies a custom condition.
|
||||
* @param {Function} predicate - The custom condition to be satisfied. It should be a function that takes a value as an argument (in this case the value from expect) and returns a boolean.
|
||||
@@ -1638,18 +1748,21 @@ declare module "bun:test" {
|
||||
* @link https://jest-extended.jestcommunity.dev/docs/matchers/toSatisfy
|
||||
*/
|
||||
toSatisfy(predicate: (value: T) => boolean): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value starts with a `string`.
|
||||
*
|
||||
* @param expected the string to start with
|
||||
*/
|
||||
toStartWith(expected: string): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value ends with a `string`.
|
||||
*
|
||||
* @param expected the string to end with
|
||||
*/
|
||||
toEndWith(expected: string): void;
|
||||
|
||||
/**
|
||||
* Ensures that a mock function has returned successfully at least once.
|
||||
*
|
||||
@@ -1690,42 +1803,51 @@ declare module "bun:test" {
|
||||
* Ensures that a mock function is called.
|
||||
*/
|
||||
toHaveBeenCalled(): void;
|
||||
|
||||
/**
|
||||
* Ensures that a mock function is called an exact number of times.
|
||||
* @alias toHaveBeenCalled
|
||||
*/
|
||||
toBeCalled(): void;
|
||||
|
||||
/**
|
||||
* Ensures that a mock function is called an exact number of times.
|
||||
*/
|
||||
toHaveBeenCalledTimes(expected: number): void;
|
||||
|
||||
/**
|
||||
* Ensure that a mock function is called with specific arguments.
|
||||
* @alias toHaveBeenCalledTimes
|
||||
*/
|
||||
toBeCalledTimes(expected: number): void;
|
||||
|
||||
/**
|
||||
* Ensure that a mock function is called with specific arguments.
|
||||
*/
|
||||
toHaveBeenCalledWith(...expected: unknown[]): void;
|
||||
|
||||
/**
|
||||
* Ensure that a mock function is called with specific arguments.
|
||||
* @alias toHaveBeenCalledWith
|
||||
*/
|
||||
toBeCalledWith(...expected: unknown[]): void;
|
||||
|
||||
/**
|
||||
* Ensure that a mock function is called with specific arguments for the last call.
|
||||
*/
|
||||
toHaveBeenLastCalledWith(...expected: unknown[]): void;
|
||||
|
||||
/**
|
||||
* Ensure that a mock function is called with specific arguments for the nth call.
|
||||
* @alias toHaveBeenCalledWith
|
||||
*/
|
||||
lastCalledWith(...expected: unknown[]): void;
|
||||
|
||||
/**
|
||||
* Ensure that a mock function is called with specific arguments for the nth call.
|
||||
*/
|
||||
toHaveBeenNthCalledWith(n: number, ...expected: unknown[]): void;
|
||||
|
||||
/**
|
||||
* Ensure that a mock function is called with specific arguments for the nth call.
|
||||
* @alias toHaveBeenCalledWith
|
||||
|
||||
@@ -25,6 +25,23 @@
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
|
||||
#if BUN_DEBUG
|
||||
// Debug network traffic logging
|
||||
static FILE *debug_recv_file = NULL;
|
||||
static FILE *debug_send_file = NULL;
|
||||
static int debug_logging_initialized = 0;
|
||||
|
||||
static void init_debug_logging() {
|
||||
if (debug_logging_initialized) return;
|
||||
debug_logging_initialized = 1;
|
||||
|
||||
const char *recv_path = getenv("BUN_RECV");
|
||||
const char *send_path = getenv("BUN_SEND");
|
||||
if (recv_path) if (!debug_recv_file) debug_recv_file = fopen(recv_path, "w");
|
||||
if (send_path) if (!debug_send_file) debug_send_file = fopen(send_path, "w");
|
||||
}
|
||||
#endif
|
||||
|
||||
#ifndef _WIN32
|
||||
// Necessary for the stdint include
|
||||
#ifndef _GNU_SOURCE
|
||||
@@ -721,6 +738,17 @@ ssize_t bsd_recv(LIBUS_SOCKET_DESCRIPTOR fd, void *buf, int length, int flags) {
|
||||
continue;
|
||||
}
|
||||
|
||||
#if BUN_DEBUG
|
||||
// Debug logging for received data
|
||||
if (ret > 0) {
|
||||
init_debug_logging();
|
||||
if (debug_recv_file) {
|
||||
fwrite(buf, 1, ret, debug_recv_file);
|
||||
fflush(debug_recv_file);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
return ret;
|
||||
}
|
||||
}
|
||||
@@ -788,6 +816,17 @@ ssize_t bsd_send(LIBUS_SOCKET_DESCRIPTOR fd, const char *buf, int length) {
|
||||
continue;
|
||||
}
|
||||
|
||||
#if BUN_DEBUG
|
||||
// Debug logging for sent data
|
||||
if (rc > 0) {
|
||||
init_debug_logging();
|
||||
if (debug_send_file) {
|
||||
fwrite(buf, 1, rc, debug_send_file);
|
||||
fflush(debug_send_file);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
return rc;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -153,7 +153,7 @@ void us_internal_socket_context_unlink_connecting_socket(int ssl, struct us_sock
|
||||
}
|
||||
|
||||
/* We always add in the top, so we don't modify any s.next */
|
||||
void us_internal_socket_context_link_listen_socket(struct us_socket_context_t *context, struct us_listen_socket_t *ls) {
|
||||
void us_internal_socket_context_link_listen_socket(int ssl, struct us_socket_context_t *context, struct us_listen_socket_t *ls) {
|
||||
struct us_socket_t* s = &ls->s;
|
||||
s->context = context;
|
||||
s->next = (struct us_socket_t *) context->head_listen_sockets;
|
||||
@@ -162,7 +162,7 @@ void us_internal_socket_context_link_listen_socket(struct us_socket_context_t *c
|
||||
context->head_listen_sockets->s.prev = s;
|
||||
}
|
||||
context->head_listen_sockets = ls;
|
||||
us_socket_context_ref(0, context);
|
||||
us_socket_context_ref(ssl, context);
|
||||
}
|
||||
|
||||
void us_internal_socket_context_link_connecting_socket(int ssl, struct us_socket_context_t *context, struct us_connecting_socket_t *c) {
|
||||
@@ -179,7 +179,7 @@ void us_internal_socket_context_link_connecting_socket(int ssl, struct us_socket
|
||||
|
||||
|
||||
/* We always add in the top, so we don't modify any s.next */
|
||||
void us_internal_socket_context_link_socket(struct us_socket_context_t *context, struct us_socket_t *s) {
|
||||
void us_internal_socket_context_link_socket(int ssl, struct us_socket_context_t *context, struct us_socket_t *s) {
|
||||
s->context = context;
|
||||
s->next = context->head_sockets;
|
||||
s->prev = 0;
|
||||
@@ -187,7 +187,7 @@ void us_internal_socket_context_link_socket(struct us_socket_context_t *context,
|
||||
context->head_sockets->prev = s;
|
||||
}
|
||||
context->head_sockets = s;
|
||||
us_socket_context_ref(0, context);
|
||||
us_socket_context_ref(ssl, context);
|
||||
us_internal_enable_sweep_timer(context->loop);
|
||||
}
|
||||
|
||||
@@ -388,7 +388,7 @@ struct us_listen_socket_t *us_socket_context_listen(int ssl, struct us_socket_co
|
||||
s->flags.is_ipc = 0;
|
||||
s->next = 0;
|
||||
s->flags.allow_half_open = (options & LIBUS_SOCKET_ALLOW_HALF_OPEN);
|
||||
us_internal_socket_context_link_listen_socket(context, ls);
|
||||
us_internal_socket_context_link_listen_socket(ssl, context, ls);
|
||||
|
||||
ls->socket_ext_size = socket_ext_size;
|
||||
|
||||
@@ -423,7 +423,7 @@ struct us_listen_socket_t *us_socket_context_listen_unix(int ssl, struct us_sock
|
||||
s->flags.is_paused = 0;
|
||||
s->flags.is_ipc = 0;
|
||||
s->next = 0;
|
||||
us_internal_socket_context_link_listen_socket(context, ls);
|
||||
us_internal_socket_context_link_listen_socket(ssl, context, ls);
|
||||
|
||||
ls->socket_ext_size = socket_ext_size;
|
||||
|
||||
@@ -456,7 +456,7 @@ struct us_socket_t* us_socket_context_connect_resolved_dns(struct us_socket_cont
|
||||
socket->connect_state = NULL;
|
||||
socket->connect_next = NULL;
|
||||
|
||||
us_internal_socket_context_link_socket(context, socket);
|
||||
us_internal_socket_context_link_socket(0, context, socket);
|
||||
|
||||
return socket;
|
||||
}
|
||||
@@ -584,7 +584,7 @@ int start_connections(struct us_connecting_socket_t *c, int count) {
|
||||
flags->is_paused = 0;
|
||||
flags->is_ipc = 0;
|
||||
/* Link it into context so that timeout fires properly */
|
||||
us_internal_socket_context_link_socket(context, s);
|
||||
us_internal_socket_context_link_socket(0, context, s);
|
||||
|
||||
// TODO check this, specifically how it interacts with the SSL code
|
||||
// does this work when we create multiple sockets at once? will we need multiple SSL contexts?
|
||||
@@ -762,7 +762,7 @@ struct us_socket_t *us_socket_context_connect_unix(int ssl, struct us_socket_con
|
||||
connect_socket->flags.is_ipc = 0;
|
||||
connect_socket->connect_state = NULL;
|
||||
connect_socket->connect_next = NULL;
|
||||
us_internal_socket_context_link_socket(context, connect_socket);
|
||||
us_internal_socket_context_link_socket(ssl, context, connect_socket);
|
||||
|
||||
return connect_socket;
|
||||
}
|
||||
@@ -804,12 +804,9 @@ struct us_socket_t *us_socket_context_adopt_socket(int ssl, struct us_socket_con
|
||||
}
|
||||
|
||||
struct us_connecting_socket_t *c = s->connect_state;
|
||||
|
||||
struct us_socket_t *new_s = s;
|
||||
|
||||
if (ext_size != -1) {
|
||||
struct us_poll_t *pool_ref = &s->p;
|
||||
|
||||
new_s = (struct us_socket_t *) us_poll_resize(pool_ref, loop, sizeof(struct us_socket_t) + ext_size);
|
||||
if (c) {
|
||||
c->connecting_head = new_s;
|
||||
@@ -831,7 +828,7 @@ struct us_socket_t *us_socket_context_adopt_socket(int ssl, struct us_socket_con
|
||||
/* We manually ref/unref context to handle context life cycle with low-priority queue */
|
||||
us_socket_context_ref(ssl, context);
|
||||
} else {
|
||||
us_internal_socket_context_link_socket(context, new_s);
|
||||
us_internal_socket_context_link_socket(ssl, context, new_s);
|
||||
}
|
||||
/* We can safely unref the old context here with can potentially be freed */
|
||||
us_socket_context_unref(ssl, old_context);
|
||||
|
||||
@@ -150,16 +150,12 @@ void us_internal_init_loop_ssl_data(us_loop_r loop);
|
||||
void us_internal_free_loop_ssl_data(us_loop_r loop);
|
||||
|
||||
/* Socket context related */
|
||||
void us_internal_socket_context_link_socket(us_socket_context_r context,
|
||||
us_socket_r s);
|
||||
void us_internal_socket_context_unlink_socket(int ssl,
|
||||
us_socket_context_r context, us_socket_r s);
|
||||
void us_internal_socket_context_link_socket(int ssl, us_socket_context_r context, us_socket_r s);
|
||||
void us_internal_socket_context_unlink_socket(int ssl, us_socket_context_r context, us_socket_r s);
|
||||
|
||||
void us_internal_socket_after_resolve(struct us_connecting_socket_t *s);
|
||||
void us_internal_socket_after_open(us_socket_r s, int error);
|
||||
struct us_internal_ssl_socket_t *
|
||||
us_internal_ssl_socket_close(us_internal_ssl_socket_r s, int code,
|
||||
void *reason);
|
||||
struct us_internal_ssl_socket_t *us_internal_ssl_socket_close(us_internal_ssl_socket_r s, int code, void *reason);
|
||||
|
||||
int us_internal_handle_dns_results(us_loop_r loop);
|
||||
|
||||
@@ -271,7 +267,7 @@ struct us_listen_socket_t {
|
||||
};
|
||||
|
||||
/* Listen sockets are keps in their own list */
|
||||
void us_internal_socket_context_link_listen_socket(
|
||||
void us_internal_socket_context_link_listen_socket(int ssl,
|
||||
us_socket_context_r context, struct us_listen_socket_t *s);
|
||||
void us_internal_socket_context_unlink_listen_socket(int ssl,
|
||||
us_socket_context_r context, struct us_listen_socket_t *s);
|
||||
@@ -288,8 +284,7 @@ struct us_socket_context_t {
|
||||
struct us_socket_t *iterator;
|
||||
struct us_socket_context_t *prev, *next;
|
||||
|
||||
struct us_socket_t *(*on_open)(struct us_socket_t *, int is_client, char *ip,
|
||||
int ip_length);
|
||||
struct us_socket_t *(*on_open)(struct us_socket_t *, int is_client, char *ip, int ip_length);
|
||||
struct us_socket_t *(*on_data)(struct us_socket_t *, char *data, int length);
|
||||
struct us_socket_t *(*on_fd)(struct us_socket_t *, int fd);
|
||||
struct us_socket_t *(*on_writable)(struct us_socket_t *);
|
||||
@@ -301,7 +296,6 @@ struct us_socket_context_t {
|
||||
struct us_connecting_socket_t *(*on_connect_error)(struct us_connecting_socket_t *, int code);
|
||||
struct us_socket_t *(*on_socket_connect_error)(struct us_socket_t *, int code);
|
||||
int (*is_low_prio)(struct us_socket_t *);
|
||||
|
||||
};
|
||||
|
||||
/* Internal SSL interface */
|
||||
|
||||
@@ -22,7 +22,16 @@
|
||||
#ifndef WIN32
|
||||
#include <sys/ioctl.h>
|
||||
#endif
|
||||
|
||||
#if __has_include("wtf/Platform.h")
|
||||
#include "wtf/Platform.h"
|
||||
#elif !defined(ASSERT_ENABLED)
|
||||
#if defined(BUN_DEBUG) || defined(__has_feature) && __has_feature(address_sanitizer) || defined(__SANITIZE_ADDRESS__)
|
||||
#define ASSERT_ENABLED 1
|
||||
#else
|
||||
#define ASSERT_ENABLED 0
|
||||
#endif
|
||||
#endif
|
||||
|
||||
#if ASSERT_ENABLED
|
||||
extern const size_t Bun__lock__size;
|
||||
@@ -40,7 +49,6 @@ void us_internal_enable_sweep_timer(struct us_loop_t *loop) {
|
||||
us_timer_set(loop->data.sweep_timer, (void (*)(struct us_timer_t *)) sweep_timer_cb, LIBUS_TIMEOUT_GRANULARITY * 1000, LIBUS_TIMEOUT_GRANULARITY * 1000);
|
||||
Bun__internal_ensureDateHeaderTimerIsEnabled(loop);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
void us_internal_disable_sweep_timer(struct us_loop_t *loop) {
|
||||
@@ -183,7 +191,7 @@ void us_internal_handle_low_priority_sockets(struct us_loop_t *loop) {
|
||||
if (s->next) s->next->prev = 0;
|
||||
s->next = 0;
|
||||
|
||||
us_internal_socket_context_link_socket(s->context, s);
|
||||
us_internal_socket_context_link_socket(0, s->context, s);
|
||||
us_poll_change(&s->p, us_socket_context(0, s)->loop, us_poll_events(&s->p) | LIBUS_SOCKET_READABLE);
|
||||
|
||||
s->flags.low_prio_state = 2;
|
||||
@@ -340,7 +348,7 @@ void us_internal_dispatch_ready_poll(struct us_poll_t *p, int error, int eof, in
|
||||
/* We always use nodelay */
|
||||
bsd_socket_nodelay(client_fd, 1);
|
||||
|
||||
us_internal_socket_context_link_socket(listen_socket->s.context, s);
|
||||
us_internal_socket_context_link_socket(0, listen_socket->s.context, s);
|
||||
|
||||
listen_socket->s.context->on_open(s, 0, bsd_addr_get_ip(&addr), bsd_addr_get_ip_length(&addr));
|
||||
|
||||
@@ -364,7 +372,7 @@ void us_internal_dispatch_ready_poll(struct us_poll_t *p, int error, int eof, in
|
||||
/* Note: if we failed a write as a socket of one loop then adopted
|
||||
* to another loop, this will be wrong. Absurd case though */
|
||||
loop->data.last_write_failed = 0;
|
||||
|
||||
|
||||
s = s->context->on_writable(s);
|
||||
|
||||
if (!s || us_socket_is_closed(0, s)) {
|
||||
|
||||
@@ -329,7 +329,7 @@ struct us_socket_t *us_socket_from_fd(struct us_socket_context_t *ctx, int socke
|
||||
bsd_socket_nodelay(fd, 1);
|
||||
apple_no_sigpipe(fd);
|
||||
bsd_set_nonblocking(fd);
|
||||
us_internal_socket_context_link_socket(ctx, s);
|
||||
us_internal_socket_context_link_socket(0, ctx, s);
|
||||
|
||||
return s;
|
||||
#endif
|
||||
|
||||
@@ -298,6 +298,22 @@ public:
|
||||
return std::move(*this);
|
||||
}
|
||||
|
||||
/** Closes all connections connected to this server which are not sending a request or waiting for a response. Does not close the listen socket. */
|
||||
TemplatedApp &&closeIdle() {
|
||||
auto context = (struct us_socket_context_t *)this->httpContext;
|
||||
struct us_socket_t *s = context->head_sockets;
|
||||
while (s) {
|
||||
HttpResponseData<SSL> *httpResponseData = HttpResponse<SSL>::getHttpResponseDataS(s);
|
||||
httpResponseData->shouldCloseOnceIdle = true;
|
||||
struct us_socket_t *next = s->next;
|
||||
if (httpResponseData->isIdle) {
|
||||
us_socket_close(SSL, s, LIBUS_SOCKET_CLOSE_CODE_CLEAN_SHUTDOWN, 0);
|
||||
}
|
||||
s = next;
|
||||
}
|
||||
return std::move(*this);
|
||||
}
|
||||
|
||||
template <typename UserData>
|
||||
TemplatedApp &&ws(std::string_view pattern, WebSocketBehavior<UserData> &&behavior) {
|
||||
/* Don't compile if alignment rules cannot be satisfied */
|
||||
|
||||
@@ -386,6 +386,9 @@ public:
|
||||
/* We do not need to care for buffering here, write does that */
|
||||
return {0, true};
|
||||
}
|
||||
if (length == 0) {
|
||||
return {written, failed};
|
||||
}
|
||||
}
|
||||
|
||||
/* We should only return with new writes, not things written to cork already */
|
||||
|
||||
@@ -137,10 +137,6 @@ private:
|
||||
return (HttpContextData<SSL> *) us_socket_context_ext(SSL, getSocketContext());
|
||||
}
|
||||
|
||||
static HttpContextData<SSL> *getSocketContextDataS(us_socket_t *s) {
|
||||
return (HttpContextData<SSL> *) us_socket_context_ext(SSL, getSocketContext(s));
|
||||
}
|
||||
|
||||
/* Init the HttpContext by registering libusockets event handlers */
|
||||
HttpContext<SSL> *init() {
|
||||
|
||||
@@ -247,6 +243,7 @@ private:
|
||||
|
||||
/* Mark that we are inside the parser now */
|
||||
httpContextData->flags.isParsingHttp = true;
|
||||
httpResponseData->isIdle = false;
|
||||
// clients need to know the cursor after http parse, not servers!
|
||||
// how far did we read then? we need to know to continue with websocket parsing data? or?
|
||||
|
||||
@@ -398,6 +395,7 @@ private:
|
||||
/* Timeout on uncork failure */
|
||||
auto [written, failed] = ((AsyncSocket<SSL> *) returnedData)->uncork();
|
||||
if (written > 0 || failed) {
|
||||
httpResponseData->isIdle = true;
|
||||
/* All Http sockets timeout by this, and this behavior match the one in HttpResponse::cork */
|
||||
((HttpResponse<SSL> *) s)->resetTimeout();
|
||||
}
|
||||
@@ -642,6 +640,10 @@ public:
|
||||
}, priority);
|
||||
}
|
||||
|
||||
static HttpContextData<SSL> *getSocketContextDataS(us_socket_t *s) {
|
||||
return (HttpContextData<SSL> *) us_socket_context_ext(SSL, getSocketContext(s));
|
||||
}
|
||||
|
||||
/* Listen to port using this HttpContext */
|
||||
us_listen_socket_t *listen(const char *host, int port, int options) {
|
||||
int error = 0;
|
||||
|
||||
@@ -63,7 +63,6 @@ private:
|
||||
OnSocketClosedCallback onSocketClosed = nullptr;
|
||||
OnClientErrorCallback onClientError = nullptr;
|
||||
|
||||
HttpFlags flags;
|
||||
uint64_t maxHeaderSize = 0; // 0 means no limit
|
||||
|
||||
// TODO: SNI
|
||||
@@ -73,10 +72,8 @@ private:
|
||||
filterHandlers.clear();
|
||||
}
|
||||
|
||||
public:
|
||||
bool isAuthorized() const {
|
||||
return flags.isAuthorized;
|
||||
}
|
||||
public:
|
||||
HttpFlags flags;
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
@@ -50,6 +50,11 @@ public:
|
||||
HttpResponseData<SSL> *getHttpResponseData() {
|
||||
return (HttpResponseData<SSL> *) Super::getAsyncSocketData();
|
||||
}
|
||||
|
||||
static HttpResponseData<SSL> *getHttpResponseDataS(us_socket_t *s) {
|
||||
return (HttpResponseData<SSL> *) us_socket_ext(SSL, s);
|
||||
}
|
||||
|
||||
void setTimeout(uint8_t seconds) {
|
||||
auto* data = getHttpResponseData();
|
||||
data->idleTimeout = seconds;
|
||||
@@ -132,7 +137,7 @@ public:
|
||||
|
||||
/* Terminating 0 chunk */
|
||||
Super::write("0\r\n\r\n", 5);
|
||||
httpResponseData->markDone();
|
||||
httpResponseData->markDone(this);
|
||||
|
||||
/* We need to check if we should close this socket here now */
|
||||
if (!Super::isCorked()) {
|
||||
@@ -198,7 +203,7 @@ public:
|
||||
|
||||
/* Remove onAborted function if we reach the end */
|
||||
if (httpResponseData->offset == totalSize) {
|
||||
httpResponseData->markDone();
|
||||
httpResponseData->markDone(this);
|
||||
|
||||
/* We need to check if we should close this socket here now */
|
||||
if (!Super::isCorked()) {
|
||||
|
||||
@@ -22,11 +22,15 @@
|
||||
#include "HttpParser.h"
|
||||
#include "AsyncSocketData.h"
|
||||
#include "ProxyParser.h"
|
||||
#include "HttpContext.h"
|
||||
|
||||
#include "MoveOnlyFunction.h"
|
||||
|
||||
namespace uWS {
|
||||
|
||||
template <bool SSL>
|
||||
struct HttpContext;
|
||||
|
||||
template <bool SSL>
|
||||
struct HttpResponseData : AsyncSocketData<SSL>, HttpParser {
|
||||
template <bool> friend struct HttpResponse;
|
||||
@@ -38,7 +42,7 @@ struct HttpResponseData : AsyncSocketData<SSL>, HttpParser {
|
||||
using OnDataCallback = void (*)(uWS::HttpResponse<SSL>* response, const char* chunk, size_t chunk_length, bool, void*);
|
||||
|
||||
/* When we are done with a response we mark it like so */
|
||||
void markDone() {
|
||||
void markDone(uWS::HttpResponse<SSL> *uwsRes) {
|
||||
onAborted = nullptr;
|
||||
/* Also remove onWritable so that we do not emit when draining behind the scenes. */
|
||||
onWritable = nullptr;
|
||||
@@ -50,6 +54,9 @@ struct HttpResponseData : AsyncSocketData<SSL>, HttpParser {
|
||||
|
||||
/* We are done with this request */
|
||||
this->state &= ~HttpResponseData<SSL>::HTTP_RESPONSE_PENDING;
|
||||
|
||||
HttpResponseData<SSL> *httpResponseData = uwsRes->getHttpResponseData();
|
||||
httpResponseData->isIdle = true;
|
||||
}
|
||||
|
||||
/* Caller of onWritable. It is possible onWritable calls markDone so we need to borrow it. */
|
||||
@@ -101,6 +108,8 @@ struct HttpResponseData : AsyncSocketData<SSL>, HttpParser {
|
||||
uint8_t state = 0;
|
||||
uint8_t idleTimeout = 10; // default HTTP_TIMEOUT 10 seconds
|
||||
bool fromAncientRequest = false;
|
||||
bool isIdle = true;
|
||||
bool shouldCloseOnceIdle = false;
|
||||
|
||||
|
||||
#ifdef UWS_WITH_PROXY
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
import { spawn as nodeSpawn } from "node:child_process";
|
||||
import { chmodSync, cpSync, existsSync, mkdirSync, readFileSync } from "node:fs";
|
||||
import { basename, join, relative, resolve } from "node:path";
|
||||
@@ -14,6 +12,10 @@ import {
|
||||
startGroup,
|
||||
} from "./utils.mjs";
|
||||
|
||||
if (globalThis.Bun) {
|
||||
await import("./glob-sources.mjs");
|
||||
}
|
||||
|
||||
// https://cmake.org/cmake/help/latest/manual/cmake.1.html#generate-a-project-buildsystem
|
||||
const generateFlags = [
|
||||
["-S", "string", "path to source directory"],
|
||||
|
||||
107
scripts/buildkite-slow-tests.js
Executable file
107
scripts/buildkite-slow-tests.js
Executable file
@@ -0,0 +1,107 @@
|
||||
#!/usr/bin/env bun
|
||||
|
||||
import { readFileSync } from "fs";
|
||||
|
||||
function parseLogFile(filename) {
|
||||
const testDetails = new Map(); // Track individual attempts and total for each test
|
||||
let currentTest = null;
|
||||
let startTime = null;
|
||||
|
||||
// Pattern to match test group start: --- [90m[N/TOTAL][0m test/path
|
||||
// Note: there are escape sequences before _bk
|
||||
const startPattern = /_bk;t=(\d+).*?--- .*?\[90m\[(\d+)\/(\d+)\].*?\[0m (.+)/;
|
||||
|
||||
const content = readFileSync(filename, "utf-8");
|
||||
const lines = content.split("\n");
|
||||
|
||||
for (const line of lines) {
|
||||
const match = line.match(startPattern);
|
||||
if (match) {
|
||||
// If we have a previous test, calculate its duration
|
||||
if (currentTest && startTime) {
|
||||
const endTime = parseInt(match[1]);
|
||||
const duration = endTime - startTime;
|
||||
|
||||
// Extract attempt info - match the actual ANSI pattern
|
||||
const attemptMatch = currentTest.match(/\s+\x1b\[90m\[attempt #(\d+)\]\x1b\[0m$/);
|
||||
const cleanName = currentTest.replace(/\s+\x1b\[90m\[attempt #\d+\]\x1b\[0m$/, "").trim();
|
||||
const attemptNum = attemptMatch ? parseInt(attemptMatch[1]) : 1;
|
||||
|
||||
if (!testDetails.has(cleanName)) {
|
||||
testDetails.set(cleanName, { total: 0, attempts: [] });
|
||||
}
|
||||
|
||||
const testInfo = testDetails.get(cleanName);
|
||||
testInfo.total += duration;
|
||||
testInfo.attempts.push({ attempt: attemptNum, duration });
|
||||
}
|
||||
|
||||
// Start new test
|
||||
startTime = parseInt(match[1]);
|
||||
currentTest = match[4].trim();
|
||||
}
|
||||
}
|
||||
|
||||
// Convert to array and sort by total duration
|
||||
const testGroups = Array.from(testDetails.entries())
|
||||
.map(([name, info]) => ({
|
||||
name,
|
||||
totalDuration: info.total,
|
||||
attempts: info.attempts.sort((a, b) => a.attempt - b.attempt),
|
||||
}))
|
||||
.sort((a, b) => b.totalDuration - a.totalDuration);
|
||||
|
||||
return testGroups;
|
||||
}
|
||||
|
||||
function formatAttempts(attempts) {
|
||||
if (attempts.length <= 1) return "";
|
||||
|
||||
const attemptStrings = attempts.map(
|
||||
({ attempt, duration }) => `${(duration / 1000).toFixed(1)}s attempt #${attempt}`,
|
||||
);
|
||||
return ` [${attemptStrings.join(", ")}]`;
|
||||
}
|
||||
|
||||
if (process.argv.length !== 3) {
|
||||
console.log("Usage: bun parse_test_logs.js <log_file>");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const filename = process.argv[2];
|
||||
const testGroups = parseLogFile(filename);
|
||||
|
||||
const totalTime = testGroups.reduce((sum, group) => sum + group.totalDuration, 0) / 1000;
|
||||
const avgTime = testGroups.length > 0 ? totalTime / testGroups.length : 0;
|
||||
|
||||
console.log(
|
||||
`## Slowest Tests Analysis - ${testGroups.length} tests (${totalTime.toFixed(1)}s total, ${avgTime.toFixed(2)}s avg)`,
|
||||
);
|
||||
console.log("");
|
||||
|
||||
// Top 10 summary
|
||||
console.log("**Top 10 slowest tests:**");
|
||||
for (let i = 0; i < Math.min(10, testGroups.length); i++) {
|
||||
const { name, totalDuration, attempts } = testGroups[i];
|
||||
const durationSec = totalDuration / 1000;
|
||||
const testName = name.replace("test/", "").replace(".test.ts", "").replace(".test.js", "");
|
||||
const attemptInfo = formatAttempts(attempts);
|
||||
console.log(`- **${durationSec.toFixed(1)}s** ${testName}${attemptInfo}`);
|
||||
}
|
||||
|
||||
console.log("");
|
||||
|
||||
// Filter tests > 1 second
|
||||
const slowTests = testGroups.filter(test => test.totalDuration > 1000);
|
||||
|
||||
console.log("```");
|
||||
console.log(`All tests > 1s (${slowTests.length} tests):`);
|
||||
|
||||
for (let i = 0; i < slowTests.length; i++) {
|
||||
const { name, totalDuration, attempts } = slowTests[i];
|
||||
const durationSec = totalDuration / 1000;
|
||||
const attemptInfo = formatAttempts(attempts);
|
||||
console.log(`${(i + 1).toString().padStart(3)}. ${durationSec.toFixed(2).padStart(7)}s ${name}${attemptInfo}`);
|
||||
}
|
||||
|
||||
console.log("```");
|
||||
72
scripts/handle-crash-patterns.ts
Normal file
72
scripts/handle-crash-patterns.ts
Normal file
@@ -0,0 +1,72 @@
|
||||
#!/usr/bin/env bun
|
||||
|
||||
const body = process.env.GITHUB_ISSUE_BODY || "";
|
||||
const title = process.env.GITHUB_ISSUE_TITLE || "";
|
||||
const issueNumber = process.env.GITHUB_ISSUE_NUMBER;
|
||||
|
||||
if (!issueNumber) {
|
||||
throw new Error("GITHUB_ISSUE_NUMBER must be set");
|
||||
}
|
||||
|
||||
interface CloseAction {
|
||||
reason: "not_planned" | "completed";
|
||||
comment: string;
|
||||
}
|
||||
|
||||
let closeAction: CloseAction | null = null;
|
||||
|
||||
// Check for workers_terminated
|
||||
if (body.includes("workers_terminated")) {
|
||||
closeAction = {
|
||||
reason: "not_planned",
|
||||
comment: `Duplicate of #15964
|
||||
We are tracking worker stability issues in https://github.com/oven-sh/bun/issues/15964. For now, I recommend against terminating workers when possible.`,
|
||||
};
|
||||
}
|
||||
|
||||
// Check for better-sqlite3 with RunCommand or AutoCommand
|
||||
else if (body.includes("better-sqlite3") && (body.includes("[RunCommand]") || body.includes("[AutoCommand]"))) {
|
||||
closeAction = {
|
||||
reason: "not_planned",
|
||||
comment: `Duplicate of #4290.
|
||||
better-sqlite3 is not supported yet in Bun due to missing V8 C++ APIs. For now, you can try [bun:sqlite](https://bun.com/docs/api/sqlite) for an almost drop-in replacement.`,
|
||||
};
|
||||
}
|
||||
|
||||
// Check for CPU architecture issues (Segmentation Fault/Illegal Instruction with no_avx)
|
||||
else if (
|
||||
(body.includes("Segmentation Fault") ||
|
||||
body.includes("Illegal Instruction") ||
|
||||
body.includes("IllegalInstruction")) &&
|
||||
body.includes("no_avx")
|
||||
) {
|
||||
let comment = `Bun requires a CPU with the micro-architecture [\`nehalem\`](https://en.wikipedia.org/wiki/Nehalem_(microarchitecture)) or later (released in 2008). If you're using a CPU emulator like qemu, then try enabling x86-64-v2.`;
|
||||
|
||||
// Check if it's macOS
|
||||
const platformMatch = body.match(/Platform:\s*([^\n]+)/i) || body.match(/on\s+(macos|darwin)/i);
|
||||
const isMacOS =
|
||||
platformMatch &&
|
||||
(platformMatch[1]?.toLowerCase().includes("darwin") || platformMatch[1]?.toLowerCase().includes("macos"));
|
||||
|
||||
if (isMacOS) {
|
||||
comment += `\n\nIf you're on a macOS silicon device, you're running Bun via the Rosetta CPU emulator and your best option is to run Bun natively instead.`;
|
||||
}
|
||||
|
||||
closeAction = {
|
||||
reason: "not_planned",
|
||||
comment,
|
||||
};
|
||||
}
|
||||
|
||||
if (closeAction) {
|
||||
// Output the action to take
|
||||
console.write(
|
||||
JSON.stringify({
|
||||
close: true,
|
||||
reason: closeAction.reason,
|
||||
comment: closeAction.comment,
|
||||
}),
|
||||
);
|
||||
} else {
|
||||
console.write(JSON.stringify({ close: false }));
|
||||
}
|
||||
@@ -6,6 +6,9 @@ if (!body) {
|
||||
|
||||
const latest = (await Bun.file(join(import.meta.dir, "..", "LATEST")).text()).trim();
|
||||
|
||||
// Check if this is a standalone executable
|
||||
const isStandalone = body.includes("standalone_executable");
|
||||
|
||||
const lines = body.split("\n").reverse();
|
||||
|
||||
for (let line of lines) {
|
||||
@@ -39,6 +42,11 @@ for (let line of lines) {
|
||||
await Bun.write("is-outdated.txt", "true");
|
||||
await Bun.write("outdated.txt", version);
|
||||
|
||||
// Write flag for standalone executables
|
||||
if (isStandalone) {
|
||||
await Bun.write("is-standalone.txt", "true");
|
||||
}
|
||||
|
||||
const isVeryOutdated =
|
||||
major !== latestMajor || minor !== latestMinor || (latestPatch > patch && latestPatch - patch > 3);
|
||||
|
||||
|
||||
@@ -1,35 +0,0 @@
|
||||
#!/bin/bash
|
||||
set -exo pipefail
|
||||
|
||||
WEBKIT_VERSION=$(grep 'set(WEBKIT_TAG' "CMakeLists.txt" | awk '{print $2}' | cut -f 1 -d ')')
|
||||
MIMALLOC_VERSION=$(git rev-parse HEAD:./src/deps/mimalloc)
|
||||
LIBARCHIVE_VERSION=$(git rev-parse HEAD:./src/deps/libarchive)
|
||||
PICOHTTPPARSER_VERSION=$(git rev-parse HEAD:./src/deps/picohttpparser)
|
||||
BORINGSSL_VERSION=$(git rev-parse HEAD:./src/deps/boringssl)
|
||||
ZLIB_VERSION=$(git rev-parse HEAD:./src/deps/zlib)
|
||||
LOLHTML=$(git rev-parse HEAD:./src/deps/lol-html)
|
||||
TINYCC=$(git rev-parse HEAD:./src/deps/tinycc)
|
||||
C_ARES=$(git rev-parse HEAD:./src/deps/c-ares)
|
||||
ZSTD=$(git rev-parse HEAD:./src/deps/zstd)
|
||||
LSHPACK=$(git rev-parse HEAD:./src/deps/ls-hpack)
|
||||
LIBDEFLATE=$(git rev-parse HEAD:./src/deps/libdeflate)
|
||||
|
||||
rm -rf src/generated_versions_list.zig
|
||||
echo "// AUTO-GENERATED FILE. Created via .scripts/write-versions.sh" >src/generated_versions_list.zig
|
||||
echo "" >>src/generated_versions_list.zig
|
||||
echo "pub const boringssl = \"$BORINGSSL_VERSION\";" >>src/generated_versions_list.zig
|
||||
echo "pub const libarchive = \"$LIBARCHIVE_VERSION\";" >>src/generated_versions_list.zig
|
||||
echo "pub const mimalloc = \"$MIMALLOC_VERSION\";" >>src/generated_versions_list.zig
|
||||
echo "pub const picohttpparser = \"$PICOHTTPPARSER_VERSION\";" >>src/generated_versions_list.zig
|
||||
echo "pub const webkit = \"$WEBKIT_VERSION\";" >>src/generated_versions_list.zig
|
||||
echo "pub const zig = @import(\"std\").fmt.comptimePrint(\"{}\", .{@import(\"builtin\").zig_version});" >>src/generated_versions_list.zig
|
||||
echo "pub const zlib = \"$ZLIB_VERSION\";" >>src/generated_versions_list.zig
|
||||
echo "pub const tinycc = \"$TINYCC\";" >>src/generated_versions_list.zig
|
||||
echo "pub const lolhtml = \"$LOLHTML\";" >>src/generated_versions_list.zig
|
||||
echo "pub const c_ares = \"$C_ARES\";" >>src/generated_versions_list.zig
|
||||
echo "pub const libdeflate = \"$LIBDEFLATE\";" >>src/generated_versions_list.zig
|
||||
echo "pub const zstd = \"$ZSTD\";" >>src/generated_versions_list.zig
|
||||
echo "pub const lshpack = \"$LSHPACK\";" >>src/generated_versions_list.zig
|
||||
echo "" >>src/generated_versions_list.zig
|
||||
|
||||
zig fmt src/generated_versions_list.zig
|
||||
@@ -159,7 +159,7 @@ pub inline fn mimalloc_cleanup(force: bool) void {
|
||||
Mimalloc.mi_collect(force);
|
||||
}
|
||||
}
|
||||
pub const versions = @import("./generated_versions_list.zig");
|
||||
// Versions are now handled by CMake-generated header (bun_dependency_versions.h)
|
||||
|
||||
// Enabling huge pages slows down bun by 8x or so
|
||||
// Keeping this code for:
|
||||
|
||||
@@ -18,7 +18,7 @@ pub fn deinit(this: *HTMLScanner) void {
|
||||
for (this.import_records.slice()) |*record| {
|
||||
this.allocator.free(record.path.text);
|
||||
}
|
||||
this.import_records.deinitWithAllocator(this.allocator);
|
||||
this.import_records.deinit(this.allocator);
|
||||
}
|
||||
|
||||
fn createImportRecord(this: *HTMLScanner, input_path: []const u8, kind: ImportKind) !void {
|
||||
@@ -44,7 +44,7 @@ fn createImportRecord(this: *HTMLScanner, input_path: []const u8, kind: ImportKi
|
||||
.range = logger.Range.None,
|
||||
};
|
||||
|
||||
try this.import_records.push(this.allocator, record);
|
||||
try this.import_records.append(this.allocator, record);
|
||||
}
|
||||
|
||||
const debug = bun.Output.scoped(.HTMLScanner, .hidden);
|
||||
@@ -222,7 +222,7 @@ pub fn HTMLProcessor(
|
||||
var builder = lol.HTMLRewriter.Builder.init();
|
||||
defer builder.deinit();
|
||||
|
||||
var selectors: std.BoundedArray(*lol.HTMLSelector, tag_handlers.len + if (visit_document_tags) 3 else 0) = .{};
|
||||
var selectors: bun.BoundedArray(*lol.HTMLSelector, tag_handlers.len + if (visit_document_tags) 3 else 0) = .{};
|
||||
defer for (selectors.slice()) |selector| {
|
||||
selector.deinit();
|
||||
};
|
||||
|
||||
@@ -44,11 +44,20 @@ pub const StandaloneModuleGraph = struct {
|
||||
};
|
||||
}
|
||||
|
||||
pub fn isBunStandaloneFilePath(str: []const u8) bool {
|
||||
pub fn isBunStandaloneFilePathCanonicalized(str: []const u8) bool {
|
||||
return bun.strings.hasPrefixComptime(str, base_path) or
|
||||
(Environment.isWindows and bun.strings.hasPrefixComptime(str, base_public_path));
|
||||
}
|
||||
|
||||
pub fn isBunStandaloneFilePath(str: []const u8) bool {
|
||||
if (Environment.isWindows) {
|
||||
// On Windows, remove NT path prefixes before checking
|
||||
const canonicalized = strings.withoutNTPrefix(u8, str);
|
||||
return isBunStandaloneFilePathCanonicalized(canonicalized);
|
||||
}
|
||||
return isBunStandaloneFilePathCanonicalized(str);
|
||||
}
|
||||
|
||||
pub fn entryPoint(this: *const StandaloneModuleGraph) *File {
|
||||
return &this.files.values()[this.entry_point_id];
|
||||
}
|
||||
@@ -980,27 +989,54 @@ pub const StandaloneModuleGraph = struct {
|
||||
}
|
||||
|
||||
if (Environment.isWindows) {
|
||||
var outfile_buf: bun.OSPathBuffer = undefined;
|
||||
const outfile_slice = brk: {
|
||||
const outfile_w = bun.strings.toWPathNormalized(&outfile_buf, std.fs.path.basenameWindows(outfile));
|
||||
bun.assert(outfile_w.ptr == &outfile_buf);
|
||||
const outfile_buf_u16 = bun.reinterpretSlice(u16, &outfile_buf);
|
||||
outfile_buf_u16[outfile_w.len] = 0;
|
||||
break :brk outfile_buf_u16[0..outfile_w.len :0];
|
||||
// Get the current path of the temp file
|
||||
var temp_buf: bun.PathBuffer = undefined;
|
||||
const temp_path = bun.getFdPath(fd, &temp_buf) catch |err| {
|
||||
return CompileResult.fail(std.fmt.allocPrint(allocator, "Failed to get temp file path: {s}", .{@errorName(err)}) catch "Failed to get temp file path");
|
||||
};
|
||||
|
||||
bun.windows.moveOpenedFileAtLoose(fd, .fromStdDir(root_dir), outfile_slice, true).unwrap() catch |err| {
|
||||
_ = bun.windows.deleteOpenedFile(fd);
|
||||
if (err == error.EISDIR) {
|
||||
return CompileResult.fail(std.fmt.allocPrint(allocator, "{s} is a directory. Please choose a different --outfile or delete the directory", .{outfile}) catch "outfile is a directory");
|
||||
} else {
|
||||
return CompileResult.fail(std.fmt.allocPrint(allocator, "failed to move executable to result path: {s}", .{@errorName(err)}) catch "failed to move executable");
|
||||
}
|
||||
// Build the absolute destination path
|
||||
// On Windows, we need an absolute path for MoveFileExW
|
||||
// Get the current working directory and join with outfile
|
||||
var cwd_buf: bun.PathBuffer = undefined;
|
||||
const cwd_path = bun.getcwd(&cwd_buf) catch |err| {
|
||||
return CompileResult.fail(std.fmt.allocPrint(allocator, "Failed to get current directory: {s}", .{@errorName(err)}) catch "Failed to get current directory");
|
||||
};
|
||||
const dest_path = if (std.fs.path.isAbsolute(outfile))
|
||||
outfile
|
||||
else
|
||||
bun.path.joinAbsString(cwd_path, &[_][]const u8{outfile}, .auto);
|
||||
|
||||
// Convert paths to Windows UTF-16
|
||||
var temp_buf_w: bun.OSPathBuffer = undefined;
|
||||
var dest_buf_w: bun.OSPathBuffer = undefined;
|
||||
const temp_w = bun.strings.toWPathNormalized(&temp_buf_w, temp_path);
|
||||
const dest_w = bun.strings.toWPathNormalized(&dest_buf_w, dest_path);
|
||||
|
||||
// Ensure null termination
|
||||
const temp_buf_u16 = bun.reinterpretSlice(u16, &temp_buf_w);
|
||||
const dest_buf_u16 = bun.reinterpretSlice(u16, &dest_buf_w);
|
||||
temp_buf_u16[temp_w.len] = 0;
|
||||
dest_buf_u16[dest_w.len] = 0;
|
||||
|
||||
// Close the file handle before moving (Windows requires this)
|
||||
fd.close();
|
||||
fd = bun.invalid_fd;
|
||||
|
||||
// Move the file using MoveFileExW
|
||||
if (bun.windows.kernel32.MoveFileExW(temp_buf_u16[0..temp_w.len :0].ptr, dest_buf_u16[0..dest_w.len :0].ptr, bun.windows.MOVEFILE_COPY_ALLOWED | bun.windows.MOVEFILE_REPLACE_EXISTING | bun.windows.MOVEFILE_WRITE_THROUGH) == bun.windows.FALSE) {
|
||||
const err = bun.windows.Win32Error.get();
|
||||
if (err.toSystemErrno()) |sys_err| {
|
||||
if (sys_err == .EISDIR) {
|
||||
return CompileResult.fail(std.fmt.allocPrint(allocator, "{s} is a directory. Please choose a different --outfile or delete the directory", .{outfile}) catch "outfile is a directory");
|
||||
} else {
|
||||
return CompileResult.fail(std.fmt.allocPrint(allocator, "failed to move executable to {s}: {s}", .{ dest_path, @tagName(sys_err) }) catch "failed to move executable");
|
||||
}
|
||||
} else {
|
||||
return CompileResult.fail(std.fmt.allocPrint(allocator, "failed to move executable to {s}", .{dest_path}) catch "failed to move executable");
|
||||
}
|
||||
}
|
||||
|
||||
// Set Windows icon and/or metadata using unified function
|
||||
if (windows_options.icon != null or
|
||||
windows_options.title != null or
|
||||
@@ -1009,25 +1045,9 @@ pub const StandaloneModuleGraph = struct {
|
||||
windows_options.description != null or
|
||||
windows_options.copyright != null)
|
||||
{
|
||||
// Need to get the full path to the executable
|
||||
var full_path_buf: bun.OSPathBuffer = undefined;
|
||||
const full_path = brk: {
|
||||
// Get the directory path
|
||||
var dir_buf: bun.PathBuffer = undefined;
|
||||
const dir_path = bun.getFdPath(bun.FD.fromStdDir(root_dir), &dir_buf) catch |err| {
|
||||
return CompileResult.fail(std.fmt.allocPrint(allocator, "Failed to get directory path: {s}", .{@errorName(err)}) catch "Failed to get directory path");
|
||||
};
|
||||
|
||||
// Join with the outfile name
|
||||
const full_path_str = bun.path.joinAbsString(dir_path, &[_][]const u8{outfile}, .auto);
|
||||
const full_path_w = bun.strings.toWPathNormalized(&full_path_buf, full_path_str);
|
||||
const buf_u16 = bun.reinterpretSlice(u16, &full_path_buf);
|
||||
buf_u16[full_path_w.len] = 0;
|
||||
break :brk buf_u16[0..full_path_w.len :0];
|
||||
};
|
||||
|
||||
// The file has been moved to dest_path
|
||||
bun.windows.rescle.setWindowsMetadata(
|
||||
full_path.ptr,
|
||||
dest_buf_u16[0..dest_w.len :0].ptr,
|
||||
windows_options.icon,
|
||||
windows_options.title,
|
||||
windows_options.publisher,
|
||||
|
||||
@@ -3,11 +3,16 @@ pub const z_allocator = basic.z_allocator;
|
||||
pub const freeWithoutSize = basic.freeWithoutSize;
|
||||
pub const mimalloc = @import("./allocators/mimalloc.zig");
|
||||
pub const MimallocArena = @import("./allocators/MimallocArena.zig");
|
||||
pub const AllocationScope = @import("./allocators/AllocationScope.zig");
|
||||
|
||||
pub const allocation_scope = @import("./allocators/allocation_scope.zig");
|
||||
pub const AllocationScope = allocation_scope.AllocationScope;
|
||||
pub const AllocationScopeIn = allocation_scope.AllocationScopeIn;
|
||||
|
||||
pub const NullableAllocator = @import("./allocators/NullableAllocator.zig");
|
||||
pub const MaxHeapAllocator = @import("./allocators/MaxHeapAllocator.zig");
|
||||
pub const MemoryReportingAllocator = @import("./allocators/MemoryReportingAllocator.zig");
|
||||
pub const LinuxMemFdAllocator = @import("./allocators/LinuxMemFdAllocator.zig");
|
||||
pub const MaybeOwned = @import("./allocators/maybe_owned.zig").MaybeOwned;
|
||||
|
||||
pub fn isSliceInBufferT(comptime T: type, slice: []const T, buffer: []const T) bool {
|
||||
return (@intFromPtr(buffer.ptr) <= @intFromPtr(slice.ptr) and
|
||||
@@ -228,7 +233,7 @@ pub fn BSSList(comptime ValueType: type, comptime _count: anytype) type {
|
||||
|
||||
const Self = @This();
|
||||
|
||||
allocator: Allocator,
|
||||
allocator: std.mem.Allocator,
|
||||
mutex: Mutex = .{},
|
||||
head: *OverflowBlock,
|
||||
tail: OverflowBlock,
|
||||
@@ -316,7 +321,7 @@ pub fn BSSStringList(comptime _count: usize, comptime _item_length: usize) type
|
||||
backing_buf: [count * item_length]u8,
|
||||
backing_buf_used: u64,
|
||||
overflow_list: Overflow,
|
||||
allocator: Allocator,
|
||||
allocator: std.mem.Allocator,
|
||||
slice_buf: [count][]const u8,
|
||||
slice_buf_used: u16,
|
||||
mutex: Mutex = .{},
|
||||
@@ -499,7 +504,7 @@ pub fn BSSMap(comptime ValueType: type, comptime count: anytype, comptime store_
|
||||
|
||||
index: IndexMap,
|
||||
overflow_list: Overflow,
|
||||
allocator: Allocator,
|
||||
allocator: std.mem.Allocator,
|
||||
mutex: Mutex = .{},
|
||||
backing_buf: [count]ValueType,
|
||||
backing_buf_used: u16,
|
||||
@@ -770,36 +775,119 @@ pub fn BSSMap(comptime ValueType: type, comptime count: anytype, comptime store_
|
||||
};
|
||||
}
|
||||
|
||||
pub fn isDefault(allocator: Allocator) bool {
|
||||
/// Checks whether `allocator` is the default allocator.
|
||||
pub fn isDefault(allocator: std.mem.Allocator) bool {
|
||||
return allocator.vtable == c_allocator.vtable;
|
||||
}
|
||||
|
||||
/// Allocate memory for a value of type `T` using the provided allocator, and initialize the memory
|
||||
/// with `value`.
|
||||
///
|
||||
/// If `allocator` is `bun.default_allocator`, this will internally use `bun.tryNew` to benefit from
|
||||
/// the added assertions.
|
||||
pub fn create(comptime T: type, allocator: Allocator, value: T) OOM!*T {
|
||||
if ((comptime Environment.allow_assert) and isDefault(allocator)) {
|
||||
return bun.tryNew(T, value);
|
||||
}
|
||||
const ptr = try allocator.create(T);
|
||||
ptr.* = value;
|
||||
return ptr;
|
||||
// The following functions operate on generic allocators. A generic allocator is a type that
|
||||
// satisfies the `GenericAllocator` interface:
|
||||
//
|
||||
// ```
|
||||
// const GenericAllocator = struct {
|
||||
// // Required.
|
||||
// pub fn allocator(self: Self) std.mem.Allocator;
|
||||
//
|
||||
// // Optional, to allow default-initialization. `.{}` will also be tried.
|
||||
// pub fn init() Self;
|
||||
//
|
||||
// // Optional, if this allocator owns auxiliary resources that need to be deinitialized.
|
||||
// pub fn deinit(self: *Self) void;
|
||||
//
|
||||
// // Optional. Defining a borrowed type makes it clear who owns the allocator and prevents
|
||||
// // `deinit` from being called twice.
|
||||
// pub const Borrowed: type;
|
||||
// pub fn borrow(self: Self) Borrowed;
|
||||
// };
|
||||
// ```
|
||||
//
|
||||
// Generic allocators must support being moved. They cannot contain self-references, and they cannot
|
||||
// serve allocations from a buffer that exists within the allocator itself (have your allocator type
|
||||
// contain a pointer to the buffer instead).
|
||||
//
|
||||
// As an exception, `std.mem.Allocator` is also treated as a generic allocator, and receives
|
||||
// special handling in the following functions to achieve this.
|
||||
|
||||
/// Gets the `std.mem.Allocator` for a given generic allocator.
|
||||
pub fn asStd(allocator: anytype) std.mem.Allocator {
|
||||
return if (comptime @TypeOf(allocator) == std.mem.Allocator)
|
||||
allocator
|
||||
else
|
||||
allocator.allocator();
|
||||
}
|
||||
|
||||
/// Free memory previously allocated by `create`.
|
||||
/// A borrowed version of an allocator.
|
||||
///
|
||||
/// The memory must have been allocated by the `create` function in this namespace, not
|
||||
/// directly by `allocator.create`.
|
||||
pub fn destroy(allocator: Allocator, ptr: anytype) void {
|
||||
if ((comptime Environment.allow_assert) and isDefault(allocator)) {
|
||||
bun.destroy(ptr);
|
||||
} else {
|
||||
allocator.destroy(ptr);
|
||||
}
|
||||
/// Some allocators have a `deinit` method that would be invalid to call multiple times (e.g.,
|
||||
/// `AllocationScope` and `MimallocArena`).
|
||||
///
|
||||
/// If multiple structs or functions need access to the same allocator, we want to avoid simply
|
||||
/// passing the allocator by value, as this could easily lead to `deinit` being called multiple
|
||||
/// times if we forget who really owns the allocator.
|
||||
///
|
||||
/// Passing a pointer is not always a good approach, as this results in a performance penalty for
|
||||
/// zero-sized allocators, and adds another level of indirection in all cases.
|
||||
///
|
||||
/// This function allows allocators that have a concept of being "owned" to define a "borrowed"
|
||||
/// version of the allocator. If no such type is defined, it is assumed the allocator does not
|
||||
/// own any data, and `Borrowed(Allocator)` is simply the same as `Allocator`.
|
||||
pub fn Borrowed(comptime Allocator: type) type {
|
||||
return if (comptime @hasDecl(Allocator, "Borrowed"))
|
||||
Allocator.Borrowed
|
||||
else
|
||||
Allocator;
|
||||
}
|
||||
|
||||
/// Borrows an allocator.
|
||||
///
|
||||
/// See `Borrowed` for the rationale.
|
||||
pub fn borrow(allocator: anytype) Borrowed(@TypeOf(allocator)) {
|
||||
return if (comptime @hasDecl(@TypeOf(allocator), "Borrowed"))
|
||||
allocator.borrow()
|
||||
else
|
||||
allocator;
|
||||
}
|
||||
|
||||
/// A type that behaves like `?Allocator`. This function will either return `?Allocator` itself,
|
||||
/// or an optimized type that behaves like `?Allocator`.
|
||||
///
|
||||
/// Use `initNullable` and `unpackNullable` to work with the returned type.
|
||||
pub fn Nullable(comptime Allocator: type) type {
|
||||
return if (comptime Allocator == std.mem.Allocator)
|
||||
NullableAllocator
|
||||
else if (comptime @hasDecl(Allocator, "Nullable"))
|
||||
Allocator.Nullable
|
||||
else
|
||||
?Allocator;
|
||||
}
|
||||
|
||||
/// Creates a `Nullable(Allocator)` from an optional `Allocator`.
|
||||
pub fn initNullable(comptime Allocator: type, allocator: ?Allocator) Nullable(Allocator) {
|
||||
return if (comptime Allocator == std.mem.Allocator or @hasDecl(Allocator, "Nullable"))
|
||||
.init(allocator)
|
||||
else
|
||||
allocator;
|
||||
}
|
||||
|
||||
/// Turns a `Nullable(Allocator)` back into an optional `Allocator`.
|
||||
pub fn unpackNullable(comptime Allocator: type, allocator: Nullable(Allocator)) ?Allocator {
|
||||
return if (comptime Allocator == std.mem.Allocator or @hasDecl(Allocator, "Nullable"))
|
||||
.get()
|
||||
else
|
||||
allocator;
|
||||
}
|
||||
|
||||
/// The default allocator. This is a zero-sized type whose `allocator` method returns
|
||||
/// `bun.default_allocator`.
|
||||
///
|
||||
/// This type is a `GenericAllocator`; see `src/allocators.zig`.
|
||||
pub const Default = struct {
|
||||
pub fn allocator(self: Default) std.mem.Allocator {
|
||||
_ = self;
|
||||
return c_allocator;
|
||||
}
|
||||
};
|
||||
|
||||
const basic = if (bun.use_mimalloc)
|
||||
@import("./allocators/basic.zig")
|
||||
else
|
||||
@@ -807,7 +895,6 @@ else
|
||||
|
||||
const Environment = @import("./env.zig");
|
||||
const std = @import("std");
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
const bun = @import("bun");
|
||||
const OOM = bun.OOM;
|
||||
|
||||
@@ -1,288 +0,0 @@
|
||||
//! AllocationScope wraps another allocator, providing leak and invalid free assertions.
|
||||
//! It also allows measuring how much memory a scope has allocated.
|
||||
//!
|
||||
//! AllocationScope is conceptually a pointer, so it can be moved without invalidating allocations.
|
||||
//! Therefore, it isn't necessary to pass an AllocationScope by pointer.
|
||||
|
||||
const Self = @This();
|
||||
|
||||
pub const enabled = bun.Environment.enableAllocScopes;
|
||||
|
||||
internal_state: if (enabled) *State else Allocator,
|
||||
|
||||
const State = struct {
|
||||
parent: Allocator,
|
||||
mutex: bun.Mutex,
|
||||
total_memory_allocated: usize,
|
||||
allocations: std.AutoHashMapUnmanaged([*]const u8, Allocation),
|
||||
frees: std.AutoArrayHashMapUnmanaged([*]const u8, Free),
|
||||
/// Once `frees` fills up, entries are overwritten from start to end.
|
||||
free_overwrite_index: std.math.IntFittingRange(0, max_free_tracking + 1),
|
||||
};
|
||||
|
||||
pub const max_free_tracking = 2048 - 1;
|
||||
|
||||
pub const Allocation = struct {
|
||||
allocated_at: StoredTrace,
|
||||
len: usize,
|
||||
extra: Extra,
|
||||
};
|
||||
|
||||
pub const Free = struct {
|
||||
allocated_at: StoredTrace,
|
||||
freed_at: StoredTrace,
|
||||
};
|
||||
|
||||
pub const Extra = union(enum) {
|
||||
none,
|
||||
ref_count: *RefCountDebugData(false),
|
||||
ref_count_threadsafe: *RefCountDebugData(true),
|
||||
|
||||
const RefCountDebugData = @import("../ptr/ref_count.zig").DebugData;
|
||||
};
|
||||
|
||||
pub fn init(parent_alloc: Allocator) Self {
|
||||
const state = if (comptime enabled)
|
||||
bun.new(State, .{
|
||||
.parent = parent_alloc,
|
||||
.total_memory_allocated = 0,
|
||||
.allocations = .empty,
|
||||
.frees = .empty,
|
||||
.free_overwrite_index = 0,
|
||||
.mutex = .{},
|
||||
})
|
||||
else
|
||||
parent_alloc;
|
||||
return .{ .internal_state = state };
|
||||
}
|
||||
|
||||
pub fn deinit(scope: Self) void {
|
||||
if (comptime !enabled) return;
|
||||
|
||||
const state = scope.internal_state;
|
||||
state.mutex.lock();
|
||||
defer bun.destroy(state);
|
||||
defer state.allocations.deinit(state.parent);
|
||||
const count = state.allocations.count();
|
||||
if (count == 0) return;
|
||||
Output.errGeneric("Allocation scope leaked {d} allocations ({})", .{
|
||||
count,
|
||||
bun.fmt.size(state.total_memory_allocated, .{}),
|
||||
});
|
||||
var it = state.allocations.iterator();
|
||||
var n: usize = 0;
|
||||
while (it.next()) |entry| {
|
||||
Output.prettyErrorln("- {any}, len {d}, at:", .{ entry.key_ptr.*, entry.value_ptr.len });
|
||||
bun.crash_handler.dumpStackTrace(entry.value_ptr.allocated_at.trace(), trace_limits);
|
||||
|
||||
switch (entry.value_ptr.extra) {
|
||||
.none => {},
|
||||
inline else => |t| t.onAllocationLeak(@constCast(entry.key_ptr.*[0..entry.value_ptr.len])),
|
||||
}
|
||||
|
||||
n += 1;
|
||||
if (n >= 8) {
|
||||
Output.prettyErrorln("(only showing first 10 leaks)", .{});
|
||||
break;
|
||||
}
|
||||
}
|
||||
Output.panic("Allocation scope leaked {}", .{bun.fmt.size(state.total_memory_allocated, .{})});
|
||||
}
|
||||
|
||||
pub fn allocator(scope: Self) Allocator {
|
||||
const state = scope.internal_state;
|
||||
return if (comptime enabled) .{ .ptr = state, .vtable = &vtable } else state;
|
||||
}
|
||||
|
||||
pub fn parent(scope: Self) Allocator {
|
||||
const state = scope.internal_state;
|
||||
return if (comptime enabled) state.parent else state;
|
||||
}
|
||||
|
||||
pub fn total(self: Self) usize {
|
||||
if (comptime !enabled) @compileError("AllocationScope must be enabled");
|
||||
return self.internal_state.total_memory_allocated;
|
||||
}
|
||||
|
||||
pub fn numAllocations(self: Self) usize {
|
||||
if (comptime !enabled) @compileError("AllocationScope must be enabled");
|
||||
return self.internal_state.allocations.count();
|
||||
}
|
||||
|
||||
const vtable: Allocator.VTable = .{
|
||||
.alloc = alloc,
|
||||
.resize = &std.mem.Allocator.noResize,
|
||||
.remap = &std.mem.Allocator.noRemap,
|
||||
.free = free,
|
||||
};
|
||||
|
||||
// Smaller traces since AllocationScope prints so many
|
||||
pub const trace_limits: bun.crash_handler.WriteStackTraceLimits = .{
|
||||
.frame_count = 6,
|
||||
.stop_at_jsc_llint = true,
|
||||
.skip_stdlib = true,
|
||||
};
|
||||
pub const free_trace_limits: bun.crash_handler.WriteStackTraceLimits = .{
|
||||
.frame_count = 3,
|
||||
.stop_at_jsc_llint = true,
|
||||
.skip_stdlib = true,
|
||||
};
|
||||
|
||||
fn alloc(ctx: *anyopaque, len: usize, alignment: std.mem.Alignment, ret_addr: usize) ?[*]u8 {
|
||||
const state: *State = @ptrCast(@alignCast(ctx));
|
||||
|
||||
state.mutex.lock();
|
||||
defer state.mutex.unlock();
|
||||
state.allocations.ensureUnusedCapacity(state.parent, 1) catch
|
||||
return null;
|
||||
const result = state.parent.vtable.alloc(state.parent.ptr, len, alignment, ret_addr) orelse
|
||||
return null;
|
||||
trackAllocationAssumeCapacity(state, result[0..len], ret_addr, .none);
|
||||
return result;
|
||||
}
|
||||
|
||||
fn trackAllocationAssumeCapacity(state: *State, buf: []const u8, ret_addr: usize, extra: Extra) void {
|
||||
const trace = StoredTrace.capture(ret_addr);
|
||||
state.allocations.putAssumeCapacityNoClobber(buf.ptr, .{
|
||||
.allocated_at = trace,
|
||||
.len = buf.len,
|
||||
.extra = extra,
|
||||
});
|
||||
state.total_memory_allocated += buf.len;
|
||||
}
|
||||
|
||||
fn free(ctx: *anyopaque, buf: []u8, alignment: std.mem.Alignment, ret_addr: usize) void {
|
||||
const state: *State = @ptrCast(@alignCast(ctx));
|
||||
state.mutex.lock();
|
||||
defer state.mutex.unlock();
|
||||
const invalid = trackFreeAssumeLocked(state, buf, ret_addr);
|
||||
|
||||
state.parent.vtable.free(state.parent.ptr, buf, alignment, ret_addr);
|
||||
|
||||
// If asan did not catch the free, panic now.
|
||||
if (invalid) @panic("Invalid free");
|
||||
}
|
||||
|
||||
fn trackFreeAssumeLocked(state: *State, buf: []const u8, ret_addr: usize) bool {
|
||||
if (state.allocations.fetchRemove(buf.ptr)) |entry| {
|
||||
state.total_memory_allocated -= entry.value.len;
|
||||
|
||||
free_entry: {
|
||||
state.frees.put(state.parent, buf.ptr, .{
|
||||
.allocated_at = entry.value.allocated_at,
|
||||
.freed_at = StoredTrace.capture(ret_addr),
|
||||
}) catch break :free_entry;
|
||||
// Store a limited amount of free entries
|
||||
if (state.frees.count() >= max_free_tracking) {
|
||||
const i = state.free_overwrite_index;
|
||||
state.free_overwrite_index = @mod(state.free_overwrite_index + 1, max_free_tracking);
|
||||
state.frees.swapRemoveAt(i);
|
||||
}
|
||||
}
|
||||
return false;
|
||||
} else {
|
||||
bun.Output.errGeneric("Invalid free, pointer {any}, len {d}", .{ buf.ptr, buf.len });
|
||||
|
||||
if (state.frees.get(buf.ptr)) |free_entry_const| {
|
||||
var free_entry = free_entry_const;
|
||||
bun.Output.printErrorln("Pointer allocated here:", .{});
|
||||
bun.crash_handler.dumpStackTrace(free_entry.allocated_at.trace(), trace_limits);
|
||||
bun.Output.printErrorln("Pointer first freed here:", .{});
|
||||
bun.crash_handler.dumpStackTrace(free_entry.freed_at.trace(), free_trace_limits);
|
||||
}
|
||||
|
||||
// do not panic because address sanitizer will catch this case better.
|
||||
// the log message is in case there is a situation where address
|
||||
// sanitizer does not catch the invalid free.
|
||||
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
pub fn assertOwned(scope: Self, ptr: anytype) void {
|
||||
if (comptime !enabled) return;
|
||||
const cast_ptr: [*]const u8 = @ptrCast(switch (@typeInfo(@TypeOf(ptr)).pointer.size) {
|
||||
.c, .one, .many => ptr,
|
||||
.slice => if (ptr.len > 0) ptr.ptr else return,
|
||||
});
|
||||
const state = scope.internal_state;
|
||||
state.mutex.lock();
|
||||
defer state.mutex.unlock();
|
||||
_ = state.allocations.getPtr(cast_ptr) orelse
|
||||
@panic("this pointer was not owned by the allocation scope");
|
||||
}
|
||||
|
||||
pub fn assertUnowned(scope: Self, ptr: anytype) void {
|
||||
if (comptime !enabled) return;
|
||||
const cast_ptr: [*]const u8 = @ptrCast(switch (@typeInfo(@TypeOf(ptr)).pointer.size) {
|
||||
.c, .one, .many => ptr,
|
||||
.slice => if (ptr.len > 0) ptr.ptr else return,
|
||||
});
|
||||
const state = scope.internal_state;
|
||||
state.mutex.lock();
|
||||
defer state.mutex.unlock();
|
||||
if (state.allocations.getPtr(cast_ptr)) |owned| {
|
||||
Output.warn("Owned pointer allocated here:");
|
||||
bun.crash_handler.dumpStackTrace(owned.allocated_at.trace(), trace_limits, trace_limits);
|
||||
}
|
||||
@panic("this pointer was owned by the allocation scope when it was not supposed to be");
|
||||
}
|
||||
|
||||
/// Track an arbitrary pointer. Extra data can be stored in the allocation,
|
||||
/// which will be printed when a leak is detected.
|
||||
pub fn trackExternalAllocation(scope: Self, ptr: []const u8, ret_addr: ?usize, extra: Extra) void {
|
||||
if (comptime !enabled) return;
|
||||
const state = scope.internal_state;
|
||||
state.mutex.lock();
|
||||
defer state.mutex.unlock();
|
||||
bun.handleOom(state.allocations.ensureUnusedCapacity(state.parent, 1));
|
||||
trackAllocationAssumeCapacity(state, ptr, ptr.len, ret_addr orelse @returnAddress(), extra);
|
||||
}
|
||||
|
||||
/// Call when the pointer from `trackExternalAllocation` is freed.
|
||||
/// Returns true if the free was invalid.
|
||||
pub fn trackExternalFree(scope: Self, slice: anytype, ret_addr: ?usize) bool {
|
||||
if (comptime !enabled) return false;
|
||||
const ptr: []const u8 = switch (@typeInfo(@TypeOf(slice))) {
|
||||
.pointer => |p| switch (p.size) {
|
||||
.slice => brk: {
|
||||
if (p.child != u8) @compileError("This function only supports []u8 or [:sentinel]u8 types, you passed in: " ++ @typeName(@TypeOf(slice)));
|
||||
if (p.sentinel_ptr == null) break :brk slice;
|
||||
// Ensure we include the sentinel value
|
||||
break :brk slice[0 .. slice.len + 1];
|
||||
},
|
||||
else => @compileError("This function only supports []u8 or [:sentinel]u8 types, you passed in: " ++ @typeName(@TypeOf(slice))),
|
||||
},
|
||||
else => @compileError("This function only supports []u8 or [:sentinel]u8 types, you passed in: " ++ @typeName(@TypeOf(slice))),
|
||||
};
|
||||
// Empty slice usually means invalid pointer
|
||||
if (ptr.len == 0) return false;
|
||||
const state = scope.internal_state;
|
||||
state.mutex.lock();
|
||||
defer state.mutex.unlock();
|
||||
return trackFreeAssumeLocked(state, ptr, ret_addr orelse @returnAddress());
|
||||
}
|
||||
|
||||
pub fn setPointerExtra(scope: Self, ptr: *anyopaque, extra: Extra) void {
|
||||
if (comptime !enabled) return;
|
||||
const state = scope.internal_state;
|
||||
state.mutex.lock();
|
||||
defer state.mutex.unlock();
|
||||
const allocation = state.allocations.getPtr(ptr) orelse
|
||||
@panic("Pointer not owned by allocation scope");
|
||||
allocation.extra = extra;
|
||||
}
|
||||
|
||||
pub inline fn downcast(a: Allocator) ?Self {
|
||||
return if (enabled and a.vtable == &vtable)
|
||||
.{ .internal_state = @ptrCast(@alignCast(a.ptr)) }
|
||||
else
|
||||
null;
|
||||
}
|
||||
|
||||
const std = @import("std");
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
const bun = @import("bun");
|
||||
const Output = bun.Output;
|
||||
const StoredTrace = bun.crash_handler.StoredTrace;
|
||||
@@ -1,29 +1,104 @@
|
||||
//! This type is a `GenericAllocator`; see `src/allocators.zig`.
|
||||
|
||||
const Self = @This();
|
||||
|
||||
heap: HeapPtr,
|
||||
#heap: if (safety_checks) Owned(*DebugHeap) else *mimalloc.Heap,
|
||||
|
||||
const HeapPtr = if (safety_checks) *DebugHeap else *mimalloc.Heap;
|
||||
/// Uses the default thread-local heap. This type is zero-sized.
|
||||
///
|
||||
/// This type is a `GenericAllocator`; see `src/allocators.zig`.
|
||||
pub const Default = struct {
|
||||
pub fn allocator(self: Default) std.mem.Allocator {
|
||||
_ = self;
|
||||
return Borrowed.getDefault().allocator();
|
||||
}
|
||||
};
|
||||
|
||||
/// Borrowed version of `MimallocArena`, returned by `MimallocArena.borrow`.
|
||||
/// Using this type makes it clear who actually owns the `MimallocArena`, and prevents
|
||||
/// `deinit` from being called twice.
|
||||
///
|
||||
/// This type is a `GenericAllocator`; see `src/allocators.zig`.
|
||||
pub const Borrowed = struct {
|
||||
#heap: BorrowedHeap,
|
||||
|
||||
pub fn allocator(self: Borrowed) std.mem.Allocator {
|
||||
return .{ .ptr = self.#heap, .vtable = &c_allocator_vtable };
|
||||
}
|
||||
|
||||
pub fn getDefault() Borrowed {
|
||||
return .{ .#heap = getThreadHeap() };
|
||||
}
|
||||
|
||||
pub fn gc(self: Borrowed) void {
|
||||
mimalloc.mi_heap_collect(self.getMimallocHeap(), false);
|
||||
}
|
||||
|
||||
pub fn helpCatchMemoryIssues(self: Borrowed) void {
|
||||
if (comptime bun.FeatureFlags.help_catch_memory_issues) {
|
||||
self.gc();
|
||||
bun.mimalloc.mi_collect(false);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn ownsPtr(self: Borrowed, ptr: *const anyopaque) bool {
|
||||
return mimalloc.mi_heap_check_owned(self.getMimallocHeap(), ptr);
|
||||
}
|
||||
|
||||
fn fromOpaque(ptr: *anyopaque) Borrowed {
|
||||
return .{ .#heap = @ptrCast(@alignCast(ptr)) };
|
||||
}
|
||||
|
||||
fn getMimallocHeap(self: Borrowed) *mimalloc.Heap {
|
||||
return if (comptime safety_checks) self.#heap.inner else self.#heap;
|
||||
}
|
||||
|
||||
fn assertThreadLock(self: Borrowed) void {
|
||||
if (comptime safety_checks) self.#heap.thread_lock.assertLocked();
|
||||
}
|
||||
|
||||
fn alignedAlloc(self: Borrowed, len: usize, alignment: Alignment) ?[*]u8 {
|
||||
log("Malloc: {d}\n", .{len});
|
||||
|
||||
const heap = self.getMimallocHeap();
|
||||
const ptr: ?*anyopaque = if (mimalloc.mustUseAlignedAlloc(alignment))
|
||||
mimalloc.mi_heap_malloc_aligned(heap, len, alignment.toByteUnits())
|
||||
else
|
||||
mimalloc.mi_heap_malloc(heap, len);
|
||||
|
||||
if (comptime bun.Environment.isDebug) {
|
||||
const usable = mimalloc.mi_malloc_usable_size(ptr);
|
||||
if (usable < len) {
|
||||
std.debug.panic("mimalloc: allocated size is too small: {d} < {d}", .{ usable, len });
|
||||
}
|
||||
}
|
||||
|
||||
return if (ptr) |p|
|
||||
@as([*]u8, @ptrCast(p))
|
||||
else
|
||||
null;
|
||||
}
|
||||
|
||||
pub fn downcast(std_alloc: std.mem.Allocator) Borrowed {
|
||||
bun.assertf(
|
||||
isInstance(std_alloc),
|
||||
"not a MimallocArena (vtable is {*})",
|
||||
.{std_alloc.vtable},
|
||||
);
|
||||
return .fromOpaque(std_alloc.ptr);
|
||||
}
|
||||
};
|
||||
|
||||
const BorrowedHeap = if (safety_checks) *DebugHeap else *mimalloc.Heap;
|
||||
|
||||
const DebugHeap = struct {
|
||||
inner: *mimalloc.Heap,
|
||||
thread_lock: bun.safety.ThreadLock,
|
||||
};
|
||||
|
||||
fn getMimallocHeap(self: Self) *mimalloc.Heap {
|
||||
return if (comptime safety_checks) self.heap.inner else self.heap;
|
||||
}
|
||||
|
||||
fn fromOpaque(ptr: *anyopaque) Self {
|
||||
return .{ .heap = bun.cast(HeapPtr, ptr) };
|
||||
}
|
||||
|
||||
fn assertThreadLock(self: Self) void {
|
||||
if (comptime safety_checks) self.heap.thread_lock.assertLocked();
|
||||
}
|
||||
|
||||
threadlocal var thread_heap: if (safety_checks) ?DebugHeap else void = if (safety_checks) null;
|
||||
|
||||
fn getThreadHeap() HeapPtr {
|
||||
fn getThreadHeap() BorrowedHeap {
|
||||
if (comptime !safety_checks) return mimalloc.mi_heap_get_default();
|
||||
if (thread_heap == null) {
|
||||
thread_heap = .{
|
||||
@@ -36,23 +111,27 @@ fn getThreadHeap() HeapPtr {
|
||||
|
||||
const log = bun.Output.scoped(.mimalloc, .hidden);
|
||||
|
||||
pub fn allocator(self: Self) std.mem.Allocator {
|
||||
return self.borrow().allocator();
|
||||
}
|
||||
|
||||
pub fn borrow(self: Self) Borrowed {
|
||||
return .{ .#heap = if (comptime safety_checks) self.#heap.get() else self.#heap };
|
||||
}
|
||||
|
||||
/// Internally, mimalloc calls mi_heap_get_default()
|
||||
/// to get the default heap.
|
||||
/// It uses pthread_getspecific to do that.
|
||||
/// We can save those extra calls if we just do it once in here
|
||||
pub fn getThreadLocalDefault() Allocator {
|
||||
return Allocator{ .ptr = getThreadHeap(), .vtable = &c_allocator_vtable };
|
||||
pub fn getThreadLocalDefault() std.mem.Allocator {
|
||||
return Borrowed.getDefault().allocator();
|
||||
}
|
||||
|
||||
pub fn backingAllocator(_: Self) Allocator {
|
||||
pub fn backingAllocator(_: Self) std.mem.Allocator {
|
||||
return getThreadLocalDefault();
|
||||
}
|
||||
|
||||
pub fn allocator(self: Self) Allocator {
|
||||
return Allocator{ .ptr = self.heap, .vtable = &c_allocator_vtable };
|
||||
}
|
||||
|
||||
pub fn dumpThreadStats(_: *Self) void {
|
||||
pub fn dumpThreadStats(_: Self) void {
|
||||
const dump_fn = struct {
|
||||
pub fn dump(textZ: [*:0]const u8, _: ?*anyopaque) callconv(.C) void {
|
||||
const text = bun.span(textZ);
|
||||
@@ -63,7 +142,7 @@ pub fn dumpThreadStats(_: *Self) void {
|
||||
bun.Output.flush();
|
||||
}
|
||||
|
||||
pub fn dumpStats(_: *Self) void {
|
||||
pub fn dumpStats(_: Self) void {
|
||||
const dump_fn = struct {
|
||||
pub fn dump(textZ: [*:0]const u8, _: ?*anyopaque) callconv(.C) void {
|
||||
const text = bun.span(textZ);
|
||||
@@ -75,9 +154,9 @@ pub fn dumpStats(_: *Self) void {
|
||||
}
|
||||
|
||||
pub fn deinit(self: *Self) void {
|
||||
const mimalloc_heap = self.getMimallocHeap();
|
||||
const mimalloc_heap = self.borrow().getMimallocHeap();
|
||||
if (comptime safety_checks) {
|
||||
bun.destroy(self.heap);
|
||||
self.#heap.deinit();
|
||||
}
|
||||
mimalloc.mi_heap_destroy(mimalloc_heap);
|
||||
self.* = undefined;
|
||||
@@ -85,70 +164,43 @@ pub fn deinit(self: *Self) void {
|
||||
|
||||
pub fn init() Self {
|
||||
const mimalloc_heap = mimalloc.mi_heap_new() orelse bun.outOfMemory();
|
||||
const heap = if (comptime safety_checks)
|
||||
bun.new(DebugHeap, .{
|
||||
.inner = mimalloc_heap,
|
||||
.thread_lock = .initLocked(),
|
||||
})
|
||||
else
|
||||
mimalloc_heap;
|
||||
return .{ .heap = heap };
|
||||
if (comptime !safety_checks) return .{ .#heap = mimalloc_heap };
|
||||
const heap: Owned(*DebugHeap) = .new(.{
|
||||
.inner = mimalloc_heap,
|
||||
.thread_lock = .initLocked(),
|
||||
});
|
||||
return .{ .#heap = heap };
|
||||
}
|
||||
|
||||
pub fn gc(self: Self) void {
|
||||
mimalloc.mi_heap_collect(self.getMimallocHeap(), false);
|
||||
self.borrow().gc();
|
||||
}
|
||||
|
||||
pub inline fn helpCatchMemoryIssues(self: Self) void {
|
||||
if (comptime bun.FeatureFlags.help_catch_memory_issues) {
|
||||
self.gc();
|
||||
bun.mimalloc.mi_collect(false);
|
||||
}
|
||||
pub fn helpCatchMemoryIssues(self: Self) void {
|
||||
self.borrow().helpCatchMemoryIssues();
|
||||
}
|
||||
|
||||
pub fn ownsPtr(self: Self, ptr: *const anyopaque) bool {
|
||||
return mimalloc.mi_heap_check_owned(self.getMimallocHeap(), ptr);
|
||||
}
|
||||
|
||||
fn alignedAlloc(self: Self, len: usize, alignment: Alignment) ?[*]u8 {
|
||||
log("Malloc: {d}\n", .{len});
|
||||
|
||||
const heap = self.getMimallocHeap();
|
||||
const ptr: ?*anyopaque = if (mimalloc.mustUseAlignedAlloc(alignment))
|
||||
mimalloc.mi_heap_malloc_aligned(heap, len, alignment.toByteUnits())
|
||||
else
|
||||
mimalloc.mi_heap_malloc(heap, len);
|
||||
|
||||
if (comptime bun.Environment.isDebug) {
|
||||
const usable = mimalloc.mi_malloc_usable_size(ptr);
|
||||
if (usable < len) {
|
||||
std.debug.panic("mimalloc: allocated size is too small: {d} < {d}", .{ usable, len });
|
||||
}
|
||||
}
|
||||
|
||||
return if (ptr) |p|
|
||||
@as([*]u8, @ptrCast(p))
|
||||
else
|
||||
null;
|
||||
return self.borrow().ownsPtr(ptr);
|
||||
}
|
||||
|
||||
fn alignedAllocSize(ptr: [*]u8) usize {
|
||||
return mimalloc.mi_malloc_usable_size(ptr);
|
||||
}
|
||||
|
||||
fn alloc(ptr: *anyopaque, len: usize, alignment: Alignment, _: usize) ?[*]u8 {
|
||||
const self = fromOpaque(ptr);
|
||||
fn vtable_alloc(ptr: *anyopaque, len: usize, alignment: Alignment, _: usize) ?[*]u8 {
|
||||
const self: Borrowed = .fromOpaque(ptr);
|
||||
self.assertThreadLock();
|
||||
return alignedAlloc(self, len, alignment);
|
||||
return self.alignedAlloc(len, alignment);
|
||||
}
|
||||
|
||||
fn resize(ptr: *anyopaque, buf: []u8, _: Alignment, new_len: usize, _: usize) bool {
|
||||
const self = fromOpaque(ptr);
|
||||
fn vtable_resize(ptr: *anyopaque, buf: []u8, _: Alignment, new_len: usize, _: usize) bool {
|
||||
const self: Borrowed = .fromOpaque(ptr);
|
||||
self.assertThreadLock();
|
||||
return mimalloc.mi_expand(buf.ptr, new_len) != null;
|
||||
}
|
||||
|
||||
fn free(
|
||||
fn vtable_free(
|
||||
_: *anyopaque,
|
||||
buf: []u8,
|
||||
alignment: Alignment,
|
||||
@@ -187,8 +239,8 @@ fn free(
|
||||
/// `ret_addr` is optionally provided as the first return address of the
|
||||
/// allocation call stack. If the value is `0` it means no return address
|
||||
/// has been provided.
|
||||
fn remap(ptr: *anyopaque, buf: []u8, alignment: Alignment, new_len: usize, _: usize) ?[*]u8 {
|
||||
const self = fromOpaque(ptr);
|
||||
fn vtable_remap(ptr: *anyopaque, buf: []u8, alignment: Alignment, new_len: usize, _: usize) ?[*]u8 {
|
||||
const self: Borrowed = .fromOpaque(ptr);
|
||||
self.assertThreadLock();
|
||||
const heap = self.getMimallocHeap();
|
||||
const aligned_size = alignment.toByteUnits();
|
||||
@@ -196,23 +248,22 @@ fn remap(ptr: *anyopaque, buf: []u8, alignment: Alignment, new_len: usize, _: us
|
||||
return @ptrCast(value);
|
||||
}
|
||||
|
||||
pub fn isInstance(allocator_: Allocator) bool {
|
||||
return allocator_.vtable == &c_allocator_vtable;
|
||||
pub fn isInstance(alloc: std.mem.Allocator) bool {
|
||||
return alloc.vtable == &c_allocator_vtable;
|
||||
}
|
||||
|
||||
const c_allocator_vtable = Allocator.VTable{
|
||||
.alloc = &Self.alloc,
|
||||
.resize = &Self.resize,
|
||||
.remap = &Self.remap,
|
||||
.free = &Self.free,
|
||||
const c_allocator_vtable = std.mem.Allocator.VTable{
|
||||
.alloc = vtable_alloc,
|
||||
.resize = vtable_resize,
|
||||
.remap = vtable_remap,
|
||||
.free = vtable_free,
|
||||
};
|
||||
|
||||
const std = @import("std");
|
||||
const Alignment = std.mem.Alignment;
|
||||
|
||||
const bun = @import("bun");
|
||||
const assert = bun.assert;
|
||||
const mimalloc = bun.mimalloc;
|
||||
const Owned = bun.ptr.Owned;
|
||||
const safety_checks = bun.Environment.ci_assert;
|
||||
|
||||
const Alignment = std.mem.Alignment;
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
@@ -4,8 +4,7 @@ const NullableAllocator = @This();
|
||||
|
||||
ptr: *anyopaque = undefined,
|
||||
// Utilize the null pointer optimization on the vtable instead of
|
||||
// the regular ptr because some allocator implementations might tag their
|
||||
// `ptr` property.
|
||||
// the regular `ptr` because `ptr` may be undefined.
|
||||
vtable: ?*const std.mem.Allocator.VTable = null,
|
||||
|
||||
pub inline fn init(allocator: ?std.mem.Allocator) NullableAllocator {
|
||||
|
||||
555
src/allocators/allocation_scope.zig
Normal file
555
src/allocators/allocation_scope.zig
Normal file
@@ -0,0 +1,555 @@
|
||||
//! AllocationScope wraps another allocator, providing leak and invalid free assertions.
|
||||
//! It also allows measuring how much memory a scope has allocated.
|
||||
|
||||
const allocation_scope = @This();
|
||||
|
||||
/// An allocation scope with a dynamically typed parent allocator. Prefer using a concrete type,
|
||||
/// like `AllocationScopeIn(bun.DefaultAllocator)`.
|
||||
pub const AllocationScope = AllocationScopeIn(std.mem.Allocator);
|
||||
|
||||
pub const Allocation = struct {
|
||||
allocated_at: StoredTrace,
|
||||
len: usize,
|
||||
extra: Extra,
|
||||
};
|
||||
|
||||
pub const Free = struct {
|
||||
allocated_at: StoredTrace,
|
||||
freed_at: StoredTrace,
|
||||
};
|
||||
|
||||
pub const Extra = struct {
|
||||
ptr: *anyopaque,
|
||||
vtable: ?*const VTable,
|
||||
|
||||
pub const none: Extra = .{ .ptr = undefined, .vtable = null };
|
||||
|
||||
pub const VTable = struct {
|
||||
onAllocationLeak: *const fn (*anyopaque, data: []u8) void,
|
||||
};
|
||||
};
|
||||
|
||||
pub const Stats = struct {
|
||||
total_memory_allocated: usize,
|
||||
num_allocations: usize,
|
||||
};
|
||||
|
||||
pub const FreeError = error{
|
||||
/// Tried to free memory that wasn't allocated by this `AllocationScope`, or was already freed.
|
||||
NotAllocated,
|
||||
};
|
||||
|
||||
pub const enabled = bun.Environment.enableAllocScopes;
|
||||
pub const max_free_tracking = 2048 - 1;
|
||||
|
||||
const History = struct {
|
||||
const Self = @This();
|
||||
|
||||
total_memory_allocated: usize = 0,
|
||||
/// Allocated by `State.parent`.
|
||||
allocations: std.AutoHashMapUnmanaged([*]const u8, Allocation) = .empty,
|
||||
/// Allocated by `State.parent`.
|
||||
frees: std.AutoArrayHashMapUnmanaged([*]const u8, Free) = .empty,
|
||||
/// Once `frees` fills up, entries are overwritten from start to end.
|
||||
free_overwrite_index: std.math.IntFittingRange(0, max_free_tracking + 1) = 0,
|
||||
|
||||
/// `allocator` should be `State.parent`.
|
||||
fn deinit(self: *Self, allocator: std.mem.Allocator) void {
|
||||
self.allocations.deinit(allocator);
|
||||
self.frees.deinit(allocator);
|
||||
self.* = undefined;
|
||||
}
|
||||
};
|
||||
|
||||
const LockedState = struct {
|
||||
const Self = @This();
|
||||
|
||||
/// Should be the same as `State.parent`.
|
||||
parent: std.mem.Allocator,
|
||||
history: *History,
|
||||
|
||||
fn alloc(self: Self, len: usize, alignment: std.mem.Alignment, ret_addr: usize) bun.OOM![*]u8 {
|
||||
const result = self.parent.rawAlloc(len, alignment, ret_addr) orelse
|
||||
return error.OutOfMemory;
|
||||
errdefer self.parent.rawFree(result[0..len], alignment, ret_addr);
|
||||
try self.trackAllocation(result[0..len], ret_addr, .none);
|
||||
return result;
|
||||
}
|
||||
|
||||
fn free(self: Self, buf: []u8, alignment: std.mem.Alignment, ret_addr: usize) void {
|
||||
const success = if (self.trackFree(buf, ret_addr))
|
||||
true
|
||||
else |err| switch (err) {
|
||||
error.NotAllocated => false,
|
||||
};
|
||||
if (success or bun.Environment.enable_asan) {
|
||||
self.parent.rawFree(buf, alignment, ret_addr);
|
||||
}
|
||||
if (!success) {
|
||||
// If asan did not catch the free, panic now.
|
||||
std.debug.panic("Invalid free: {*}", .{buf});
|
||||
}
|
||||
}
|
||||
|
||||
fn assertOwned(self: Self, ptr: anytype) void {
|
||||
const cast_ptr: [*]const u8 = @ptrCast(switch (@typeInfo(@TypeOf(ptr)).pointer.size) {
|
||||
.c, .one, .many => ptr,
|
||||
.slice => if (ptr.len > 0) ptr.ptr else return,
|
||||
});
|
||||
if (!self.history.allocations.contains(cast_ptr)) {
|
||||
@panic("this pointer was not owned by the allocation scope");
|
||||
}
|
||||
}
|
||||
|
||||
fn assertUnowned(self: Self, ptr: anytype) void {
|
||||
const cast_ptr: [*]const u8 = @ptrCast(switch (@typeInfo(@TypeOf(ptr)).pointer.size) {
|
||||
.c, .one, .many => ptr,
|
||||
.slice => if (ptr.len > 0) ptr.ptr else return,
|
||||
});
|
||||
if (self.history.allocations.getPtr(cast_ptr)) |owned| {
|
||||
Output.warn("Owned pointer allocated here:");
|
||||
bun.crash_handler.dumpStackTrace(
|
||||
owned.allocated_at.trace(),
|
||||
trace_limits,
|
||||
trace_limits,
|
||||
);
|
||||
@panic("this pointer was owned by the allocation scope when it was not supposed to be");
|
||||
}
|
||||
}
|
||||
|
||||
fn trackAllocation(self: Self, buf: []const u8, ret_addr: usize, extra: Extra) bun.OOM!void {
|
||||
const trace = StoredTrace.capture(ret_addr);
|
||||
try self.history.allocations.putNoClobber(self.parent, buf.ptr, .{
|
||||
.allocated_at = trace,
|
||||
.len = buf.len,
|
||||
.extra = extra,
|
||||
});
|
||||
self.history.total_memory_allocated += buf.len;
|
||||
}
|
||||
|
||||
fn trackFree(self: Self, buf: []const u8, ret_addr: usize) FreeError!void {
|
||||
const entry = self.history.allocations.fetchRemove(buf.ptr) orelse {
|
||||
Output.errGeneric("Invalid free, pointer {any}, len {d}", .{ buf.ptr, buf.len });
|
||||
|
||||
if (self.history.frees.getPtr(buf.ptr)) |free_entry| {
|
||||
Output.printErrorln("Pointer allocated here:", .{});
|
||||
bun.crash_handler.dumpStackTrace(free_entry.allocated_at.trace(), trace_limits);
|
||||
Output.printErrorln("Pointer first freed here:", .{});
|
||||
bun.crash_handler.dumpStackTrace(free_entry.freed_at.trace(), free_trace_limits);
|
||||
}
|
||||
|
||||
// do not panic because address sanitizer will catch this case better.
|
||||
// the log message is in case there is a situation where address
|
||||
// sanitizer does not catch the invalid free.
|
||||
return error.NotAllocated;
|
||||
};
|
||||
|
||||
self.history.total_memory_allocated -= entry.value.len;
|
||||
|
||||
// Store a limited amount of free entries
|
||||
if (self.history.frees.count() >= max_free_tracking) {
|
||||
const i = self.history.free_overwrite_index;
|
||||
self.history.free_overwrite_index =
|
||||
@mod(self.history.free_overwrite_index + 1, max_free_tracking);
|
||||
self.history.frees.swapRemoveAt(i);
|
||||
}
|
||||
|
||||
self.history.frees.put(self.parent, buf.ptr, .{
|
||||
.allocated_at = entry.value.allocated_at,
|
||||
.freed_at = StoredTrace.capture(ret_addr),
|
||||
}) catch |err| bun.handleOom(err);
|
||||
}
|
||||
};
|
||||
|
||||
const State = struct {
|
||||
const Self = @This();
|
||||
|
||||
/// This field should not be modified. Therefore, it doesn't need to be protected by the mutex.
|
||||
parent: std.mem.Allocator,
|
||||
history: bun.threading.Guarded(History),
|
||||
|
||||
fn init(parent_alloc: std.mem.Allocator) Self {
|
||||
return .{
|
||||
.parent = parent_alloc,
|
||||
.history = .init(.{}),
|
||||
};
|
||||
}
|
||||
|
||||
fn lock(self: *Self) LockedState {
|
||||
return .{
|
||||
.parent = self.parent,
|
||||
.history = self.history.lock(),
|
||||
};
|
||||
}
|
||||
|
||||
fn unlock(self: *Self) void {
|
||||
self.history.unlock();
|
||||
}
|
||||
|
||||
fn deinit(self: *Self) void {
|
||||
defer self.* = undefined;
|
||||
var history = self.history.intoUnprotected();
|
||||
defer history.deinit();
|
||||
|
||||
const count = history.allocations.count();
|
||||
if (count == 0) return;
|
||||
Output.errGeneric("Allocation scope leaked {d} allocations ({})", .{
|
||||
count,
|
||||
bun.fmt.size(history.total_memory_allocated, .{}),
|
||||
});
|
||||
|
||||
var it = history.allocations.iterator();
|
||||
var n: usize = 0;
|
||||
while (it.next()) |entry| : (n += 1) {
|
||||
if (n >= 10) {
|
||||
Output.prettyErrorln("(only showing first 10 leaks)", .{});
|
||||
break;
|
||||
}
|
||||
Output.prettyErrorln(
|
||||
"- {any}, len {d}, at:",
|
||||
.{ entry.key_ptr.*, entry.value_ptr.len },
|
||||
);
|
||||
bun.crash_handler.dumpStackTrace(
|
||||
entry.value_ptr.allocated_at.trace(),
|
||||
trace_limits,
|
||||
);
|
||||
const extra = entry.value_ptr.extra;
|
||||
if (extra.vtable) |extra_vtable| {
|
||||
extra_vtable.onAllocationLeak(
|
||||
extra.ptr,
|
||||
@constCast(entry.key_ptr.*[0..entry.value_ptr.len]),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
Output.panic(
|
||||
"Allocation scope leaked {}",
|
||||
.{bun.fmt.size(history.total_memory_allocated, .{})},
|
||||
);
|
||||
}
|
||||
|
||||
fn trackExternalAllocation(self: *Self, ptr: []const u8, ret_addr: ?usize, extra: Extra) void {
|
||||
const locked = self.lock();
|
||||
defer self.unlock();
|
||||
locked.trackAllocation(ptr, ret_addr orelse @returnAddress(), extra) catch |err|
|
||||
bun.handleOom(err);
|
||||
}
|
||||
|
||||
fn trackExternalFree(self: *Self, slice: anytype, ret_addr: ?usize) FreeError!void {
|
||||
const invalidType = struct {
|
||||
fn invalidType() noreturn {
|
||||
@compileError(std.fmt.comptimePrint(
|
||||
"This function only supports []u8 or [:sentinel]u8 types, you passed in: {s}",
|
||||
.{@typeName(@TypeOf(slice))},
|
||||
));
|
||||
}
|
||||
}.invalidType;
|
||||
|
||||
const ptr: []const u8 = switch (@typeInfo(@TypeOf(slice))) {
|
||||
.pointer => |p| switch (p.size) {
|
||||
.slice => brk: {
|
||||
if (p.child != u8) invalidType();
|
||||
if (p.sentinel_ptr == null) break :brk slice;
|
||||
// Ensure we include the sentinel value
|
||||
break :brk slice[0 .. slice.len + 1];
|
||||
},
|
||||
else => invalidType(),
|
||||
},
|
||||
else => invalidType(),
|
||||
};
|
||||
// Empty slice usually means invalid pointer
|
||||
if (ptr.len == 0) return;
|
||||
const locked = self.lock();
|
||||
defer self.unlock();
|
||||
return locked.trackFree(ptr, ret_addr orelse @returnAddress());
|
||||
}
|
||||
|
||||
fn setPointerExtra(self: *Self, ptr: *anyopaque, extra: Extra) void {
|
||||
const locked = self.lock();
|
||||
defer self.unlock();
|
||||
const allocation = locked.history.allocations.getPtr(@ptrCast(ptr)) orelse
|
||||
@panic("Pointer not owned by allocation scope");
|
||||
allocation.extra = extra;
|
||||
}
|
||||
};
|
||||
|
||||
/// An allocation scope that uses a specific kind of parent allocator.
|
||||
///
|
||||
/// This type is a `GenericAllocator`; see `src/allocators.zig`.
|
||||
pub fn AllocationScopeIn(comptime Allocator: type) type {
|
||||
const BorrowedAllocator = bun.allocators.Borrowed(Allocator);
|
||||
|
||||
// Borrowed version of `AllocationScope`. Access this type as `AllocationScope.Borrowed`.
|
||||
const BorrowedScope = struct {
|
||||
const Self = @This();
|
||||
|
||||
#parent: BorrowedAllocator,
|
||||
#state: if (enabled) *State else void,
|
||||
|
||||
pub fn allocator(self: Self) std.mem.Allocator {
|
||||
return if (comptime enabled)
|
||||
.{ .ptr = self.#state, .vtable = &vtable }
|
||||
else
|
||||
bun.allocators.asStd(self.#parent);
|
||||
}
|
||||
|
||||
pub fn parent(self: Self) BorrowedAllocator {
|
||||
return self.#parent;
|
||||
}
|
||||
|
||||
/// Deinitializes a borrowed allocation scope. This does not deinitialize the
|
||||
/// `AllocationScope` itself; only the owner of the `AllocationScope` should do that.
|
||||
///
|
||||
/// This method doesn't need to be called unless `bun.allocators.Borrowed(Allocator)` has
|
||||
/// a `deinit` method.
|
||||
pub fn deinit(self: *Self) void {
|
||||
bun.memory.deinit(&self.#parent);
|
||||
self.* = undefined;
|
||||
}
|
||||
|
||||
pub fn stats(self: Self) Stats {
|
||||
if (comptime !enabled) @compileError("AllocationScope must be enabled");
|
||||
const state = self.#state.lock();
|
||||
defer self.#state.unlock();
|
||||
return .{
|
||||
.total_memory_allocated = state.history.total_memory_allocated,
|
||||
.num_allocations = state.history.allocations.count(),
|
||||
};
|
||||
}
|
||||
|
||||
pub fn assertOwned(self: Self, ptr: anytype) void {
|
||||
if (comptime !enabled) return;
|
||||
const state = self.#state.lock();
|
||||
defer self.#state.unlock();
|
||||
state.assertOwned(ptr);
|
||||
}
|
||||
|
||||
pub fn assertUnowned(self: Self, ptr: anytype) void {
|
||||
if (comptime !enabled) return;
|
||||
const state = self.#state.lock();
|
||||
defer self.#state.unlock();
|
||||
state.assertUnowned(ptr);
|
||||
}
|
||||
|
||||
pub fn trackExternalAllocation(
|
||||
self: Self,
|
||||
ptr: []const u8,
|
||||
ret_addr: ?usize,
|
||||
extra: Extra,
|
||||
) void {
|
||||
if (comptime enabled) self.#state.trackExternalAllocation(ptr, ret_addr, extra);
|
||||
}
|
||||
|
||||
pub fn trackExternalFree(self: Self, slice: anytype, ret_addr: ?usize) FreeError!void {
|
||||
return if (comptime enabled) self.#state.trackExternalFree(slice, ret_addr);
|
||||
}
|
||||
|
||||
pub fn setPointerExtra(self: Self, ptr: *anyopaque, extra: Extra) void {
|
||||
if (comptime enabled) self.#state.setPointerExtra(ptr, extra);
|
||||
}
|
||||
|
||||
fn downcastImpl(
|
||||
std_alloc: std.mem.Allocator,
|
||||
parent_alloc: if (Allocator == std.mem.Allocator)
|
||||
?BorrowedAllocator
|
||||
else
|
||||
BorrowedAllocator,
|
||||
) Self {
|
||||
const state = if (comptime enabled) blk: {
|
||||
bun.assertf(
|
||||
std_alloc.vtable == &vtable,
|
||||
"allocator is not an allocation scope (has vtable {*})",
|
||||
.{std_alloc.vtable},
|
||||
);
|
||||
const state: *State = @ptrCast(@alignCast(std_alloc.ptr));
|
||||
break :blk state;
|
||||
};
|
||||
|
||||
const current_std_parent = if (comptime enabled)
|
||||
state.parent
|
||||
else
|
||||
std_alloc;
|
||||
|
||||
const new_parent = if (comptime Allocator == std.mem.Allocator)
|
||||
parent_alloc orelse current_std_parent
|
||||
else
|
||||
parent_alloc;
|
||||
|
||||
const new_std_parent = bun.allocators.asStd(new_parent);
|
||||
bun.safety.alloc.assertEqFmt(
|
||||
current_std_parent,
|
||||
new_std_parent,
|
||||
"tried to downcast allocation scope with wrong parent allocator",
|
||||
.{},
|
||||
);
|
||||
return .{ .#parent = new_parent, .#state = state };
|
||||
}
|
||||
|
||||
/// Converts an `std.mem.Allocator` into a borrowed allocation scope, with a given parent
|
||||
/// allocator.
|
||||
///
|
||||
/// Requirements:
|
||||
///
|
||||
/// * `std_alloc` must have come from `AllocationScopeIn(Allocator).allocator` (or the
|
||||
/// equivalent method on a `Borrowed` instance).
|
||||
///
|
||||
/// * `parent_alloc` must be equivalent to the (borrowed) parent allocator of the original
|
||||
/// allocation scope (that is, the return value of `AllocationScopeIn(Allocator).parent`).
|
||||
/// In particular, `bun.allocators.asStd` must return the same value for each allocator.
|
||||
pub fn downcastIn(std_alloc: std.mem.Allocator, parent_alloc: BorrowedAllocator) Self {
|
||||
return downcastImpl(std_alloc, parent_alloc);
|
||||
}
|
||||
|
||||
/// Converts an `std.mem.Allocator` into a borrowed allocation scope.
|
||||
///
|
||||
/// Requirements:
|
||||
///
|
||||
/// * `std_alloc` must have come from `AllocationScopeIn(Allocator).allocator` (or the
|
||||
/// equivalent method on a `Borrowed` instance).
|
||||
///
|
||||
/// * One of the following must be true:
|
||||
///
|
||||
/// 1. `Allocator` is `std.mem.Allocator`.
|
||||
///
|
||||
/// 2. The parent allocator of the original allocation scope is equivalent to a
|
||||
/// default-initialized borrowed `Allocator`, as returned by
|
||||
/// `bun.memory.initDefault(bun.allocators.Borrowed(Allocator))`. This is the case
|
||||
/// for `bun.DefaultAllocator`.
|
||||
pub fn downcast(std_alloc: std.mem.Allocator) Self {
|
||||
return downcastImpl(std_alloc, if (comptime Allocator == std.mem.Allocator)
|
||||
null
|
||||
else
|
||||
bun.memory.initDefault(BorrowedAllocator));
|
||||
}
|
||||
};
|
||||
|
||||
return struct {
|
||||
const Self = @This();
|
||||
|
||||
#parent: Allocator,
|
||||
#state: if (Self.enabled) Owned(*State) else void,
|
||||
|
||||
pub const enabled = allocation_scope.enabled;
|
||||
|
||||
/// Borrowed version of `AllocationScope`, returned by `AllocationScope.borrow`.
|
||||
/// Using this type makes it clear who actually owns the `AllocationScope`, and prevents
|
||||
/// `deinit` from being called twice.
|
||||
///
|
||||
/// This type is a `GenericAllocator`; see `src/allocators.zig`.
|
||||
pub const Borrowed = BorrowedScope;
|
||||
|
||||
pub fn init(parent_alloc: Allocator) Self {
|
||||
return .{
|
||||
.#parent = parent_alloc,
|
||||
.#state = if (comptime Self.enabled) .new(.init(
|
||||
bun.allocators.asStd(parent_alloc),
|
||||
)),
|
||||
};
|
||||
}
|
||||
|
||||
pub fn initDefault() Self {
|
||||
return .init(bun.memory.initDefault(Allocator));
|
||||
}
|
||||
|
||||
/// Borrows this `AllocationScope`. Use this method instead of copying `self`, as that makes
|
||||
/// it hard to know who owns the `AllocationScope`, and could lead to `deinit` being called
|
||||
/// twice.
|
||||
pub fn borrow(self: Self) Borrowed {
|
||||
return .{
|
||||
.#parent = self.parent(),
|
||||
.#state = if (comptime Self.enabled) self.#state.get(),
|
||||
};
|
||||
}
|
||||
|
||||
pub fn allocator(self: Self) std.mem.Allocator {
|
||||
return self.borrow().allocator();
|
||||
}
|
||||
|
||||
pub fn deinit(self: *Self) void {
|
||||
bun.memory.deinit(&self.#parent);
|
||||
if (comptime Self.enabled) self.#state.deinit();
|
||||
self.* = undefined;
|
||||
}
|
||||
|
||||
pub fn parent(self: Self) BorrowedAllocator {
|
||||
return bun.allocators.borrow(self.#parent);
|
||||
}
|
||||
|
||||
pub fn stats(self: Self) Stats {
|
||||
return self.borrow().stats();
|
||||
}
|
||||
|
||||
pub fn assertOwned(self: Self, ptr: anytype) void {
|
||||
self.borrow().assertOwned(ptr);
|
||||
}
|
||||
|
||||
pub fn assertUnowned(self: Self, ptr: anytype) void {
|
||||
self.borrow().assertUnowned(ptr);
|
||||
}
|
||||
|
||||
/// Track an arbitrary pointer. Extra data can be stored in the allocation, which will be
|
||||
/// printed when a leak is detected.
|
||||
pub fn trackExternalAllocation(
|
||||
self: Self,
|
||||
ptr: []const u8,
|
||||
ret_addr: ?usize,
|
||||
extra: Extra,
|
||||
) void {
|
||||
self.borrow().trackExternalAllocation(ptr, ret_addr, extra);
|
||||
}
|
||||
|
||||
/// Call when the pointer from `trackExternalAllocation` is freed.
|
||||
pub fn trackExternalFree(self: Self, slice: anytype, ret_addr: ?usize) FreeError!void {
|
||||
return self.borrow().trackExternalFree(slice, ret_addr);
|
||||
}
|
||||
|
||||
pub fn setPointerExtra(self: Self, ptr: *anyopaque, extra: Extra) void {
|
||||
return self.borrow().setPointerExtra(ptr, extra);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
const vtable: std.mem.Allocator.VTable = .{
|
||||
.alloc = vtable_alloc,
|
||||
.resize = std.mem.Allocator.noResize,
|
||||
.remap = std.mem.Allocator.noRemap,
|
||||
.free = vtable_free,
|
||||
};
|
||||
|
||||
// Smaller traces since AllocationScope prints so many
|
||||
pub const trace_limits: bun.crash_handler.WriteStackTraceLimits = .{
|
||||
.frame_count = 6,
|
||||
.stop_at_jsc_llint = true,
|
||||
.skip_stdlib = true,
|
||||
};
|
||||
|
||||
pub const free_trace_limits: bun.crash_handler.WriteStackTraceLimits = .{
|
||||
.frame_count = 3,
|
||||
.stop_at_jsc_llint = true,
|
||||
.skip_stdlib = true,
|
||||
};
|
||||
|
||||
fn vtable_alloc(ctx: *anyopaque, len: usize, alignment: std.mem.Alignment, ret_addr: usize) ?[*]u8 {
|
||||
const raw_state: *State = @ptrCast(@alignCast(ctx));
|
||||
const state = raw_state.lock();
|
||||
defer raw_state.unlock();
|
||||
return state.alloc(len, alignment, ret_addr) catch null;
|
||||
}
|
||||
|
||||
fn vtable_free(ctx: *anyopaque, buf: []u8, alignment: std.mem.Alignment, ret_addr: usize) void {
|
||||
const raw_state: *State = @ptrCast(@alignCast(ctx));
|
||||
const state = raw_state.lock();
|
||||
defer raw_state.unlock();
|
||||
state.free(buf, alignment, ret_addr);
|
||||
}
|
||||
|
||||
pub inline fn isInstance(allocator: std.mem.Allocator) bool {
|
||||
return (comptime enabled) and allocator.vtable == &vtable;
|
||||
}
|
||||
|
||||
const std = @import("std");
|
||||
|
||||
const bun = @import("bun");
|
||||
const Output = bun.Output;
|
||||
const Owned = bun.ptr.Owned;
|
||||
const StoredTrace = bun.crash_handler.StoredTrace;
|
||||
112
src/allocators/maybe_owned.zig
Normal file
112
src/allocators/maybe_owned.zig
Normal file
@@ -0,0 +1,112 @@
|
||||
/// This type can be used with `bun.ptr.Owned` to model "maybe owned" pointers:
|
||||
///
|
||||
/// ```
|
||||
/// // Either owned by the default allocator, or borrowed
|
||||
/// const MaybeOwnedFoo = bun.ptr.Owned(*Foo, bun.allocators.MaybeOwned(bun.DefaultAllocator));
|
||||
///
|
||||
/// var owned_foo: MaybeOwnedFoo = .new(makeFoo());
|
||||
/// var borrowed_foo: MaybeOwnedFoo = .fromRawIn(some_foo_ptr, .initBorrowed());
|
||||
///
|
||||
/// owned_foo.deinit(); // calls `Foo.deinit` and frees the memory
|
||||
/// borrowed_foo.deinit(); // no-op
|
||||
/// ```
|
||||
///
|
||||
/// This type is a `GenericAllocator`; see `src/allocators.zig`.
|
||||
pub fn MaybeOwned(comptime Allocator: type) type {
|
||||
return struct {
|
||||
const Self = @This();
|
||||
|
||||
_parent: bun.allocators.Nullable(Allocator),
|
||||
|
||||
/// Same as `.initBorrowed()`. This allocator cannot be used to allocate memory; a panic
|
||||
/// will occur.
|
||||
pub const borrowed = .initBorrowed();
|
||||
|
||||
/// Creates a `MaybeOwned` allocator that owns memory.
|
||||
///
|
||||
/// Allocations are forwarded to a default-initialized `Allocator`.
|
||||
pub fn init() Self {
|
||||
return .initOwned(bun.memory.initDefault(Allocator));
|
||||
}
|
||||
|
||||
/// Creates a `MaybeOwned` allocator that owns memory, and forwards to a specific
|
||||
/// allocator.
|
||||
///
|
||||
/// Allocations are forwarded to `parent_alloc`.
|
||||
pub fn initOwned(parent_alloc: Allocator) Self {
|
||||
return .initRaw(parent_alloc);
|
||||
}
|
||||
|
||||
/// Creates a `MaybeOwned` allocator that does not own any memory. This allocator cannot
|
||||
/// be used to allocate new memory (a panic will occur), and its implementation of `free`
|
||||
/// is a no-op.
|
||||
pub fn initBorrowed() Self {
|
||||
return .initRaw(null);
|
||||
}
|
||||
|
||||
pub fn deinit(self: *Self) void {
|
||||
var maybe_parent = self.intoParent();
|
||||
if (maybe_parent) |*parent_alloc| {
|
||||
bun.memory.deinit(parent_alloc);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn isOwned(self: Self) bool {
|
||||
return self.rawParent() != null;
|
||||
}
|
||||
|
||||
pub fn allocator(self: Self) std.mem.Allocator {
|
||||
const maybe_parent = self.rawParent();
|
||||
return if (maybe_parent) |parent_alloc|
|
||||
bun.allocators.asStd(parent_alloc)
|
||||
else
|
||||
.{ .ptr = undefined, .vtable = &null_vtable };
|
||||
}
|
||||
|
||||
const BorrowedParent = bun.allocators.Borrowed(Allocator);
|
||||
|
||||
pub fn parent(self: Self) ?BorrowedParent {
|
||||
const maybe_parent = self.rawParent();
|
||||
return if (maybe_parent) |parent_alloc|
|
||||
bun.allocators.borrow(parent_alloc)
|
||||
else
|
||||
null;
|
||||
}
|
||||
|
||||
pub fn intoParent(self: *Self) ?Allocator {
|
||||
defer self.* = undefined;
|
||||
return self.rawParent();
|
||||
}
|
||||
|
||||
/// Used by smart pointer types and allocator wrappers. See `bun.allocators.borrow`.
|
||||
pub const Borrowed = MaybeOwned(BorrowedParent);
|
||||
|
||||
pub fn borrow(self: Self) Borrowed {
|
||||
return .{ ._parent = bun.allocators.initNullable(BorrowedParent, self.parent()) };
|
||||
}
|
||||
|
||||
fn initRaw(parent_alloc: ?Allocator) Self {
|
||||
return .{ ._parent = bun.allocators.initNullable(Allocator, parent_alloc) };
|
||||
}
|
||||
|
||||
fn rawParent(self: Self) ?Allocator {
|
||||
return bun.allocators.unpackNullable(Allocator, self._parent);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
fn nullAlloc(ptr: *anyopaque, len: usize, alignment: Alignment, ret_addr: usize) ?[*]u8 {
|
||||
_ = .{ ptr, len, alignment, ret_addr };
|
||||
std.debug.panic("cannot allocate with a borrowed `MaybeOwned` allocator", .{});
|
||||
}
|
||||
|
||||
const null_vtable: std.mem.Allocator.VTable = .{
|
||||
.alloc = nullAlloc,
|
||||
.resize = std.mem.Allocator.noResize,
|
||||
.remap = std.mem.Allocator.noRemap,
|
||||
.free = std.mem.Allocator.noFree,
|
||||
};
|
||||
|
||||
const bun = @import("bun");
|
||||
const std = @import("std");
|
||||
const Alignment = std.mem.Alignment;
|
||||
@@ -799,6 +799,9 @@ pub const api = struct {
|
||||
/// import_source
|
||||
import_source: []const u8,
|
||||
|
||||
/// side_effects
|
||||
side_effects: bool = false,
|
||||
|
||||
pub fn decode(reader: anytype) anyerror!Jsx {
|
||||
var this = std.mem.zeroes(Jsx);
|
||||
|
||||
@@ -807,6 +810,7 @@ pub const api = struct {
|
||||
this.fragment = try reader.readValue([]const u8);
|
||||
this.development = try reader.readValue(bool);
|
||||
this.import_source = try reader.readValue([]const u8);
|
||||
this.side_effects = try reader.readValue(bool);
|
||||
return this;
|
||||
}
|
||||
|
||||
@@ -816,6 +820,7 @@ pub const api = struct {
|
||||
try writer.writeValue(@TypeOf(this.fragment), this.fragment);
|
||||
try writer.writeInt(@as(u8, @intFromBool(this.development)));
|
||||
try writer.writeValue(@TypeOf(this.import_source), this.import_source);
|
||||
try writer.writeInt(@as(u8, @intFromBool(this.side_effects)));
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
@@ -83,14 +83,14 @@ pub const TsEnumsMap = std.ArrayHashMapUnmanaged(Ref, bun.StringHashMapUnmanaged
|
||||
|
||||
pub fn fromParts(parts: []Part) Ast {
|
||||
return Ast{
|
||||
.parts = Part.List.init(parts),
|
||||
.parts = Part.List.fromOwnedSlice(parts),
|
||||
.runtime_imports = .{},
|
||||
};
|
||||
}
|
||||
|
||||
pub fn initTest(parts: []Part) Ast {
|
||||
pub fn initTest(parts: []const Part) Ast {
|
||||
return Ast{
|
||||
.parts = Part.List.init(parts),
|
||||
.parts = Part.List.fromBorrowedSliceDangerous(parts),
|
||||
.runtime_imports = .{},
|
||||
};
|
||||
}
|
||||
@@ -107,9 +107,9 @@ pub fn toJSON(self: *const Ast, _: std.mem.Allocator, stream: anytype) !void {
|
||||
/// Do not call this if it wasn't globally allocated!
|
||||
pub fn deinit(this: *Ast) void {
|
||||
// TODO: assert mimalloc-owned memory
|
||||
if (this.parts.len > 0) this.parts.deinitWithAllocator(bun.default_allocator);
|
||||
if (this.symbols.len > 0) this.symbols.deinitWithAllocator(bun.default_allocator);
|
||||
if (this.import_records.len > 0) this.import_records.deinitWithAllocator(bun.default_allocator);
|
||||
this.parts.deinit(bun.default_allocator);
|
||||
this.symbols.deinit(bun.default_allocator);
|
||||
this.import_records.deinit(bun.default_allocator);
|
||||
}
|
||||
|
||||
pub const Class = G.Class;
|
||||
|
||||
@@ -56,7 +56,14 @@ pub fn toExpr(binding: *const Binding, wrapper: anytype) Expr {
|
||||
};
|
||||
}
|
||||
|
||||
return Expr.init(E.Array, E.Array{ .items = ExprNodeList.init(exprs), .is_single_line = b.is_single_line }, loc);
|
||||
return Expr.init(
|
||||
E.Array,
|
||||
E.Array{
|
||||
.items = ExprNodeList.fromOwnedSlice(exprs),
|
||||
.is_single_line = b.is_single_line,
|
||||
},
|
||||
loc,
|
||||
);
|
||||
},
|
||||
.b_object => |b| {
|
||||
const properties = wrapper
|
||||
@@ -77,7 +84,7 @@ pub fn toExpr(binding: *const Binding, wrapper: anytype) Expr {
|
||||
return Expr.init(
|
||||
E.Object,
|
||||
E.Object{
|
||||
.properties = G.Property.List.init(properties),
|
||||
.properties = G.Property.List.fromOwnedSlice(properties),
|
||||
.is_single_line = b.is_single_line,
|
||||
},
|
||||
loc,
|
||||
|
||||
@@ -121,7 +121,7 @@ pub fn convertStmt(ctx: *ConvertESMExportsForHmr, p: anytype, stmt: Stmt) !void
|
||||
const temp_id = p.generateTempRef("default_export");
|
||||
try ctx.last_part.declared_symbols.append(p.allocator, .{ .ref = temp_id, .is_top_level = true });
|
||||
try ctx.last_part.symbol_uses.putNoClobber(p.allocator, temp_id, .{ .count_estimate = 1 });
|
||||
try p.current_scope.generated.push(p.allocator, temp_id);
|
||||
try p.current_scope.generated.append(p.allocator, temp_id);
|
||||
|
||||
try ctx.export_props.append(p.allocator, .{
|
||||
.key = Expr.init(E.String, .{ .data = "default" }, stmt.loc),
|
||||
@@ -395,7 +395,7 @@ fn visitRefToExport(
|
||||
const arg1 = p.generateTempRef(symbol.original_name);
|
||||
try ctx.last_part.declared_symbols.append(p.allocator, .{ .ref = arg1, .is_top_level = true });
|
||||
try ctx.last_part.symbol_uses.putNoClobber(p.allocator, arg1, .{ .count_estimate = 1 });
|
||||
try p.current_scope.generated.push(p.allocator, arg1);
|
||||
try p.current_scope.generated.append(p.allocator, arg1);
|
||||
|
||||
// 'get abc() { return abc }'
|
||||
try ctx.export_props.append(p.allocator, .{
|
||||
@@ -438,7 +438,7 @@ pub fn finalize(ctx: *ConvertESMExportsForHmr, p: anytype, all_parts: []js_ast.P
|
||||
|
||||
if (ctx.export_props.items.len > 0) {
|
||||
const obj = Expr.init(E.Object, .{
|
||||
.properties = G.Property.List.fromList(ctx.export_props),
|
||||
.properties = G.Property.List.moveFromList(&ctx.export_props),
|
||||
}, logger.Loc.Empty);
|
||||
|
||||
// `hmr.exports = ...`
|
||||
@@ -466,7 +466,7 @@ pub fn finalize(ctx: *ConvertESMExportsForHmr, p: anytype, all_parts: []js_ast.P
|
||||
.name = "reactRefreshAccept",
|
||||
.name_loc = .Empty,
|
||||
}, .Empty),
|
||||
.args = .init(&.{}),
|
||||
.args = .empty,
|
||||
}, .Empty),
|
||||
}, .Empty));
|
||||
}
|
||||
@@ -474,7 +474,10 @@ pub fn finalize(ctx: *ConvertESMExportsForHmr, p: anytype, all_parts: []js_ast.P
|
||||
// Merge all part metadata into the first part.
|
||||
for (all_parts[0 .. all_parts.len - 1]) |*part| {
|
||||
try ctx.last_part.declared_symbols.appendList(p.allocator, part.declared_symbols);
|
||||
try ctx.last_part.import_record_indices.append(p.allocator, part.import_record_indices.slice());
|
||||
try ctx.last_part.import_record_indices.appendSlice(
|
||||
p.allocator,
|
||||
part.import_record_indices.slice(),
|
||||
);
|
||||
for (part.symbol_uses.keys(), part.symbol_uses.values()) |k, v| {
|
||||
const gop = try ctx.last_part.symbol_uses.getOrPut(p.allocator, k);
|
||||
if (!gop.found_existing) {
|
||||
@@ -487,13 +490,16 @@ pub fn finalize(ctx: *ConvertESMExportsForHmr, p: anytype, all_parts: []js_ast.P
|
||||
part.declared_symbols.entries.len = 0;
|
||||
part.tag = .dead_due_to_inlining;
|
||||
part.dependencies.clearRetainingCapacity();
|
||||
try part.dependencies.push(p.allocator, .{
|
||||
try part.dependencies.append(p.allocator, .{
|
||||
.part_index = @intCast(all_parts.len - 1),
|
||||
.source_index = p.source.index,
|
||||
});
|
||||
}
|
||||
|
||||
try ctx.last_part.import_record_indices.append(p.allocator, p.import_records_for_current_part.items);
|
||||
try ctx.last_part.import_record_indices.appendSlice(
|
||||
p.allocator,
|
||||
p.import_records_for_current_part.items,
|
||||
);
|
||||
try ctx.last_part.declared_symbols.appendList(p.allocator, p.declared_symbols);
|
||||
|
||||
ctx.last_part.stmts = ctx.stmts.items;
|
||||
|
||||
@@ -18,7 +18,7 @@ pub const Array = struct {
|
||||
close_bracket_loc: logger.Loc = logger.Loc.Empty,
|
||||
|
||||
pub fn push(this: *Array, allocator: std.mem.Allocator, item: Expr) !void {
|
||||
try this.items.push(allocator, item);
|
||||
try this.items.append(allocator, item);
|
||||
}
|
||||
|
||||
pub inline fn slice(this: Array) []Expr {
|
||||
@@ -30,12 +30,13 @@ pub const Array = struct {
|
||||
allocator: std.mem.Allocator,
|
||||
estimated_count: usize,
|
||||
) !ExprNodeList {
|
||||
var out = try allocator.alloc(
|
||||
Expr,
|
||||
var out: bun.BabyList(Expr) = try .initCapacity(
|
||||
allocator,
|
||||
// This over-allocates a little but it's fine
|
||||
estimated_count + @as(usize, this.items.len),
|
||||
);
|
||||
var remain = out;
|
||||
out.expandToCapacity();
|
||||
var remain = out.slice();
|
||||
for (this.items.slice()) |item| {
|
||||
switch (item.data) {
|
||||
.e_spread => |val| {
|
||||
@@ -63,7 +64,8 @@ pub const Array = struct {
|
||||
remain = remain[1..];
|
||||
}
|
||||
|
||||
return ExprNodeList.init(out[0 .. out.len - remain.len]);
|
||||
out.shrinkRetainingCapacity(out.len - remain.len);
|
||||
return out;
|
||||
}
|
||||
|
||||
pub fn toJS(this: @This(), allocator: std.mem.Allocator, globalObject: *jsc.JSGlobalObject) ToJSError!jsc.JSValue {
|
||||
@@ -98,6 +100,43 @@ pub const Array = struct {
|
||||
pub const Unary = struct {
|
||||
op: Op.Code,
|
||||
value: ExprNodeIndex,
|
||||
flags: Unary.Flags = .{},
|
||||
|
||||
pub const Flags = packed struct(u8) {
|
||||
/// The expression "typeof (0, x)" must not become "typeof x" if "x"
|
||||
/// is unbound because that could suppress a ReferenceError from "x".
|
||||
///
|
||||
/// Also if we know a typeof operator was originally an identifier, then
|
||||
/// we know that this typeof operator always has no side effects (even if
|
||||
/// we consider the identifier by itself to have a side effect).
|
||||
///
|
||||
/// Note that there *is* actually a case where "typeof x" can throw an error:
|
||||
/// when "x" is being referenced inside of its TDZ (temporal dead zone). TDZ
|
||||
/// checks are not yet handled correctly by Bun, so this possibility is
|
||||
/// currently ignored.
|
||||
was_originally_typeof_identifier: bool = false,
|
||||
|
||||
/// Similarly the expression "delete (0, x)" must not become "delete x"
|
||||
/// because that syntax is invalid in strict mode. We also need to make sure
|
||||
/// we don't accidentally change the return value:
|
||||
///
|
||||
/// Returns false:
|
||||
/// "var a; delete (a)"
|
||||
/// "var a = Object.freeze({b: 1}); delete (a.b)"
|
||||
/// "var a = Object.freeze({b: 1}); delete (a?.b)"
|
||||
/// "var a = Object.freeze({b: 1}); delete (a['b'])"
|
||||
/// "var a = Object.freeze({b: 1}); delete (a?.['b'])"
|
||||
///
|
||||
/// Returns true:
|
||||
/// "var a; delete (0, a)"
|
||||
/// "var a = Object.freeze({b: 1}); delete (true && a.b)"
|
||||
/// "var a = Object.freeze({b: 1}); delete (false || a?.b)"
|
||||
/// "var a = Object.freeze({b: 1}); delete (null ?? a?.['b'])"
|
||||
///
|
||||
/// "var a = Object.freeze({b: 1}); delete (true ? a['b'] : a['b'])"
|
||||
was_originally_delete_of_identifier_or_property_access: bool = false,
|
||||
_: u6 = 0,
|
||||
};
|
||||
};
|
||||
|
||||
pub const Binary = struct {
|
||||
@@ -536,7 +575,7 @@ pub const Object = struct {
|
||||
if (asProperty(self, key)) |query| {
|
||||
self.properties.ptr[query.i].value = expr;
|
||||
} else {
|
||||
try self.properties.push(allocator, .{
|
||||
try self.properties.append(allocator, .{
|
||||
.key = Expr.init(E.String, E.String.init(key), expr.loc),
|
||||
.value = expr,
|
||||
});
|
||||
@@ -551,7 +590,7 @@ pub const Object = struct {
|
||||
|
||||
pub fn set(self: *const Object, key: Expr, allocator: std.mem.Allocator, value: Expr) SetError!void {
|
||||
if (self.hasProperty(key.data.e_string.data)) return error.Clobber;
|
||||
try self.properties.push(allocator, .{
|
||||
try self.properties.append(allocator, .{
|
||||
.key = key,
|
||||
.value = value,
|
||||
});
|
||||
@@ -605,7 +644,7 @@ pub const Object = struct {
|
||||
value_ = obj;
|
||||
}
|
||||
|
||||
try self.properties.push(allocator, .{
|
||||
try self.properties.append(allocator, .{
|
||||
.key = rope.head,
|
||||
.value = value_,
|
||||
});
|
||||
@@ -646,7 +685,7 @@ pub const Object = struct {
|
||||
if (rope.next) |next| {
|
||||
var obj = Expr.init(E.Object, E.Object{ .properties = .{} }, rope.head.loc);
|
||||
const out = try obj.data.e_object.getOrPutObject(next, allocator);
|
||||
try self.properties.push(allocator, .{
|
||||
try self.properties.append(allocator, .{
|
||||
.key = rope.head,
|
||||
.value = obj,
|
||||
});
|
||||
@@ -654,7 +693,7 @@ pub const Object = struct {
|
||||
}
|
||||
|
||||
const out = Expr.init(E.Object, E.Object{}, rope.head.loc);
|
||||
try self.properties.push(allocator, .{
|
||||
try self.properties.append(allocator, .{
|
||||
.key = rope.head,
|
||||
.value = out,
|
||||
});
|
||||
@@ -695,7 +734,7 @@ pub const Object = struct {
|
||||
if (rope.next) |next| {
|
||||
var obj = Expr.init(E.Object, E.Object{ .properties = .{} }, rope.head.loc);
|
||||
const out = try obj.data.e_object.getOrPutArray(next, allocator);
|
||||
try self.properties.push(allocator, .{
|
||||
try self.properties.append(allocator, .{
|
||||
.key = rope.head,
|
||||
.value = obj,
|
||||
});
|
||||
@@ -703,7 +742,7 @@ pub const Object = struct {
|
||||
}
|
||||
|
||||
const out = Expr.init(E.Array, E.Array{}, rope.head.loc);
|
||||
try self.properties.push(allocator, .{
|
||||
try self.properties.append(allocator, .{
|
||||
.key = rope.head,
|
||||
.value = out,
|
||||
});
|
||||
@@ -940,6 +979,30 @@ pub const String = struct {
|
||||
return bun.handleOom(this.string(allocator));
|
||||
}
|
||||
|
||||
fn stringCompareForJavaScript(comptime T: type, a: []const T, b: []const T) std.math.Order {
|
||||
const a_slice = a[0..@min(a.len, b.len)];
|
||||
const b_slice = b[0..@min(a.len, b.len)];
|
||||
for (a_slice, b_slice) |a_char, b_char| {
|
||||
const delta: i32 = @as(i32, a_char) - @as(i32, b_char);
|
||||
if (delta != 0) {
|
||||
return if (delta < 0) .lt else .gt;
|
||||
}
|
||||
}
|
||||
return std.math.order(a.len, b.len);
|
||||
}
|
||||
|
||||
/// Compares two strings lexicographically for JavaScript semantics.
|
||||
/// Both strings must share the same encoding (UTF-8 vs UTF-16).
|
||||
pub inline fn order(this: *const String, other: *const String) std.math.Order {
|
||||
bun.debugAssert(this.isUTF8() == other.isUTF8());
|
||||
|
||||
if (this.isUTF8()) {
|
||||
return stringCompareForJavaScript(u8, this.data, other.data);
|
||||
} else {
|
||||
return stringCompareForJavaScript(u16, this.slice16(), other.slice16());
|
||||
}
|
||||
}
|
||||
|
||||
pub var empty = String{};
|
||||
pub var @"true" = String{ .data = "true" };
|
||||
pub var @"false" = String{ .data = "false" };
|
||||
|
||||
162
src/ast/Expr.zig
162
src/ast/Expr.zig
@@ -273,13 +273,10 @@ pub fn set(expr: *Expr, allocator: std.mem.Allocator, name: string, value: Expr)
|
||||
}
|
||||
}
|
||||
|
||||
var new_props = expr.data.e_object.properties.listManaged(allocator);
|
||||
try new_props.append(.{
|
||||
try expr.data.e_object.properties.append(allocator, .{
|
||||
.key = Expr.init(E.String, .{ .data = name }, logger.Loc.Empty),
|
||||
.value = value,
|
||||
});
|
||||
|
||||
expr.data.e_object.properties = BabyList(G.Property).fromList(new_props);
|
||||
}
|
||||
|
||||
/// Don't use this if you care about performance.
|
||||
@@ -298,13 +295,10 @@ pub fn setString(expr: *Expr, allocator: std.mem.Allocator, name: string, value:
|
||||
}
|
||||
}
|
||||
|
||||
var new_props = expr.data.e_object.properties.listManaged(allocator);
|
||||
try new_props.append(.{
|
||||
try expr.data.e_object.properties.append(allocator, .{
|
||||
.key = Expr.init(E.String, .{ .data = name }, logger.Loc.Empty),
|
||||
.value = Expr.init(E.String, .{ .data = value }, logger.Loc.Empty),
|
||||
});
|
||||
|
||||
expr.data.e_object.properties = BabyList(G.Property).fromList(new_props);
|
||||
}
|
||||
|
||||
pub fn getObject(expr: *const Expr, name: string) ?Expr {
|
||||
@@ -647,6 +641,29 @@ pub fn jsonStringify(self: *const @This(), writer: anytype) !void {
|
||||
return try writer.write(Serializable{ .type = std.meta.activeTag(self.data), .object = "expr", .value = self.data, .loc = self.loc });
|
||||
}
|
||||
|
||||
pub fn extractNumericValuesInSafeRange(left: Expr.Data, right: Expr.Data) ?[2]f64 {
|
||||
const l_value = left.extractNumericValue() orelse return null;
|
||||
const r_value = right.extractNumericValue() orelse return null;
|
||||
|
||||
// Check for NaN and return null if either value is NaN
|
||||
if (std.math.isNan(l_value) or std.math.isNan(r_value)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (std.math.isInf(l_value) or std.math.isInf(r_value)) {
|
||||
return .{ l_value, r_value };
|
||||
}
|
||||
|
||||
if (l_value > bun.jsc.MAX_SAFE_INTEGER or r_value > bun.jsc.MAX_SAFE_INTEGER) {
|
||||
return null;
|
||||
}
|
||||
if (l_value < bun.jsc.MIN_SAFE_INTEGER or r_value < bun.jsc.MIN_SAFE_INTEGER) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return .{ l_value, r_value };
|
||||
}
|
||||
|
||||
pub fn extractNumericValues(left: Expr.Data, right: Expr.Data) ?[2]f64 {
|
||||
return .{
|
||||
left.extractNumericValue() orelse return null,
|
||||
@@ -654,6 +671,20 @@ pub fn extractNumericValues(left: Expr.Data, right: Expr.Data) ?[2]f64 {
|
||||
};
|
||||
}
|
||||
|
||||
pub fn extractStringValues(left: Expr.Data, right: Expr.Data, allocator: std.mem.Allocator) ?[2]*E.String {
|
||||
const l_string = left.extractStringValue() orelse return null;
|
||||
const r_string = right.extractStringValue() orelse return null;
|
||||
l_string.resolveRopeIfNeeded(allocator);
|
||||
r_string.resolveRopeIfNeeded(allocator);
|
||||
|
||||
if (l_string.isUTF8() != r_string.isUTF8()) return null;
|
||||
|
||||
return .{
|
||||
l_string,
|
||||
r_string,
|
||||
};
|
||||
}
|
||||
|
||||
pub var icount: usize = 0;
|
||||
|
||||
// We don't need to dynamically allocate booleans
|
||||
@@ -1407,11 +1438,17 @@ pub fn init(comptime Type: type, st: Type, loc: logger.Loc) Expr {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn isPrimitiveLiteral(this: Expr) bool {
|
||||
/// If this returns true, then calling this expression captures the target of
|
||||
/// the property access as "this" when calling the function in the property.
|
||||
pub inline fn isPropertyAccess(this: *const Expr) bool {
|
||||
return this.hasValueForThisInCall();
|
||||
}
|
||||
|
||||
pub inline fn isPrimitiveLiteral(this: *const Expr) bool {
|
||||
return @as(Tag, this.data).isPrimitiveLiteral();
|
||||
}
|
||||
|
||||
pub fn isRef(this: Expr, ref: Ref) bool {
|
||||
pub inline fn isRef(this: *const Expr, ref: Ref) bool {
|
||||
return switch (this.data) {
|
||||
.e_import_identifier => |import_identifier| import_identifier.ref.eql(ref),
|
||||
.e_identifier => |ident| ident.ref.eql(ref),
|
||||
@@ -1873,36 +1910,19 @@ pub const Tag = enum {
|
||||
}
|
||||
};
|
||||
|
||||
pub fn isBoolean(a: Expr) bool {
|
||||
switch (a.data) {
|
||||
.e_boolean => {
|
||||
return true;
|
||||
pub fn isBoolean(a: *const Expr) bool {
|
||||
return switch (a.data) {
|
||||
.e_boolean => true,
|
||||
.e_if => |ex| ex.yes.isBoolean() and ex.no.isBoolean(),
|
||||
.e_unary => |ex| ex.op == .un_not or ex.op == .un_delete,
|
||||
.e_binary => |ex| switch (ex.op) {
|
||||
.bin_strict_eq, .bin_strict_ne, .bin_loose_eq, .bin_loose_ne, .bin_lt, .bin_gt, .bin_le, .bin_ge, .bin_instanceof, .bin_in => true,
|
||||
.bin_logical_or => ex.left.isBoolean() and ex.right.isBoolean(),
|
||||
.bin_logical_and => ex.left.isBoolean() and ex.right.isBoolean(),
|
||||
else => false,
|
||||
},
|
||||
|
||||
.e_if => |ex| {
|
||||
return isBoolean(ex.yes) and isBoolean(ex.no);
|
||||
},
|
||||
.e_unary => |ex| {
|
||||
return ex.op == .un_not or ex.op == .un_delete;
|
||||
},
|
||||
.e_binary => |ex| {
|
||||
switch (ex.op) {
|
||||
.bin_strict_eq, .bin_strict_ne, .bin_loose_eq, .bin_loose_ne, .bin_lt, .bin_gt, .bin_le, .bin_ge, .bin_instanceof, .bin_in => {
|
||||
return true;
|
||||
},
|
||||
.bin_logical_or => {
|
||||
return isBoolean(ex.left) and isBoolean(ex.right);
|
||||
},
|
||||
.bin_logical_and => {
|
||||
return isBoolean(ex.left) and isBoolean(ex.right);
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
|
||||
return false;
|
||||
else => false,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn assign(a: Expr, b: Expr) Expr {
|
||||
@@ -1912,7 +1932,7 @@ pub fn assign(a: Expr, b: Expr) Expr {
|
||||
.right = b,
|
||||
}, a.loc);
|
||||
}
|
||||
pub inline fn at(expr: Expr, comptime Type: type, t: Type, _: std.mem.Allocator) Expr {
|
||||
pub inline fn at(expr: *const Expr, comptime Type: type, t: Type, _: std.mem.Allocator) Expr {
|
||||
return init(Type, t, expr.loc);
|
||||
}
|
||||
|
||||
@@ -1920,21 +1940,19 @@ pub inline fn at(expr: Expr, comptime Type: type, t: Type, _: std.mem.Allocator)
|
||||
// will potentially be simplified to avoid generating unnecessary extra "!"
|
||||
// operators. For example, calling this with "!!x" will return "!x" instead
|
||||
// of returning "!!!x".
|
||||
pub fn not(expr: Expr, allocator: std.mem.Allocator) Expr {
|
||||
return maybeSimplifyNot(
|
||||
expr,
|
||||
allocator,
|
||||
) orelse Expr.init(
|
||||
E.Unary,
|
||||
E.Unary{
|
||||
.op = .un_not,
|
||||
.value = expr,
|
||||
},
|
||||
expr.loc,
|
||||
);
|
||||
pub fn not(expr: *const Expr, allocator: std.mem.Allocator) Expr {
|
||||
return expr.maybeSimplifyNot(allocator) orelse
|
||||
Expr.init(
|
||||
E.Unary,
|
||||
E.Unary{
|
||||
.op = .un_not,
|
||||
.value = expr.*,
|
||||
},
|
||||
expr.loc,
|
||||
);
|
||||
}
|
||||
|
||||
pub fn hasValueForThisInCall(expr: Expr) bool {
|
||||
pub inline fn hasValueForThisInCall(expr: *const Expr) bool {
|
||||
return switch (expr.data) {
|
||||
.e_dot, .e_index => true,
|
||||
else => false,
|
||||
@@ -1946,7 +1964,7 @@ pub fn hasValueForThisInCall(expr: Expr) bool {
|
||||
/// whole operator (i.e. the "!x") if it can be simplified, or false if not.
|
||||
/// It's separate from "Not()" above to avoid allocation on failure in case
|
||||
/// that is undesired.
|
||||
pub fn maybeSimplifyNot(expr: Expr, allocator: std.mem.Allocator) ?Expr {
|
||||
pub fn maybeSimplifyNot(expr: *const Expr, allocator: std.mem.Allocator) ?Expr {
|
||||
switch (expr.data) {
|
||||
.e_null, .e_undefined => {
|
||||
return expr.at(E.Boolean, E.Boolean{ .value = true }, allocator);
|
||||
@@ -1968,7 +1986,7 @@ pub fn maybeSimplifyNot(expr: Expr, allocator: std.mem.Allocator) ?Expr {
|
||||
},
|
||||
// "!!!a" => "!a"
|
||||
.e_unary => |un| {
|
||||
if (un.op == Op.Code.un_not and knownPrimitive(un.value) == .boolean) {
|
||||
if (un.op == Op.Code.un_not and un.value.knownPrimitive() == .boolean) {
|
||||
return un.value;
|
||||
}
|
||||
},
|
||||
@@ -1981,33 +1999,33 @@ pub fn maybeSimplifyNot(expr: Expr, allocator: std.mem.Allocator) ?Expr {
|
||||
Op.Code.bin_loose_eq => {
|
||||
// "!(a == b)" => "a != b"
|
||||
ex.op = .bin_loose_ne;
|
||||
return expr;
|
||||
return expr.*;
|
||||
},
|
||||
Op.Code.bin_loose_ne => {
|
||||
// "!(a != b)" => "a == b"
|
||||
ex.op = .bin_loose_eq;
|
||||
return expr;
|
||||
return expr.*;
|
||||
},
|
||||
Op.Code.bin_strict_eq => {
|
||||
// "!(a === b)" => "a !== b"
|
||||
ex.op = .bin_strict_ne;
|
||||
return expr;
|
||||
return expr.*;
|
||||
},
|
||||
Op.Code.bin_strict_ne => {
|
||||
// "!(a !== b)" => "a === b"
|
||||
ex.op = .bin_strict_eq;
|
||||
return expr;
|
||||
return expr.*;
|
||||
},
|
||||
Op.Code.bin_comma => {
|
||||
// "!(a, b)" => "a, !b"
|
||||
ex.right = ex.right.not(allocator);
|
||||
return expr;
|
||||
return expr.*;
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
},
|
||||
.e_inlined_enum => |inlined| {
|
||||
return maybeSimplifyNot(inlined.value, allocator);
|
||||
return inlined.value.maybeSimplifyNot(allocator);
|
||||
},
|
||||
|
||||
else => {},
|
||||
@@ -2016,11 +2034,11 @@ pub fn maybeSimplifyNot(expr: Expr, allocator: std.mem.Allocator) ?Expr {
|
||||
return null;
|
||||
}
|
||||
|
||||
pub fn toStringExprWithoutSideEffects(expr: Expr, allocator: std.mem.Allocator) ?Expr {
|
||||
pub fn toStringExprWithoutSideEffects(expr: *const Expr, allocator: std.mem.Allocator) ?Expr {
|
||||
const unwrapped = expr.unwrapInlined();
|
||||
const slice = switch (unwrapped.data) {
|
||||
.e_null => "null",
|
||||
.e_string => return expr,
|
||||
.e_string => return expr.*,
|
||||
.e_undefined => "undefined",
|
||||
.e_boolean => |data| if (data.value) "true" else "false",
|
||||
.e_big_int => |bigint| bigint.value,
|
||||
@@ -2054,7 +2072,7 @@ pub fn isOptionalChain(self: *const @This()) bool {
|
||||
};
|
||||
}
|
||||
|
||||
pub inline fn knownPrimitive(self: @This()) PrimitiveType {
|
||||
pub inline fn knownPrimitive(self: *const @This()) PrimitiveType {
|
||||
return self.data.knownPrimitive();
|
||||
}
|
||||
|
||||
@@ -2294,6 +2312,7 @@ pub const Data = union(Tag) {
|
||||
const item = bun.create(allocator, E.Unary, .{
|
||||
.op = el.op,
|
||||
.value = try el.value.deepClone(allocator),
|
||||
.flags = el.flags,
|
||||
});
|
||||
return .{ .e_unary = item };
|
||||
},
|
||||
@@ -2506,6 +2525,7 @@ pub const Data = union(Tag) {
|
||||
}
|
||||
},
|
||||
.e_unary => |e| {
|
||||
writeAnyToHasher(hasher, @as(u8, @bitCast(e.flags)));
|
||||
writeAnyToHasher(hasher, .{e.op});
|
||||
e.value.data.writeToHasher(hasher, symbol_table);
|
||||
},
|
||||
@@ -2537,7 +2557,7 @@ pub const Data = union(Tag) {
|
||||
inline .e_spread, .e_await => |e| {
|
||||
e.value.data.writeToHasher(hasher, symbol_table);
|
||||
},
|
||||
inline .e_yield => |e| {
|
||||
.e_yield => |e| {
|
||||
writeAnyToHasher(hasher, .{ e.is_star, e.value });
|
||||
if (e.value) |value|
|
||||
value.data.writeToHasher(hasher, symbol_table);
|
||||
@@ -2860,6 +2880,17 @@ pub const Data = union(Tag) {
|
||||
};
|
||||
}
|
||||
|
||||
pub fn extractStringValue(data: Expr.Data) ?*E.String {
|
||||
return switch (data) {
|
||||
.e_string => data.e_string,
|
||||
.e_inlined_enum => |inlined| switch (inlined.value.data) {
|
||||
.e_string => |str| str,
|
||||
else => null,
|
||||
},
|
||||
else => null,
|
||||
};
|
||||
}
|
||||
|
||||
pub const Equality = struct {
|
||||
equal: bool = false,
|
||||
ok: bool = false,
|
||||
@@ -3208,7 +3239,6 @@ const JSPrinter = @import("../js_printer.zig");
|
||||
const std = @import("std");
|
||||
|
||||
const bun = @import("bun");
|
||||
const BabyList = bun.BabyList;
|
||||
const Environment = bun.Environment;
|
||||
const JSONParser = bun.json;
|
||||
const MutableString = bun.MutableString;
|
||||
|
||||
@@ -8,18 +8,158 @@ pub const KnownGlobal = enum {
|
||||
Response,
|
||||
TextEncoder,
|
||||
TextDecoder,
|
||||
Error,
|
||||
TypeError,
|
||||
SyntaxError,
|
||||
RangeError,
|
||||
ReferenceError,
|
||||
EvalError,
|
||||
URIError,
|
||||
AggregateError,
|
||||
Array,
|
||||
Object,
|
||||
Function,
|
||||
RegExp,
|
||||
|
||||
pub const map = bun.ComptimeEnumMap(KnownGlobal);
|
||||
|
||||
pub noinline fn maybeMarkConstructorAsPure(noalias e: *E.New, symbols: []const Symbol) void {
|
||||
const id = if (e.target.data == .e_identifier) e.target.data.e_identifier.ref else return;
|
||||
inline fn callFromNew(e: *E.New, loc: logger.Loc) js_ast.Expr {
|
||||
const call = E.Call{
|
||||
.target = e.target,
|
||||
.args = e.args,
|
||||
.close_paren_loc = e.close_parens_loc,
|
||||
.can_be_unwrapped_if_unused = e.can_be_unwrapped_if_unused,
|
||||
};
|
||||
return js_ast.Expr.init(E.Call, call, loc);
|
||||
}
|
||||
|
||||
pub noinline fn minifyGlobalConstructor(allocator: std.mem.Allocator, noalias e: *E.New, symbols: []const Symbol, loc: logger.Loc, minify_whitespace: bool) ?js_ast.Expr {
|
||||
const id = if (e.target.data == .e_identifier) e.target.data.e_identifier.ref else return null;
|
||||
const symbol = &symbols[id.innerIndex()];
|
||||
if (symbol.kind != .unbound)
|
||||
return;
|
||||
return null;
|
||||
|
||||
const constructor = map.get(symbol.original_name) orelse return;
|
||||
const constructor = map.get(symbol.original_name) orelse return null;
|
||||
|
||||
switch (constructor) {
|
||||
return switch (constructor) {
|
||||
// Error constructors can be called without 'new' with identical behavior
|
||||
.Error, .TypeError, .SyntaxError, .RangeError, .ReferenceError, .EvalError, .URIError, .AggregateError => {
|
||||
// Convert `new Error(...)` to `Error(...)` to save bytes
|
||||
return callFromNew(e, loc);
|
||||
},
|
||||
|
||||
.Object => {
|
||||
const n = e.args.len;
|
||||
|
||||
if (n == 0) {
|
||||
// new Object() -> {}
|
||||
return js_ast.Expr.init(E.Object, E.Object{}, loc);
|
||||
}
|
||||
|
||||
if (n == 1) {
|
||||
const arg = e.args.ptr[0];
|
||||
switch (arg.data) {
|
||||
.e_object, .e_array => {
|
||||
// new Object({a: 1}) -> {a: 1}
|
||||
// new Object([1, 2]) -> [1, 2]
|
||||
return arg;
|
||||
},
|
||||
.e_null, .e_undefined => {
|
||||
// new Object(null) -> {}
|
||||
// new Object(undefined) -> {}
|
||||
return js_ast.Expr.init(E.Object, E.Object{}, loc);
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
}
|
||||
|
||||
// For other cases, just remove 'new'
|
||||
return callFromNew(e, loc);
|
||||
},
|
||||
|
||||
.Array => {
|
||||
const n = e.args.len;
|
||||
|
||||
return switch (n) {
|
||||
0 => {
|
||||
// new Array() -> []
|
||||
return js_ast.Expr.init(E.Array, E.Array{}, loc);
|
||||
},
|
||||
1 => {
|
||||
// For single argument, only convert to literal if we're SURE it's not a number
|
||||
const arg = e.args.ptr[0];
|
||||
|
||||
// Check if it's an object or array literal first
|
||||
switch (arg.data) {
|
||||
.e_object, .e_array => {
|
||||
// new Array({}) -> [{}], new Array([1]) -> [[1]]
|
||||
// These are definitely not numbers, safe to convert
|
||||
return js_ast.Expr.init(E.Array, .{ .items = e.args }, loc);
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
|
||||
// For other types, check via knownPrimitive
|
||||
const primitive = arg.knownPrimitive();
|
||||
// Only convert if we know for certain it's not a number
|
||||
// unknown could be a number at runtime, so we must preserve Array() call
|
||||
switch (primitive) {
|
||||
.null, .undefined, .boolean, .string, .bigint => {
|
||||
// These are definitely not numbers, safe to convert
|
||||
return js_ast.Expr.init(E.Array, .{ .items = e.args }, loc);
|
||||
},
|
||||
.number => {
|
||||
const val = arg.data.e_number.value;
|
||||
if (
|
||||
// only want this with whitespace minification
|
||||
minify_whitespace and
|
||||
(val == 0 or
|
||||
val == 1 or
|
||||
val == 2 or
|
||||
val == 3 or
|
||||
val == 4 or
|
||||
val == 5 or
|
||||
val == 6 or
|
||||
val == 7 or
|
||||
val == 8 or
|
||||
val == 9 or
|
||||
val == 10))
|
||||
{
|
||||
const arg_loc = arg.loc;
|
||||
var list = e.args.moveToListManaged(allocator);
|
||||
list.clearRetainingCapacity();
|
||||
bun.handleOom(list.appendNTimes(js_ast.Expr{ .data = js_parser.Prefill.Data.EMissing, .loc = arg_loc }, @intFromFloat(val)));
|
||||
return js_ast.Expr.init(E.Array, .{ .items = .moveFromList(&list) }, loc);
|
||||
}
|
||||
return callFromNew(e, loc);
|
||||
},
|
||||
.unknown, .mixed => {
|
||||
// Could be a number, preserve Array() call
|
||||
return callFromNew(e, loc);
|
||||
},
|
||||
}
|
||||
},
|
||||
// > 1
|
||||
else => {
|
||||
// new Array(1, 2, 3) -> [1, 2, 3]
|
||||
// But NOT new Array(3) which creates an array with 3 empty slots
|
||||
return js_ast.Expr.init(E.Array, .{ .items = e.args }, loc);
|
||||
},
|
||||
};
|
||||
},
|
||||
|
||||
.Function => {
|
||||
// Just remove 'new' for Function
|
||||
return callFromNew(e, loc);
|
||||
},
|
||||
.RegExp => {
|
||||
// Don't optimize RegExp - the semantics are too complex:
|
||||
// - new RegExp(re) creates a copy, but RegExp(re) returns the same instance
|
||||
// - This affects object identity and lastIndex behavior
|
||||
// - The difference only applies when flags are undefined
|
||||
// Keep the original new RegExp() call to preserve correct semantics
|
||||
return null;
|
||||
},
|
||||
.WeakSet, .WeakMap => {
|
||||
const n = e.args.len;
|
||||
|
||||
@@ -27,7 +167,7 @@ pub const KnownGlobal = enum {
|
||||
// "new WeakSet()" is pure
|
||||
e.can_be_unwrapped_if_unused = .if_unused;
|
||||
|
||||
return;
|
||||
return null;
|
||||
}
|
||||
|
||||
if (n == 1) {
|
||||
@@ -50,6 +190,7 @@ pub const KnownGlobal = enum {
|
||||
},
|
||||
}
|
||||
}
|
||||
return null;
|
||||
},
|
||||
.Date => {
|
||||
const n = e.args.len;
|
||||
@@ -58,7 +199,7 @@ pub const KnownGlobal = enum {
|
||||
// "new Date()" is pure
|
||||
e.can_be_unwrapped_if_unused = .if_unused;
|
||||
|
||||
return;
|
||||
return null;
|
||||
}
|
||||
|
||||
if (n == 1) {
|
||||
@@ -78,6 +219,7 @@ pub const KnownGlobal = enum {
|
||||
},
|
||||
}
|
||||
}
|
||||
return null;
|
||||
},
|
||||
|
||||
.Set => {
|
||||
@@ -86,7 +228,7 @@ pub const KnownGlobal = enum {
|
||||
if (n == 0) {
|
||||
// "new Set()" is pure
|
||||
e.can_be_unwrapped_if_unused = .if_unused;
|
||||
return;
|
||||
return null;
|
||||
}
|
||||
|
||||
if (n == 1) {
|
||||
@@ -102,6 +244,7 @@ pub const KnownGlobal = enum {
|
||||
},
|
||||
}
|
||||
}
|
||||
return null;
|
||||
},
|
||||
|
||||
.Headers => {
|
||||
@@ -111,8 +254,9 @@ pub const KnownGlobal = enum {
|
||||
// "new Headers()" is pure
|
||||
e.can_be_unwrapped_if_unused = .if_unused;
|
||||
|
||||
return;
|
||||
return null;
|
||||
}
|
||||
return null;
|
||||
},
|
||||
|
||||
.Response => {
|
||||
@@ -122,7 +266,7 @@ pub const KnownGlobal = enum {
|
||||
// "new Response()" is pure
|
||||
e.can_be_unwrapped_if_unused = .if_unused;
|
||||
|
||||
return;
|
||||
return null;
|
||||
}
|
||||
|
||||
if (n == 1) {
|
||||
@@ -142,6 +286,7 @@ pub const KnownGlobal = enum {
|
||||
},
|
||||
}
|
||||
}
|
||||
return null;
|
||||
},
|
||||
.TextDecoder, .TextEncoder => {
|
||||
const n = e.args.len;
|
||||
@@ -151,11 +296,12 @@ pub const KnownGlobal = enum {
|
||||
// "new TextDecoder()" is pure
|
||||
e.can_be_unwrapped_if_unused = .if_unused;
|
||||
|
||||
return;
|
||||
return null;
|
||||
}
|
||||
|
||||
// We _could_ validate the encoding argument
|
||||
// But let's not bother
|
||||
return null;
|
||||
},
|
||||
|
||||
.Map => {
|
||||
@@ -164,7 +310,7 @@ pub const KnownGlobal = enum {
|
||||
if (n == 0) {
|
||||
// "new Map()" is pure
|
||||
e.can_be_unwrapped_if_unused = .if_unused;
|
||||
return;
|
||||
return null;
|
||||
}
|
||||
|
||||
if (n == 1) {
|
||||
@@ -193,18 +339,20 @@ pub const KnownGlobal = enum {
|
||||
},
|
||||
}
|
||||
}
|
||||
return null;
|
||||
},
|
||||
}
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
const string = []const u8;
|
||||
|
||||
const std = @import("std");
|
||||
|
||||
const bun = @import("bun");
|
||||
const js_parser = bun.js_parser;
|
||||
const logger = bun.logger;
|
||||
|
||||
const js_ast = bun.ast;
|
||||
const E = js_ast.E;
|
||||
const Symbol = js_ast.Symbol;
|
||||
|
||||
const std = @import("std");
|
||||
const Map = std.AutoHashMapUnmanaged;
|
||||
|
||||
@@ -386,7 +386,7 @@ pub const Runner = struct {
|
||||
const result = Expr.init(
|
||||
E.Array,
|
||||
E.Array{
|
||||
.items = ExprNodeList.init(&[_]Expr{}),
|
||||
.items = ExprNodeList.empty,
|
||||
.was_originally_macro = true,
|
||||
},
|
||||
this.caller.loc,
|
||||
@@ -398,7 +398,7 @@ pub const Runner = struct {
|
||||
var out = Expr.init(
|
||||
E.Array,
|
||||
E.Array{
|
||||
.items = ExprNodeList.init(array[0..0]),
|
||||
.items = ExprNodeList.empty,
|
||||
.was_originally_macro = true,
|
||||
},
|
||||
this.caller.loc,
|
||||
@@ -413,7 +413,7 @@ pub const Runner = struct {
|
||||
continue;
|
||||
i += 1;
|
||||
}
|
||||
out.data.e_array.items = ExprNodeList.init(array);
|
||||
out.data.e_array.items = ExprNodeList.fromOwnedSlice(array);
|
||||
_entry.value_ptr.* = out;
|
||||
return out;
|
||||
},
|
||||
@@ -438,27 +438,37 @@ pub const Runner = struct {
|
||||
.include_value = true,
|
||||
}).init(this.global, obj);
|
||||
defer object_iter.deinit();
|
||||
var properties = this.allocator.alloc(G.Property, object_iter.len) catch unreachable;
|
||||
errdefer this.allocator.free(properties);
|
||||
var out = Expr.init(
|
||||
|
||||
const out = _entry.value_ptr;
|
||||
out.* = Expr.init(
|
||||
E.Object,
|
||||
E.Object{
|
||||
.properties = BabyList(G.Property).init(properties),
|
||||
.properties = bun.handleOom(
|
||||
G.Property.List.initCapacity(this.allocator, object_iter.len),
|
||||
),
|
||||
.was_originally_macro = true,
|
||||
},
|
||||
this.caller.loc,
|
||||
);
|
||||
_entry.value_ptr.* = out;
|
||||
const properties = &out.data.e_object.properties;
|
||||
errdefer properties.clearAndFree(this.allocator);
|
||||
|
||||
while (try object_iter.next()) |prop| {
|
||||
properties[object_iter.i] = G.Property{
|
||||
.key = Expr.init(E.String, E.String.init(prop.toOwnedSlice(this.allocator) catch unreachable), this.caller.loc),
|
||||
bun.assertf(
|
||||
object_iter.i == properties.len,
|
||||
"`properties` unexpectedly modified (length {d}, expected {d})",
|
||||
.{ properties.len, object_iter.i },
|
||||
);
|
||||
properties.appendAssumeCapacity(G.Property{
|
||||
.key = Expr.init(
|
||||
E.String,
|
||||
E.String.init(prop.toOwnedSlice(this.allocator) catch unreachable),
|
||||
this.caller.loc,
|
||||
),
|
||||
.value = try this.run(object_iter.value),
|
||||
};
|
||||
});
|
||||
}
|
||||
out.data.e_object.properties = BabyList(G.Property).init(properties[0..object_iter.i]);
|
||||
_entry.value_ptr.* = out;
|
||||
return out;
|
||||
return out.*;
|
||||
},
|
||||
|
||||
.JSON => {
|
||||
@@ -644,7 +654,6 @@ const Resolver = @import("../resolver/resolver.zig").Resolver;
|
||||
const isPackagePath = @import("../resolver/resolver.zig").isPackagePath;
|
||||
|
||||
const bun = @import("bun");
|
||||
const BabyList = bun.BabyList;
|
||||
const Environment = bun.Environment;
|
||||
const Output = bun.Output;
|
||||
const Transpiler = bun.Transpiler;
|
||||
|
||||
184
src/ast/P.zig
184
src/ast/P.zig
@@ -536,7 +536,7 @@ pub fn NewParser_(
|
||||
|
||||
return p.newExpr(E.Call{
|
||||
.target = require_resolve_ref,
|
||||
.args = ExprNodeList.init(args),
|
||||
.args = ExprNodeList.fromOwnedSlice(args),
|
||||
}, arg.loc);
|
||||
}
|
||||
|
||||
@@ -570,7 +570,7 @@ pub fn NewParser_(
|
||||
return p.newExpr(
|
||||
E.Call{
|
||||
.target = p.valueForRequire(arg.loc),
|
||||
.args = ExprNodeList.init(args),
|
||||
.args = ExprNodeList.fromOwnedSlice(args),
|
||||
},
|
||||
arg.loc,
|
||||
);
|
||||
@@ -648,7 +648,7 @@ pub fn NewParser_(
|
||||
return p.newExpr(
|
||||
E.Call{
|
||||
.target = p.valueForRequire(arg.loc),
|
||||
.args = ExprNodeList.init(args),
|
||||
.args = ExprNodeList.fromOwnedSlice(args),
|
||||
},
|
||||
arg.loc,
|
||||
);
|
||||
@@ -955,7 +955,7 @@ pub fn NewParser_(
|
||||
.e_identifier => |ident| {
|
||||
// is this a require("something")
|
||||
if (strings.eqlComptime(p.loadNameFromRef(ident.ref), "require") and call.args.len == 1 and std.meta.activeTag(call.args.ptr[0].data) == .e_string) {
|
||||
_ = p.addImportRecord(.require, loc, call.args.first_().data.e_string.string(p.allocator) catch unreachable);
|
||||
_ = p.addImportRecord(.require, loc, call.args.at(0).data.e_string.string(p.allocator) catch unreachable);
|
||||
}
|
||||
},
|
||||
else => {},
|
||||
@@ -971,7 +971,7 @@ pub fn NewParser_(
|
||||
.e_identifier => |ident| {
|
||||
// is this a require("something")
|
||||
if (strings.eqlComptime(p.loadNameFromRef(ident.ref), "require") and call.args.len == 1 and std.meta.activeTag(call.args.ptr[0].data) == .e_string) {
|
||||
_ = p.addImportRecord(.require, loc, call.args.first_().data.e_string.string(p.allocator) catch unreachable);
|
||||
_ = p.addImportRecord(.require, loc, call.args.at(0).data.e_string.string(p.allocator) catch unreachable);
|
||||
}
|
||||
},
|
||||
else => {},
|
||||
@@ -1250,7 +1250,7 @@ pub fn NewParser_(
|
||||
.ref = namespace_ref,
|
||||
.is_top_level = true,
|
||||
});
|
||||
try p.module_scope.generated.push(allocator, namespace_ref);
|
||||
try p.module_scope.generated.append(allocator, namespace_ref);
|
||||
for (imports, clause_items) |alias, *clause_item| {
|
||||
const ref = symbols.get(alias) orelse unreachable;
|
||||
const alias_name = if (@TypeOf(symbols) == RuntimeImports) RuntimeImports.all[alias] else alias;
|
||||
@@ -1305,7 +1305,7 @@ pub fn NewParser_(
|
||||
parts.append(js_ast.Part{
|
||||
.stmts = stmts,
|
||||
.declared_symbols = declared_symbols,
|
||||
.import_record_indices = bun.BabyList(u32).init(import_records),
|
||||
.import_record_indices = bun.BabyList(u32).fromOwnedSlice(import_records),
|
||||
.tag = .runtime,
|
||||
}) catch unreachable;
|
||||
}
|
||||
@@ -1360,7 +1360,7 @@ pub fn NewParser_(
|
||||
.ref = namespace_ref,
|
||||
.is_top_level = true,
|
||||
});
|
||||
try p.module_scope.generated.push(allocator, namespace_ref);
|
||||
try p.module_scope.generated.append(allocator, namespace_ref);
|
||||
|
||||
for (clauses) |entry| {
|
||||
if (entry.enabled) {
|
||||
@@ -1374,7 +1374,7 @@ pub fn NewParser_(
|
||||
.name = LocRef{ .ref = entry.ref, .loc = logger.Loc{} },
|
||||
});
|
||||
declared_symbols.appendAssumeCapacity(.{ .ref = entry.ref, .is_top_level = true });
|
||||
try p.module_scope.generated.push(allocator, entry.ref);
|
||||
try p.module_scope.generated.append(allocator, entry.ref);
|
||||
try p.is_import_item.put(allocator, entry.ref, {});
|
||||
try p.named_imports.put(allocator, entry.ref, .{
|
||||
.alias = entry.name,
|
||||
@@ -2113,7 +2113,7 @@ pub fn NewParser_(
|
||||
//
|
||||
const hoisted_ref = p.newSymbol(.hoisted, symbol.original_name) catch unreachable;
|
||||
symbols = p.symbols.items;
|
||||
scope.generated.push(p.allocator, hoisted_ref) catch unreachable;
|
||||
bun.handleOom(scope.generated.append(p.allocator, hoisted_ref));
|
||||
p.hoisted_ref_for_sloppy_mode_block_fn.put(p.allocator, value.ref, hoisted_ref) catch unreachable;
|
||||
value.ref = hoisted_ref;
|
||||
symbol = &symbols[hoisted_ref.innerIndex()];
|
||||
@@ -2258,7 +2258,7 @@ pub fn NewParser_(
|
||||
.generated = .{},
|
||||
};
|
||||
|
||||
try parent.children.push(allocator, scope);
|
||||
try parent.children.append(allocator, scope);
|
||||
scope.strict_mode = parent.strict_mode;
|
||||
|
||||
p.current_scope = scope;
|
||||
@@ -2569,7 +2569,7 @@ pub fn NewParser_(
|
||||
const name = try strings.append(p.allocator, "import_", try path_name.nonUniqueNameString(p.allocator));
|
||||
stmt.namespace_ref = try p.newSymbol(.other, name);
|
||||
var scope: *Scope = p.current_scope;
|
||||
try scope.generated.push(p.allocator, stmt.namespace_ref);
|
||||
try scope.generated.append(p.allocator, stmt.namespace_ref);
|
||||
}
|
||||
|
||||
var item_refs = ImportItemForNamespaceMap.init(p.allocator);
|
||||
@@ -2761,7 +2761,7 @@ pub fn NewParser_(
|
||||
|
||||
var scope = p.current_scope;
|
||||
|
||||
try scope.generated.push(p.allocator, name.ref.?);
|
||||
try scope.generated.append(p.allocator, name.ref.?);
|
||||
|
||||
return name;
|
||||
}
|
||||
@@ -3067,7 +3067,7 @@ pub fn NewParser_(
|
||||
// this module will be unable to reference this symbol. However, we must
|
||||
// still add the symbol to the scope so it gets minified (automatically-
|
||||
// generated code may still reference the symbol).
|
||||
try p.module_scope.generated.push(p.allocator, ref);
|
||||
try p.module_scope.generated.append(p.allocator, ref);
|
||||
return ref;
|
||||
}
|
||||
|
||||
@@ -3141,7 +3141,7 @@ pub fn NewParser_(
|
||||
entry.key_ptr.* = name;
|
||||
entry.value_ptr.* = js_ast.Scope.Member{ .ref = ref, .loc = loc };
|
||||
if (comptime is_generated) {
|
||||
try p.module_scope.generated.push(p.allocator, ref);
|
||||
try p.module_scope.generated.append(p.allocator, ref);
|
||||
}
|
||||
return ref;
|
||||
}
|
||||
@@ -3448,7 +3448,10 @@ pub fn NewParser_(
|
||||
decls[0] = Decl{
|
||||
.binding = p.b(B.Identifier{ .ref = ref }, local.loc),
|
||||
};
|
||||
try partStmts.append(p.s(S.Local{ .decls = G.Decl.List.init(decls) }, local.loc));
|
||||
try partStmts.append(p.s(
|
||||
S.Local{ .decls = G.Decl.List.fromOwnedSlice(decls) },
|
||||
local.loc,
|
||||
));
|
||||
try p.declared_symbols.append(p.allocator, .{ .ref = ref, .is_top_level = true });
|
||||
}
|
||||
}
|
||||
@@ -3463,7 +3466,7 @@ pub fn NewParser_(
|
||||
.symbol_uses = p.symbol_uses,
|
||||
.import_symbol_property_uses = p.import_symbol_property_uses,
|
||||
.declared_symbols = p.declared_symbols.toOwnedSlice(),
|
||||
.import_record_indices = bun.BabyList(u32).init(
|
||||
.import_record_indices = bun.BabyList(u32).fromOwnedSlice(
|
||||
p.import_records_for_current_part.toOwnedSlice(
|
||||
p.allocator,
|
||||
) catch unreachable,
|
||||
@@ -3975,7 +3978,7 @@ pub fn NewParser_(
|
||||
// checks are not yet handled correctly by bun or esbuild, so this possibility is
|
||||
// currently ignored.
|
||||
.un_typeof => {
|
||||
if (ex.value.data == .e_identifier) {
|
||||
if (ex.value.data == .e_identifier and ex.flags.was_originally_typeof_identifier) {
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -4014,6 +4017,18 @@ pub fn NewParser_(
|
||||
ex.right.data,
|
||||
) and
|
||||
p.exprCanBeRemovedIfUnusedWithoutDCECheck(&ex.left) and p.exprCanBeRemovedIfUnusedWithoutDCECheck(&ex.right),
|
||||
|
||||
// Special-case "<" and ">" with string, number, or bigint arguments
|
||||
.bin_lt, .bin_gt, .bin_le, .bin_ge => {
|
||||
const left = ex.left.knownPrimitive();
|
||||
const right = ex.right.knownPrimitive();
|
||||
switch (left) {
|
||||
.string, .number, .bigint => {
|
||||
return right == left and p.exprCanBeRemovedIfUnusedWithoutDCECheck(&ex.left) and p.exprCanBeRemovedIfUnusedWithoutDCECheck(&ex.right);
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
},
|
||||
@@ -4234,13 +4249,14 @@ pub fn NewParser_(
|
||||
// return false;
|
||||
// }
|
||||
|
||||
fn isSideEffectFreeUnboundIdentifierRef(p: *P, value: Expr, guard_condition: Expr, is_yes_branch: bool) bool {
|
||||
fn isSideEffectFreeUnboundIdentifierRef(p: *P, value: Expr, guard_condition: Expr, is_yes_branch_: bool) bool {
|
||||
if (value.data != .e_identifier or
|
||||
p.symbols.items[value.data.e_identifier.ref.innerIndex()].kind != .unbound or
|
||||
guard_condition.data != .e_binary)
|
||||
return false;
|
||||
|
||||
const binary = guard_condition.data.e_binary.*;
|
||||
var is_yes_branch = is_yes_branch_;
|
||||
|
||||
switch (binary.op) {
|
||||
.bin_strict_eq, .bin_strict_ne, .bin_loose_eq, .bin_loose_ne => {
|
||||
@@ -4269,6 +4285,39 @@ pub fn NewParser_(
|
||||
(binary.op == .bin_strict_ne or binary.op == .bin_loose_ne)) and
|
||||
id.eql(id2);
|
||||
},
|
||||
.bin_lt, .bin_gt, .bin_le, .bin_ge => {
|
||||
// Pattern match for "typeof x < <string>"
|
||||
var typeof: Expr.Data = binary.left.data;
|
||||
var str: Expr.Data = binary.right.data;
|
||||
|
||||
// Check if order is flipped: 'u' >= typeof x
|
||||
if (typeof == .e_string) {
|
||||
typeof = binary.right.data;
|
||||
str = binary.left.data;
|
||||
is_yes_branch = !is_yes_branch;
|
||||
}
|
||||
|
||||
if (typeof == .e_unary and str == .e_string) {
|
||||
const unary = typeof.e_unary.*;
|
||||
if (unary.op == .un_typeof and
|
||||
unary.value.data == .e_identifier and
|
||||
unary.flags.was_originally_typeof_identifier and
|
||||
str.e_string.eqlComptime("u"))
|
||||
{
|
||||
// In "typeof x < 'u' ? x : null", the reference to "x" is side-effect free
|
||||
// In "typeof x > 'u' ? x : null", the reference to "x" is side-effect free
|
||||
if (is_yes_branch == (binary.op == .bin_lt or binary.op == .bin_le)) {
|
||||
const id = value.data.e_identifier.ref;
|
||||
const id2 = unary.value.data.e_identifier.ref;
|
||||
if (id.eql(id2)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
},
|
||||
else => return false,
|
||||
}
|
||||
}
|
||||
@@ -4297,7 +4346,7 @@ pub fn NewParser_(
|
||||
.ref = (p.declareGeneratedSymbol(.other, symbol_name) catch unreachable),
|
||||
};
|
||||
|
||||
p.module_scope.generated.push(p.allocator, loc_ref.ref.?) catch unreachable;
|
||||
bun.handleOom(p.module_scope.generated.append(p.allocator, loc_ref.ref.?));
|
||||
p.is_import_item.put(p.allocator, loc_ref.ref.?, {}) catch unreachable;
|
||||
@field(p.jsx_imports, @tagName(field)) = loc_ref;
|
||||
break :brk loc_ref.ref.?;
|
||||
@@ -4399,7 +4448,7 @@ pub fn NewParser_(
|
||||
var local = p.s(
|
||||
S.Local{
|
||||
.is_export = true,
|
||||
.decls = Decl.List.init(decls),
|
||||
.decls = Decl.List.fromOwnedSlice(decls),
|
||||
},
|
||||
loc,
|
||||
);
|
||||
@@ -4420,7 +4469,7 @@ pub fn NewParser_(
|
||||
var local = p.s(
|
||||
S.Local{
|
||||
.is_export = true,
|
||||
.decls = Decl.List.init(decls),
|
||||
.decls = Decl.List.fromOwnedSlice(decls),
|
||||
},
|
||||
loc,
|
||||
);
|
||||
@@ -4542,7 +4591,7 @@ pub fn NewParser_(
|
||||
stmts.append(
|
||||
p.s(S.Local{
|
||||
.kind = .k_var,
|
||||
.decls = G.Decl.List.init(decls),
|
||||
.decls = G.Decl.List.fromOwnedSlice(decls),
|
||||
.is_export = is_export,
|
||||
}, stmt_loc),
|
||||
) catch |err| bun.handleOom(err);
|
||||
@@ -4551,7 +4600,7 @@ pub fn NewParser_(
|
||||
stmts.append(
|
||||
p.s(S.Local{
|
||||
.kind = .k_let,
|
||||
.decls = G.Decl.List.init(decls),
|
||||
.decls = G.Decl.List.fromOwnedSlice(decls),
|
||||
}, stmt_loc),
|
||||
) catch |err| bun.handleOom(err);
|
||||
}
|
||||
@@ -4636,7 +4685,7 @@ pub fn NewParser_(
|
||||
const call = p.newExpr(
|
||||
E.Call{
|
||||
.target = target,
|
||||
.args = ExprNodeList.init(args_list),
|
||||
.args = ExprNodeList.fromOwnedSlice(args_list),
|
||||
// TODO: make these fully tree-shakable. this annotation
|
||||
// as-is is incorrect. This would be done by changing all
|
||||
// enum wrappers into `var Enum = ...` instead of two
|
||||
@@ -4691,18 +4740,16 @@ pub fn NewParser_(
|
||||
for (func.func.args, 0..) |arg, i| {
|
||||
for (arg.ts_decorators.ptr[0..arg.ts_decorators.len]) |arg_decorator| {
|
||||
var decorators = if (is_constructor)
|
||||
class.ts_decorators.listManaged(p.allocator)
|
||||
&class.ts_decorators
|
||||
else
|
||||
prop.ts_decorators.listManaged(p.allocator);
|
||||
&prop.ts_decorators;
|
||||
const args = p.allocator.alloc(Expr, 2) catch unreachable;
|
||||
args[0] = p.newExpr(E.Number{ .value = @as(f64, @floatFromInt(i)) }, arg_decorator.loc);
|
||||
args[1] = arg_decorator;
|
||||
decorators.append(p.callRuntime(arg_decorator.loc, "__legacyDecorateParamTS", args)) catch unreachable;
|
||||
if (is_constructor) {
|
||||
class.ts_decorators.update(decorators);
|
||||
} else {
|
||||
prop.ts_decorators.update(decorators);
|
||||
}
|
||||
decorators.append(
|
||||
p.allocator,
|
||||
p.callRuntime(arg_decorator.loc, "__legacyDecorateParamTS", args),
|
||||
) catch |err| bun.handleOom(err);
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -4732,7 +4779,7 @@ pub fn NewParser_(
|
||||
target = p.newExpr(E.Dot{ .target = p.newExpr(E.Identifier{ .ref = class.class_name.?.ref.? }, class.class_name.?.loc), .name = "prototype", .name_loc = loc }, loc);
|
||||
}
|
||||
|
||||
var array = prop.ts_decorators.listManaged(p.allocator);
|
||||
var array: std.ArrayList(Expr) = .init(p.allocator);
|
||||
|
||||
if (p.options.features.emit_decorator_metadata) {
|
||||
switch (prop.kind) {
|
||||
@@ -4757,7 +4804,7 @@ pub fn NewParser_(
|
||||
entry.* = p.serializeMetadata(method_arg.ts_metadata) catch unreachable;
|
||||
}
|
||||
|
||||
args[1] = p.newExpr(E.Array{ .items = ExprNodeList.init(args_array) }, logger.Loc.Empty);
|
||||
args[1] = p.newExpr(E.Array{ .items = ExprNodeList.fromOwnedSlice(args_array) }, logger.Loc.Empty);
|
||||
|
||||
array.append(p.callRuntime(loc, "__legacyMetadataTS", args)) catch unreachable;
|
||||
}
|
||||
@@ -4782,7 +4829,7 @@ pub fn NewParser_(
|
||||
{
|
||||
var args = p.allocator.alloc(Expr, 2) catch unreachable;
|
||||
args[0] = p.newExpr(E.String{ .data = "design:paramtypes" }, logger.Loc.Empty);
|
||||
args[1] = p.newExpr(E.Array{ .items = ExprNodeList.init(&[_]Expr{}) }, logger.Loc.Empty);
|
||||
args[1] = p.newExpr(E.Array{ .items = ExprNodeList.empty }, logger.Loc.Empty);
|
||||
array.append(p.callRuntime(loc, "__legacyMetadataTS", args)) catch unreachable;
|
||||
}
|
||||
}
|
||||
@@ -4802,7 +4849,7 @@ pub fn NewParser_(
|
||||
entry.* = p.serializeMetadata(method_arg.ts_metadata) catch unreachable;
|
||||
}
|
||||
|
||||
args[1] = p.newExpr(E.Array{ .items = ExprNodeList.init(args_array) }, logger.Loc.Empty);
|
||||
args[1] = p.newExpr(E.Array{ .items = ExprNodeList.fromOwnedSlice(args_array) }, logger.Loc.Empty);
|
||||
|
||||
array.append(p.callRuntime(loc, "__legacyMetadataTS", args)) catch unreachable;
|
||||
}
|
||||
@@ -4819,8 +4866,9 @@ pub fn NewParser_(
|
||||
}
|
||||
}
|
||||
|
||||
bun.handleOom(array.insertSlice(0, prop.ts_decorators.slice()));
|
||||
const args = p.allocator.alloc(Expr, 4) catch unreachable;
|
||||
args[0] = p.newExpr(E.Array{ .items = ExprNodeList.init(array.items) }, loc);
|
||||
args[0] = p.newExpr(E.Array{ .items = ExprNodeList.moveFromList(&array) }, loc);
|
||||
args[1] = target;
|
||||
args[2] = descriptor_key;
|
||||
args[3] = descriptor_kind;
|
||||
@@ -4882,10 +4930,10 @@ pub fn NewParser_(
|
||||
if (class.extends != null) {
|
||||
const target = p.newExpr(E.Super{}, stmt.loc);
|
||||
const arguments_ref = p.newSymbol(.unbound, arguments_str) catch unreachable;
|
||||
p.current_scope.generated.push(p.allocator, arguments_ref) catch unreachable;
|
||||
bun.handleOom(p.current_scope.generated.append(p.allocator, arguments_ref));
|
||||
|
||||
const super = p.newExpr(E.Spread{ .value = p.newExpr(E.Identifier{ .ref = arguments_ref }, stmt.loc) }, stmt.loc);
|
||||
const args = ExprNodeList.one(p.allocator, super) catch unreachable;
|
||||
const args = bun.handleOom(ExprNodeList.initOne(p.allocator, super));
|
||||
|
||||
constructor_stmts.append(p.s(S.SExpr{ .value = p.newExpr(E.Call{ .target = target, .args = args }, stmt.loc) }, stmt.loc)) catch unreachable;
|
||||
}
|
||||
@@ -4933,7 +4981,7 @@ pub fn NewParser_(
|
||||
stmts.appendSliceAssumeCapacity(instance_decorators.items);
|
||||
stmts.appendSliceAssumeCapacity(static_decorators.items);
|
||||
if (class.ts_decorators.len > 0) {
|
||||
var array = class.ts_decorators.listManaged(p.allocator);
|
||||
var array = class.ts_decorators.moveToListManaged(p.allocator);
|
||||
|
||||
if (p.options.features.emit_decorator_metadata) {
|
||||
if (constructor_function != null) {
|
||||
@@ -4949,9 +4997,9 @@ pub fn NewParser_(
|
||||
param_array[i] = p.serializeMetadata(constructor_arg.ts_metadata) catch unreachable;
|
||||
}
|
||||
|
||||
args[1] = p.newExpr(E.Array{ .items = ExprNodeList.init(param_array) }, logger.Loc.Empty);
|
||||
args[1] = p.newExpr(E.Array{ .items = ExprNodeList.fromOwnedSlice(param_array) }, logger.Loc.Empty);
|
||||
} else {
|
||||
args[1] = p.newExpr(E.Array{ .items = ExprNodeList.init(&[_]Expr{}) }, logger.Loc.Empty);
|
||||
args[1] = p.newExpr(E.Array{ .items = ExprNodeList.empty }, logger.Loc.Empty);
|
||||
}
|
||||
|
||||
array.append(p.callRuntime(stmt.loc, "__legacyMetadataTS", args)) catch unreachable;
|
||||
@@ -4959,7 +5007,7 @@ pub fn NewParser_(
|
||||
}
|
||||
|
||||
const args = p.allocator.alloc(Expr, 2) catch unreachable;
|
||||
args[0] = p.newExpr(E.Array{ .items = ExprNodeList.init(array.items) }, stmt.loc);
|
||||
args[0] = p.newExpr(E.Array{ .items = ExprNodeList.fromOwnedSlice(array.items) }, stmt.loc);
|
||||
args[1] = p.newExpr(E.Identifier{ .ref = class.class_name.?.ref.? }, class.class_name.?.loc);
|
||||
|
||||
stmts.appendAssumeCapacity(Stmt.assign(
|
||||
@@ -5369,7 +5417,7 @@ pub fn NewParser_(
|
||||
name,
|
||||
loc_ref.ref.?,
|
||||
);
|
||||
p.module_scope.generated.push(p.allocator, loc_ref.ref.?) catch unreachable;
|
||||
bun.handleOom(p.module_scope.generated.append(p.allocator, loc_ref.ref.?));
|
||||
return loc_ref.ref.?;
|
||||
}
|
||||
} else {
|
||||
@@ -5393,7 +5441,7 @@ pub fn NewParser_(
|
||||
return p.newExpr(
|
||||
E.Call{
|
||||
.target = p.runtimeIdentifier(loc, name),
|
||||
.args = ExprNodeList.init(args),
|
||||
.args = ExprNodeList.fromOwnedSlice(args),
|
||||
},
|
||||
loc,
|
||||
);
|
||||
@@ -5453,7 +5501,7 @@ pub fn NewParser_(
|
||||
|
||||
for (to_flatten.children.slice()) |item| {
|
||||
item.parent = parent;
|
||||
parent.children.push(p.allocator, item) catch unreachable;
|
||||
bun.handleOom(parent.children.append(p.allocator, item));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5474,7 +5522,7 @@ pub fn NewParser_(
|
||||
.ref = ref,
|
||||
}) catch |err| bun.handleOom(err);
|
||||
|
||||
bun.handleOom(scope.generated.append(p.allocator, &.{ref}));
|
||||
bun.handleOom(scope.generated.append(p.allocator, ref));
|
||||
|
||||
return ref;
|
||||
}
|
||||
@@ -5664,7 +5712,7 @@ pub fn NewParser_(
|
||||
}
|
||||
|
||||
const is_top_level = scope == p.module_scope;
|
||||
scope.generated.append(p.allocator, &.{
|
||||
scope.generated.appendSlice(p.allocator, &.{
|
||||
ctx.stack_ref,
|
||||
caught_ref,
|
||||
err_ref,
|
||||
@@ -5704,7 +5752,7 @@ pub fn NewParser_(
|
||||
const finally_stmts = finally: {
|
||||
if (ctx.has_await_using) {
|
||||
const promise_ref = p.generateTempRef("_promise");
|
||||
bun.handleOom(scope.generated.append(p.allocator, &.{promise_ref}));
|
||||
bun.handleOom(scope.generated.append(p.allocator, promise_ref));
|
||||
p.declared_symbols.appendAssumeCapacity(.{ .is_top_level = is_top_level, .ref = promise_ref });
|
||||
|
||||
const promise_ref_expr = p.newExpr(E.Identifier{ .ref = promise_ref }, loc);
|
||||
@@ -5722,7 +5770,7 @@ pub fn NewParser_(
|
||||
.binding = p.b(B.Identifier{ .ref = promise_ref }, loc),
|
||||
.value = call_dispose,
|
||||
};
|
||||
break :decls G.Decl.List.init(decls);
|
||||
break :decls G.Decl.List.fromOwnedSlice(decls);
|
||||
},
|
||||
}, loc);
|
||||
|
||||
@@ -5758,7 +5806,7 @@ pub fn NewParser_(
|
||||
.binding = p.b(B.Identifier{ .ref = ctx.stack_ref }, loc),
|
||||
.value = p.newExpr(E.Array{}, loc),
|
||||
};
|
||||
break :decls G.Decl.List.init(decls);
|
||||
break :decls G.Decl.List.fromOwnedSlice(decls);
|
||||
},
|
||||
.kind = .k_let,
|
||||
}, loc));
|
||||
@@ -5780,7 +5828,7 @@ pub fn NewParser_(
|
||||
.binding = p.b(B.Identifier{ .ref = has_err_ref }, loc),
|
||||
.value = p.newExpr(E.Number{ .value = 1 }, loc),
|
||||
};
|
||||
break :decls G.Decl.List.init(decls);
|
||||
break :decls G.Decl.List.fromOwnedSlice(decls);
|
||||
},
|
||||
}, loc);
|
||||
break :catch_body statements;
|
||||
@@ -6057,7 +6105,7 @@ pub fn NewParser_(
|
||||
.body = .{
|
||||
.stmts = p.allocator.dupe(Stmt, &.{
|
||||
p.s(S.Return{ .value = p.newExpr(E.Array{
|
||||
.items = ExprNodeList.init(ctx.user_hooks.values()),
|
||||
.items = ExprNodeList.fromBorrowedSliceDangerous(ctx.user_hooks.values()),
|
||||
}, loc) }, loc),
|
||||
}) catch |err| bun.handleOom(err),
|
||||
.loc = loc,
|
||||
@@ -6069,7 +6117,7 @@ pub fn NewParser_(
|
||||
// _s(func, "<hash>", force, () => [useCustom])
|
||||
return p.newExpr(E.Call{
|
||||
.target = Expr.initIdentifier(ctx.signature_cb, loc),
|
||||
.args = ExprNodeList.init(args),
|
||||
.args = ExprNodeList.fromOwnedSlice(args),
|
||||
}, loc);
|
||||
}
|
||||
|
||||
@@ -6150,11 +6198,14 @@ pub fn NewParser_(
|
||||
}
|
||||
|
||||
if (part.import_record_indices.len == 0) {
|
||||
part.import_record_indices = @TypeOf(part.import_record_indices).init(
|
||||
(p.import_records_for_current_part.clone(p.allocator) catch unreachable).items,
|
||||
);
|
||||
part.import_record_indices = .fromOwnedSlice(bun.handleOom(
|
||||
p.allocator.dupe(u32, p.import_records_for_current_part.items),
|
||||
));
|
||||
} else {
|
||||
part.import_record_indices.append(p.allocator, p.import_records_for_current_part.items) catch unreachable;
|
||||
part.import_record_indices.appendSlice(
|
||||
p.allocator,
|
||||
p.import_records_for_current_part.items,
|
||||
) catch |err| bun.handleOom(err);
|
||||
}
|
||||
|
||||
parts.items[parts_end] = part;
|
||||
@@ -6295,7 +6346,7 @@ pub fn NewParser_(
|
||||
entry.value_ptr.* = .{};
|
||||
}
|
||||
|
||||
entry.value_ptr.push(ctx.allocator, @as(u32, @truncate(ctx.part_index))) catch unreachable;
|
||||
bun.handleOom(entry.value_ptr.append(ctx.allocator, @as(u32, @truncate(ctx.part_index))));
|
||||
}
|
||||
};
|
||||
|
||||
@@ -6321,7 +6372,7 @@ pub fn NewParser_(
|
||||
entry.value_ptr.* = .{};
|
||||
}
|
||||
|
||||
entry.value_ptr.push(p.allocator, js_ast.namespace_export_part_index) catch unreachable;
|
||||
bun.handleOom(entry.value_ptr.append(p.allocator, js_ast.namespace_export_part_index));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6344,17 +6395,12 @@ pub fn NewParser_(
|
||||
break :brk Ref.None;
|
||||
};
|
||||
|
||||
const parts_list = bun.BabyList(js_ast.Part).fromList(parts);
|
||||
|
||||
return .{
|
||||
.runtime_imports = p.runtime_imports,
|
||||
.parts = parts_list,
|
||||
.module_scope = p.module_scope.*,
|
||||
.symbols = js_ast.Symbol.List.fromList(p.symbols),
|
||||
.exports_ref = p.exports_ref,
|
||||
.wrapper_ref = wrapper_ref,
|
||||
.module_ref = p.module_ref,
|
||||
.import_records = ImportRecord.List.fromList(p.import_records),
|
||||
.export_star_import_records = p.export_star_import_records.items,
|
||||
.approximate_newline_count = p.lexer.approximate_newline_count,
|
||||
.exports_kind = exports_kind,
|
||||
@@ -6394,12 +6440,14 @@ pub fn NewParser_(
|
||||
.has_commonjs_export_names = p.has_commonjs_export_names,
|
||||
|
||||
.hashbang = hashbang,
|
||||
|
||||
// TODO: cross-module constant inlining
|
||||
// .const_values = p.const_values,
|
||||
.ts_enums = try p.computeTsEnumsMap(allocator),
|
||||
|
||||
.import_meta_ref = p.import_meta_ref,
|
||||
|
||||
.symbols = js_ast.Symbol.List.moveFromList(&p.symbols),
|
||||
.parts = bun.BabyList(js_ast.Part).moveFromList(parts),
|
||||
.import_records = ImportRecord.List.moveFromList(&p.import_records),
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -188,7 +188,7 @@ pub const Parser = struct {
|
||||
// in the `symbols` array.
|
||||
bun.assert(p.symbols.items.len == 0);
|
||||
var symbols_ = symbols;
|
||||
p.symbols = symbols_.listManaged(p.allocator);
|
||||
p.symbols = symbols_.moveToListManaged(p.allocator);
|
||||
|
||||
try p.prepareForVisitPass();
|
||||
|
||||
@@ -550,10 +550,7 @@ pub const Parser = struct {
|
||||
var sliced = try ListManaged(Stmt).initCapacity(p.allocator, 1);
|
||||
sliced.items.len = 1;
|
||||
var _local = local.*;
|
||||
var list = try ListManaged(G.Decl).initCapacity(p.allocator, 1);
|
||||
list.items.len = 1;
|
||||
list.items[0] = decl;
|
||||
_local.decls.update(list);
|
||||
_local.decls = try .initOne(p.allocator, decl);
|
||||
sliced.items[0] = p.s(_local, stmt.loc);
|
||||
try p.appendPart(&parts, sliced.items);
|
||||
}
|
||||
@@ -686,7 +683,7 @@ pub const Parser = struct {
|
||||
var part_stmts = p.allocator.alloc(Stmt, 1) catch unreachable;
|
||||
part_stmts[0] = p.s(S.Local{
|
||||
.kind = .k_var,
|
||||
.decls = Decl.List.init(decls),
|
||||
.decls = Decl.List.fromOwnedSlice(decls),
|
||||
}, logger.Loc.Empty);
|
||||
before.append(js_ast.Part{
|
||||
.stmts = part_stmts,
|
||||
@@ -713,7 +710,7 @@ pub const Parser = struct {
|
||||
var import_part_stmts = remaining_stmts[0..1];
|
||||
remaining_stmts = remaining_stmts[1..];
|
||||
|
||||
bun.handleOom(p.module_scope.generated.push(p.allocator, deferred_import.namespace.ref.?));
|
||||
bun.handleOom(p.module_scope.generated.append(p.allocator, deferred_import.namespace.ref.?));
|
||||
|
||||
import_part_stmts[0] = Stmt.alloc(
|
||||
S.Import,
|
||||
@@ -835,7 +832,7 @@ pub const Parser = struct {
|
||||
part.symbol_uses = .{};
|
||||
return js_ast.Result{
|
||||
.ast = js_ast.Ast{
|
||||
.import_records = ImportRecord.List.init(p.import_records.items),
|
||||
.import_records = ImportRecord.List.moveFromList(&p.import_records),
|
||||
.redirect_import_record_index = id,
|
||||
.named_imports = p.named_imports,
|
||||
.named_exports = p.named_exports,
|
||||
@@ -905,7 +902,10 @@ pub const Parser = struct {
|
||||
break :brk new_stmts.items;
|
||||
};
|
||||
|
||||
part.import_record_indices.push(p.allocator, right.data.e_require_string.import_record_index) catch unreachable;
|
||||
part.import_record_indices.append(
|
||||
p.allocator,
|
||||
right.data.e_require_string.import_record_index,
|
||||
) catch |err| bun.handleOom(err);
|
||||
p.symbols.items[p.module_ref.innerIndex()].use_count_estimate = 0;
|
||||
p.symbols.items[namespace_ref.innerIndex()].use_count_estimate -|= 1;
|
||||
_ = part.symbol_uses.swapRemove(namespace_ref);
|
||||
@@ -1165,7 +1165,7 @@ pub const Parser = struct {
|
||||
var part_stmts = p.allocator.alloc(Stmt, 1) catch unreachable;
|
||||
part_stmts[0] = p.s(S.Local{
|
||||
.kind = .k_var,
|
||||
.decls = Decl.List.init(decls),
|
||||
.decls = Decl.List.fromOwnedSlice(decls),
|
||||
}, logger.Loc.Empty);
|
||||
before.append(js_ast.Part{
|
||||
.stmts = part_stmts,
|
||||
@@ -1245,7 +1245,7 @@ pub const Parser = struct {
|
||||
before.append(js_ast.Part{
|
||||
.stmts = part_stmts,
|
||||
.declared_symbols = declared_symbols,
|
||||
.import_record_indices = bun.BabyList(u32).init(import_record_indices),
|
||||
.import_record_indices = bun.BabyList(u32).fromOwnedSlice(import_record_indices),
|
||||
.tag = .bun_test,
|
||||
}) catch unreachable;
|
||||
|
||||
|
||||
@@ -153,7 +153,7 @@ pub const SideEffects = enum(u1) {
|
||||
// "typeof x" must not be transformed into if "x" since doing so could
|
||||
// cause an exception to be thrown. Instead we can just remove it since
|
||||
// "typeof x" is special-cased in the standard to never throw.
|
||||
if (std.meta.activeTag(un.value.data) == .e_identifier) {
|
||||
if (un.value.data == .e_identifier and un.flags.was_originally_typeof_identifier) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@@ -199,6 +199,10 @@ pub const SideEffects = enum(u1) {
|
||||
// "toString" and/or "valueOf" to be called.
|
||||
.bin_loose_eq,
|
||||
.bin_loose_ne,
|
||||
.bin_lt,
|
||||
.bin_gt,
|
||||
.bin_le,
|
||||
.bin_ge,
|
||||
=> {
|
||||
if (isPrimitiveWithSideEffects(bin.left.data) and isPrimitiveWithSideEffects(bin.right.data)) {
|
||||
return Expr.joinWithComma(
|
||||
@@ -207,13 +211,23 @@ pub const SideEffects = enum(u1) {
|
||||
p.allocator,
|
||||
);
|
||||
}
|
||||
// If one side is a number, the number can be printed as
|
||||
// `0` since the result being unused doesnt matter, we
|
||||
// only care to invoke the coercion.
|
||||
if (bin.left.data == .e_number) {
|
||||
bin.left.data = .{ .e_number = .{ .value = 0.0 } };
|
||||
} else if (bin.right.data == .e_number) {
|
||||
bin.right.data = .{ .e_number = .{ .value = 0.0 } };
|
||||
|
||||
switch (bin.op) {
|
||||
.bin_loose_eq,
|
||||
.bin_loose_ne,
|
||||
=> {
|
||||
// If one side is a number and the other side is a known primitive with side effects,
|
||||
// the number can be printed as `0` since the result being unused doesn't matter,
|
||||
// we only care to invoke the coercion.
|
||||
// We only do this optimization if the other side is a known primitive with side effects
|
||||
// to avoid corrupting shared nodes when the other side is an undefined identifier
|
||||
if (bin.left.data == .e_number) {
|
||||
bin.left.data = .{ .e_number = .{ .value = 0.0 } };
|
||||
} else if (bin.right.data == .e_number) {
|
||||
bin.right.data = .{ .e_number = .{ .value = 0.0 } };
|
||||
}
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
},
|
||||
|
||||
@@ -259,7 +273,8 @@ pub const SideEffects = enum(u1) {
|
||||
}
|
||||
|
||||
properties_slice = properties_slice[0..end];
|
||||
expr.data.e_object.properties = G.Property.List.init(properties_slice);
|
||||
expr.data.e_object.properties =
|
||||
G.Property.List.fromBorrowedSliceDangerous(properties_slice);
|
||||
return expr;
|
||||
}
|
||||
}
|
||||
@@ -297,16 +312,14 @@ pub const SideEffects = enum(u1) {
|
||||
for (items) |item| {
|
||||
if (item.data == .e_spread) {
|
||||
var end: usize = 0;
|
||||
for (items) |item__| {
|
||||
const item_ = item__;
|
||||
for (items) |item_| {
|
||||
if (item_.data != .e_missing) {
|
||||
items[end] = item_;
|
||||
end += 1;
|
||||
}
|
||||
|
||||
expr.data.e_array.items = ExprNodeList.init(items[0..end]);
|
||||
return expr;
|
||||
}
|
||||
expr.data.e_array.items.shrinkRetainingCapacity(end);
|
||||
return expr;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -443,7 +456,7 @@ pub const SideEffects = enum(u1) {
|
||||
findIdentifiers(decl.binding, &decls);
|
||||
}
|
||||
|
||||
local.decls.update(decls);
|
||||
local.decls = .moveFromList(&decls);
|
||||
return true;
|
||||
},
|
||||
|
||||
@@ -875,7 +888,6 @@ const js_ast = bun.ast;
|
||||
const Binding = js_ast.Binding;
|
||||
const E = js_ast.E;
|
||||
const Expr = js_ast.Expr;
|
||||
const ExprNodeList = js_ast.ExprNodeList;
|
||||
const Stmt = js_ast.Stmt;
|
||||
|
||||
const G = js_ast.G;
|
||||
|
||||
@@ -412,7 +412,7 @@ pub const Map = struct {
|
||||
}
|
||||
|
||||
pub fn initWithOneList(list: List) Map {
|
||||
const baby_list = BabyList(List).init((&list)[0..1]);
|
||||
const baby_list = BabyList(List).fromBorrowedSliceDangerous((&list)[0..1]);
|
||||
return initList(baby_list);
|
||||
}
|
||||
|
||||
|
||||
@@ -68,7 +68,7 @@ pub fn AstMaybe(
|
||||
.loc = name_loc,
|
||||
.ref = p.newSymbol(.import, name) catch unreachable,
|
||||
};
|
||||
p.module_scope.generated.push(p.allocator, new_item.ref.?) catch unreachable;
|
||||
bun.handleOom(p.module_scope.generated.append(p.allocator, new_item.ref.?));
|
||||
|
||||
import_items.put(name, new_item) catch unreachable;
|
||||
p.is_import_item.put(p.allocator, new_item.ref.?, {}) catch unreachable;
|
||||
@@ -214,7 +214,7 @@ pub fn AstMaybe(
|
||||
.other,
|
||||
std.fmt.allocPrint(p.allocator, "${any}", .{bun.fmt.fmtIdentifier(key)}) catch unreachable,
|
||||
) catch unreachable;
|
||||
p.module_scope.generated.push(p.allocator, new_ref) catch unreachable;
|
||||
bun.handleOom(p.module_scope.generated.append(p.allocator, new_ref));
|
||||
named_export_entry.value_ptr.* = .{
|
||||
.loc_ref = LocRef{
|
||||
.loc = name_loc,
|
||||
@@ -320,7 +320,7 @@ pub fn AstMaybe(
|
||||
.other,
|
||||
std.fmt.allocPrint(p.allocator, "${any}", .{bun.fmt.fmtIdentifier(name)}) catch unreachable,
|
||||
) catch unreachable;
|
||||
p.module_scope.generated.push(p.allocator, new_ref) catch unreachable;
|
||||
bun.handleOom(p.module_scope.generated.append(p.allocator, new_ref));
|
||||
named_export_entry.value_ptr.* = .{
|
||||
.loc_ref = LocRef{
|
||||
.loc = name_loc,
|
||||
@@ -493,7 +493,7 @@ pub fn AstMaybe(
|
||||
.other,
|
||||
std.fmt.allocPrint(p.allocator, "${any}", .{bun.fmt.fmtIdentifier(name)}) catch unreachable,
|
||||
) catch unreachable;
|
||||
p.module_scope.generated.push(p.allocator, new_ref) catch unreachable;
|
||||
bun.handleOom(p.module_scope.generated.append(p.allocator, new_ref));
|
||||
named_export_entry.value_ptr.* = .{
|
||||
.loc_ref = LocRef{
|
||||
.loc = name_loc,
|
||||
@@ -650,6 +650,9 @@ pub fn AstMaybe(
|
||||
E.Unary{
|
||||
.op = .un_typeof,
|
||||
.value = expr,
|
||||
.flags = .{
|
||||
.was_originally_typeof_identifier = expr.data == .e_identifier,
|
||||
},
|
||||
},
|
||||
logger.Loc.Empty,
|
||||
),
|
||||
|
||||
@@ -200,7 +200,7 @@ pub fn Parse(
|
||||
.class_name = name,
|
||||
.extends = extends,
|
||||
.close_brace_loc = close_brace_loc,
|
||||
.ts_decorators = ExprNodeList.init(class_opts.ts_decorators),
|
||||
.ts_decorators = ExprNodeList.fromOwnedSlice(class_opts.ts_decorators),
|
||||
.class_keyword = class_keyword,
|
||||
.body_loc = body_loc,
|
||||
.properties = properties.items,
|
||||
@@ -283,7 +283,7 @@ pub fn Parse(
|
||||
}
|
||||
const close_paren_loc = p.lexer.loc();
|
||||
try p.lexer.expect(.t_close_paren);
|
||||
return ExprListLoc{ .list = ExprNodeList.fromList(args), .loc = close_paren_loc };
|
||||
return ExprListLoc{ .list = ExprNodeList.moveFromList(&args), .loc = close_paren_loc };
|
||||
}
|
||||
|
||||
pub fn parseJSXPropValueIdentifier(noalias p: *P, previous_string_with_backslash_loc: *logger.Loc) !Expr {
|
||||
@@ -474,7 +474,10 @@ pub fn Parse(
|
||||
if (opts.is_async) {
|
||||
p.logExprErrors(&errors);
|
||||
const async_expr = p.newExpr(E.Identifier{ .ref = try p.storeNameInRef("async") }, loc);
|
||||
return p.newExpr(E.Call{ .target = async_expr, .args = ExprNodeList.init(items) }, loc);
|
||||
return p.newExpr(E.Call{
|
||||
.target = async_expr,
|
||||
.args = ExprNodeList.fromOwnedSlice(items),
|
||||
}, loc);
|
||||
}
|
||||
|
||||
// Is this a chain of expressions and comma operators?
|
||||
@@ -621,16 +624,17 @@ pub fn Parse(
|
||||
try p.forbidLexicalDecl(token_range.loc);
|
||||
}
|
||||
|
||||
const decls = try p.parseAndDeclareDecls(.other, opts);
|
||||
var decls_list = try p.parseAndDeclareDecls(.other, opts);
|
||||
const decls: G.Decl.List = .moveFromList(&decls_list);
|
||||
return ExprOrLetStmt{
|
||||
.stmt_or_expr = js_ast.StmtOrExpr{
|
||||
.stmt = p.s(S.Local{
|
||||
.kind = .k_let,
|
||||
.decls = G.Decl.List.fromList(decls),
|
||||
.decls = decls,
|
||||
.is_export = opts.is_export,
|
||||
}, token_range.loc),
|
||||
},
|
||||
.decls = decls.items,
|
||||
.decls = decls.slice(),
|
||||
};
|
||||
}
|
||||
},
|
||||
@@ -650,19 +654,20 @@ pub fn Parse(
|
||||
}
|
||||
// p.markSyntaxFeature(.using, token_range.loc);
|
||||
opts.is_using_statement = true;
|
||||
const decls = try p.parseAndDeclareDecls(.constant, opts);
|
||||
var decls_list = try p.parseAndDeclareDecls(.constant, opts);
|
||||
const decls: G.Decl.List = .moveFromList(&decls_list);
|
||||
if (!opts.is_for_loop_init) {
|
||||
try p.requireInitializers(.k_using, decls.items);
|
||||
try p.requireInitializers(.k_using, decls.slice());
|
||||
}
|
||||
return ExprOrLetStmt{
|
||||
.stmt_or_expr = js_ast.StmtOrExpr{
|
||||
.stmt = p.s(S.Local{
|
||||
.kind = .k_using,
|
||||
.decls = G.Decl.List.fromList(decls),
|
||||
.decls = decls,
|
||||
.is_export = false,
|
||||
}, token_range.loc),
|
||||
},
|
||||
.decls = decls.items,
|
||||
.decls = decls.slice(),
|
||||
};
|
||||
}
|
||||
} else if (p.fn_or_arrow_data_parse.allow_await == .allow_expr and strings.eqlComptime(raw, "await")) {
|
||||
@@ -689,19 +694,20 @@ pub fn Parse(
|
||||
}
|
||||
// p.markSyntaxFeature(.using, using_range.loc);
|
||||
opts.is_using_statement = true;
|
||||
const decls = try p.parseAndDeclareDecls(.constant, opts);
|
||||
var decls_list = try p.parseAndDeclareDecls(.constant, opts);
|
||||
const decls: G.Decl.List = .moveFromList(&decls_list);
|
||||
if (!opts.is_for_loop_init) {
|
||||
try p.requireInitializers(.k_await_using, decls.items);
|
||||
try p.requireInitializers(.k_await_using, decls.slice());
|
||||
}
|
||||
return ExprOrLetStmt{
|
||||
.stmt_or_expr = js_ast.StmtOrExpr{
|
||||
.stmt = p.s(S.Local{
|
||||
.kind = .k_await_using,
|
||||
.decls = G.Decl.List.fromList(decls),
|
||||
.decls = decls,
|
||||
.is_export = false,
|
||||
}, token_range.loc),
|
||||
},
|
||||
.decls = decls.items,
|
||||
.decls = decls.slice(),
|
||||
};
|
||||
}
|
||||
break :value Expr{
|
||||
|
||||
@@ -281,7 +281,7 @@ pub fn ParseFn(
|
||||
}
|
||||
|
||||
args.append(p.allocator, G.Arg{
|
||||
.ts_decorators = ExprNodeList.init(ts_decorators),
|
||||
.ts_decorators = ExprNodeList.fromOwnedSlice(ts_decorators),
|
||||
.binding = arg,
|
||||
.default = default_value,
|
||||
|
||||
|
||||
@@ -148,7 +148,7 @@ pub fn ParseJSXElement(
|
||||
|
||||
const is_key_after_spread = key_prop_i > -1 and first_spread_prop_i > -1 and key_prop_i > first_spread_prop_i;
|
||||
flags.setPresent(.is_key_after_spread, is_key_after_spread);
|
||||
properties = G.Property.List.fromList(props);
|
||||
properties = G.Property.List.moveFromList(&props);
|
||||
if (is_key_after_spread and p.options.jsx.runtime == .automatic and !p.has_classic_runtime_warned) {
|
||||
try p.log.addWarning(p.source, spread_loc, "\"key\" prop after a {...spread} is deprecated in JSX. Falling back to classic runtime.");
|
||||
p.has_classic_runtime_warned = true;
|
||||
@@ -268,7 +268,7 @@ pub fn ParseJSXElement(
|
||||
|
||||
return p.newExpr(E.JSXElement{
|
||||
.tag = end_tag.data.asExpr(),
|
||||
.children = ExprNodeList.fromList(children),
|
||||
.children = ExprNodeList.moveFromList(&children),
|
||||
.properties = properties,
|
||||
.key_prop_index = key_prop_i,
|
||||
.flags = flags,
|
||||
|
||||
@@ -262,7 +262,16 @@ pub fn ParsePrefix(
|
||||
return error.SyntaxError;
|
||||
}
|
||||
|
||||
return p.newExpr(E.Unary{ .op = .un_typeof, .value = value }, loc);
|
||||
return p.newExpr(
|
||||
E.Unary{
|
||||
.op = .un_typeof,
|
||||
.value = value,
|
||||
.flags = .{
|
||||
.was_originally_typeof_identifier = value.data == .e_identifier,
|
||||
},
|
||||
},
|
||||
loc,
|
||||
);
|
||||
}
|
||||
fn t_delete(noalias p: *P) anyerror!Expr {
|
||||
const loc = p.lexer.loc();
|
||||
@@ -281,7 +290,14 @@ pub fn ParsePrefix(
|
||||
}
|
||||
}
|
||||
|
||||
return p.newExpr(E.Unary{ .op = .un_delete, .value = value }, loc);
|
||||
return p.newExpr(E.Unary{
|
||||
.op = .un_delete,
|
||||
.value = value,
|
||||
.flags = .{
|
||||
.was_originally_delete_of_identifier_or_property_access = value.data == .e_identifier or
|
||||
value.isPropertyAccess(),
|
||||
},
|
||||
}, loc);
|
||||
}
|
||||
fn t_plus(noalias p: *P) anyerror!Expr {
|
||||
const loc = p.lexer.loc();
|
||||
@@ -500,7 +516,7 @@ pub fn ParsePrefix(
|
||||
self_errors.mergeInto(errors.?);
|
||||
}
|
||||
return p.newExpr(E.Array{
|
||||
.items = ExprNodeList.fromList(items),
|
||||
.items = ExprNodeList.moveFromList(&items),
|
||||
.comma_after_spread = comma_after_spread.toNullable(),
|
||||
.is_single_line = is_single_line,
|
||||
.close_bracket_loc = close_bracket_loc,
|
||||
@@ -584,7 +600,7 @@ pub fn ParsePrefix(
|
||||
}
|
||||
|
||||
return p.newExpr(E.Object{
|
||||
.properties = G.Property.List.fromList(properties),
|
||||
.properties = G.Property.List.moveFromList(&properties),
|
||||
.comma_after_spread = if (comma_after_spread.start > 0)
|
||||
comma_after_spread
|
||||
else
|
||||
|
||||
@@ -119,7 +119,7 @@ pub fn ParseProperty(
|
||||
}
|
||||
|
||||
return G.Property{
|
||||
.ts_decorators = ExprNodeList.init(opts.ts_decorators),
|
||||
.ts_decorators = try ExprNodeList.fromSlice(p.allocator, opts.ts_decorators),
|
||||
.kind = kind,
|
||||
.flags = Flags.Property.init(.{
|
||||
.is_computed = is_computed,
|
||||
@@ -333,7 +333,7 @@ pub fn ParseProperty(
|
||||
) catch unreachable;
|
||||
|
||||
block.* = G.ClassStaticBlock{
|
||||
.stmts = js_ast.BabyList(Stmt).init(stmts),
|
||||
.stmts = js_ast.BabyList(Stmt).fromOwnedSlice(stmts),
|
||||
.loc = loc,
|
||||
};
|
||||
|
||||
@@ -506,7 +506,7 @@ pub fn ParseProperty(
|
||||
try p.lexer.expectOrInsertSemicolon();
|
||||
|
||||
return G.Property{
|
||||
.ts_decorators = ExprNodeList.init(opts.ts_decorators),
|
||||
.ts_decorators = try ExprNodeList.fromSlice(p.allocator, opts.ts_decorators),
|
||||
.kind = kind,
|
||||
.flags = Flags.Property.init(.{
|
||||
.is_computed = is_computed,
|
||||
|
||||
@@ -493,9 +493,13 @@ pub fn ParseStmt(
|
||||
}
|
||||
fn t_var(p: *P, opts: *ParseStatementOptions, loc: logger.Loc) anyerror!Stmt {
|
||||
try p.lexer.next();
|
||||
const decls = try p.parseAndDeclareDecls(.hoisted, opts);
|
||||
var decls = try p.parseAndDeclareDecls(.hoisted, opts);
|
||||
try p.lexer.expectOrInsertSemicolon();
|
||||
return p.s(S.Local{ .kind = .k_var, .decls = Decl.List.fromList(decls), .is_export = opts.is_export }, loc);
|
||||
return p.s(S.Local{
|
||||
.kind = .k_var,
|
||||
.decls = Decl.List.moveFromList(&decls),
|
||||
.is_export = opts.is_export,
|
||||
}, loc);
|
||||
}
|
||||
fn t_const(p: *P, opts: *ParseStatementOptions, loc: logger.Loc) anyerror!Stmt {
|
||||
if (opts.lexical_decl != .allow_all) {
|
||||
@@ -509,14 +513,18 @@ pub fn ParseStmt(
|
||||
return p.parseTypescriptEnumStmt(loc, opts);
|
||||
}
|
||||
|
||||
const decls = try p.parseAndDeclareDecls(.constant, opts);
|
||||
var decls = try p.parseAndDeclareDecls(.constant, opts);
|
||||
try p.lexer.expectOrInsertSemicolon();
|
||||
|
||||
if (!opts.is_typescript_declare) {
|
||||
try p.requireInitializers(.k_const, decls.items);
|
||||
}
|
||||
|
||||
return p.s(S.Local{ .kind = .k_const, .decls = Decl.List.fromList(decls), .is_export = opts.is_export }, loc);
|
||||
return p.s(S.Local{
|
||||
.kind = .k_const,
|
||||
.decls = Decl.List.moveFromList(&decls),
|
||||
.is_export = opts.is_export,
|
||||
}, loc);
|
||||
}
|
||||
fn t_if(p: *P, _: *ParseStatementOptions, loc: logger.Loc) anyerror!Stmt {
|
||||
var current_loc = loc;
|
||||
@@ -795,15 +803,17 @@ pub fn ParseStmt(
|
||||
is_var = true;
|
||||
try p.lexer.next();
|
||||
var stmtOpts = ParseStatementOptions{};
|
||||
decls.update(try p.parseAndDeclareDecls(.hoisted, &stmtOpts));
|
||||
init_ = p.s(S.Local{ .kind = .k_var, .decls = Decl.List.fromList(decls) }, init_loc);
|
||||
var decls_list = try p.parseAndDeclareDecls(.hoisted, &stmtOpts);
|
||||
decls = .moveFromList(&decls_list);
|
||||
init_ = p.s(S.Local{ .kind = .k_var, .decls = decls }, init_loc);
|
||||
},
|
||||
// for (const )
|
||||
.t_const => {
|
||||
try p.lexer.next();
|
||||
var stmtOpts = ParseStatementOptions{};
|
||||
decls.update(try p.parseAndDeclareDecls(.constant, &stmtOpts));
|
||||
init_ = p.s(S.Local{ .kind = .k_const, .decls = Decl.List.fromList(decls) }, init_loc);
|
||||
var decls_list = try p.parseAndDeclareDecls(.constant, &stmtOpts);
|
||||
decls = .moveFromList(&decls_list);
|
||||
init_ = p.s(S.Local{ .kind = .k_const, .decls = decls }, init_loc);
|
||||
},
|
||||
// for (;)
|
||||
.t_semicolon => {},
|
||||
@@ -1293,7 +1303,7 @@ pub fn ParseStmt(
|
||||
for (local.decls.slice()) |decl| {
|
||||
try extractDeclsForBinding(decl.binding, &_decls);
|
||||
}
|
||||
decls.update(_decls);
|
||||
decls = .moveFromList(&_decls);
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
|
||||
@@ -201,7 +201,7 @@ pub fn ParseTypescript(
|
||||
// run the renamer. For external-facing things the renamer will avoid
|
||||
// collisions automatically so this isn't important for correctness.
|
||||
arg_ref = p.newSymbol(.hoisted, strings.cat(p.allocator, "_", name_text) catch unreachable) catch unreachable;
|
||||
p.current_scope.generated.push(p.allocator, arg_ref) catch unreachable;
|
||||
bun.handleOom(p.current_scope.generated.append(p.allocator, arg_ref));
|
||||
} else {
|
||||
arg_ref = p.newSymbol(.hoisted, name_text) catch unreachable;
|
||||
}
|
||||
@@ -238,7 +238,7 @@ pub fn ParseTypescript(
|
||||
try p.lexer.expect(.t_string_literal);
|
||||
try p.lexer.expect(.t_close_paren);
|
||||
if (!opts.is_typescript_declare) {
|
||||
const args = try ExprNodeList.one(p.allocator, path);
|
||||
const args = try ExprNodeList.initOne(p.allocator, path);
|
||||
value = p.newExpr(E.Call{ .target = target, .close_paren_loc = p.lexer.loc(), .args = args }, loc);
|
||||
}
|
||||
} else {
|
||||
@@ -266,7 +266,12 @@ pub fn ParseTypescript(
|
||||
.binding = p.b(B.Identifier{ .ref = ref }, default_name_loc),
|
||||
.value = value,
|
||||
};
|
||||
return p.s(S.Local{ .kind = kind, .decls = Decl.List.init(decls), .is_export = opts.is_export, .was_ts_import_equals = true }, loc);
|
||||
return p.s(S.Local{
|
||||
.kind = kind,
|
||||
.decls = Decl.List.fromOwnedSlice(decls),
|
||||
.is_export = opts.is_export,
|
||||
.was_ts_import_equals = true,
|
||||
}, loc);
|
||||
}
|
||||
|
||||
pub fn parseTypescriptEnumStmt(p: *P, loc: logger.Loc, opts: *ParseStatementOptions) anyerror!Stmt {
|
||||
@@ -372,7 +377,7 @@ pub fn ParseTypescript(
|
||||
// run the renamer. For external-facing things the renamer will avoid
|
||||
// collisions automatically so this isn't important for correctness.
|
||||
arg_ref = p.newSymbol(.hoisted, strings.cat(p.allocator, "_", name_text) catch unreachable) catch unreachable;
|
||||
p.current_scope.generated.push(p.allocator, arg_ref) catch unreachable;
|
||||
bun.handleOom(p.current_scope.generated.append(p.allocator, arg_ref));
|
||||
} else {
|
||||
arg_ref = p.declareSymbol(.hoisted, name_loc, name_text) catch unreachable;
|
||||
}
|
||||
|
||||
@@ -567,9 +567,9 @@ pub fn Visit(
|
||||
// Make it an error to use "arguments" in a static class block
|
||||
p.current_scope.forbid_arguments = true;
|
||||
|
||||
var list = property.class_static_block.?.stmts.listManaged(p.allocator);
|
||||
var list = property.class_static_block.?.stmts.moveToListManaged(p.allocator);
|
||||
p.visitStmts(&list, .fn_body) catch unreachable;
|
||||
property.class_static_block.?.stmts = js_ast.BabyList(Stmt).fromList(list);
|
||||
property.class_static_block.?.stmts = js_ast.BabyList(Stmt).moveFromList(&list);
|
||||
p.popScope();
|
||||
|
||||
p.fn_or_arrow_data_visit = old_fn_or_arrow_data;
|
||||
@@ -912,12 +912,13 @@ pub fn Visit(
|
||||
before.ensureUnusedCapacity(@as(usize, @intFromBool(let_decls.items.len > 0)) + @as(usize, @intFromBool(var_decls.items.len > 0)) + non_fn_stmts.items.len) catch unreachable;
|
||||
|
||||
if (let_decls.items.len > 0) {
|
||||
const decls: Decl.List = .moveFromList(&let_decls);
|
||||
before.appendAssumeCapacity(p.s(
|
||||
S.Local{
|
||||
.kind = .k_let,
|
||||
.decls = Decl.List.fromList(let_decls),
|
||||
.decls = decls,
|
||||
},
|
||||
let_decls.items[0].value.?.loc,
|
||||
decls.at(0).value.?.loc,
|
||||
));
|
||||
}
|
||||
|
||||
@@ -928,12 +929,13 @@ pub fn Visit(
|
||||
before.appendAssumeCapacity(new);
|
||||
}
|
||||
} else {
|
||||
const decls: Decl.List = .moveFromList(&var_decls);
|
||||
before.appendAssumeCapacity(p.s(
|
||||
S.Local{
|
||||
.kind = .k_var,
|
||||
.decls = Decl.List.fromList(var_decls),
|
||||
.decls = decls,
|
||||
},
|
||||
var_decls.items[0].value.?.loc,
|
||||
decls.at(0).value.?.loc,
|
||||
));
|
||||
}
|
||||
}
|
||||
@@ -1166,7 +1168,10 @@ pub fn Visit(
|
||||
if (prev_stmt.data == .s_local and
|
||||
local.canMergeWith(prev_stmt.data.s_local))
|
||||
{
|
||||
prev_stmt.data.s_local.decls.append(p.allocator, local.decls.slice()) catch unreachable;
|
||||
prev_stmt.data.s_local.decls.appendSlice(
|
||||
p.allocator,
|
||||
local.decls.slice(),
|
||||
) catch |err| bun.handleOom(err);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user