Mirror of https://github.com/oven-sh/bun, synced 2026-02-07 09:28:51 +00:00.

Compare commits: `dylan/byte` ... `pfg/proces` (96 commits)
| Author | SHA1 | Date |
|---|---|---|
| | abf4905aa8 | |
| | 2c7b800af1 | |
| | d3d68f45fd | |
| | b2b1bc9ba8 | |
| | f8aed4826b | |
| | 5102538fc3 | |
| | 3908cd9d16 | |
| | 45760cd53c | |
| | 716a2fbea0 | |
| | 1790d108e7 | |
| | ef8408c797 | |
| | 987cab74d7 | |
| | 2eebcee522 | |
| | d85207f179 | |
| | 661deb8eaf | |
| | 041f3e9df0 | |
| | 768748ec2d | |
| | 31202ec210 | |
| | 344a772ad5 | |
| | dd9d1530da | |
| | a09c45396e | |
| | 0351bd5f28 | |
| | 0ec153ee1c | |
| | 6397654e22 | |
| | 6bafe2602e | |
| | 9411c62756 | |
| | 8614b98f6b | |
| | ecd23df4ca | |
| | 3d8139dc27 | |
| | beea7180f3 | |
| | 8e786c1cfc | |
| | 9b97dd11e2 | |
| | 069a8d0b5d | |
| | 9907c2e9fa | |
| | 3976fd83ee | |
| | 2ac835f764 | |
| | 52b82cbe40 | |
| | 7ddb527573 | |
| | bac13201ae | |
| | 0a3b9ce701 | |
| | 7d5f5ad772 | |
| | a3d3d49c7f | |
| | d9551dda1a | |
| | ee7608f7cf | |
| | e329316d44 | |
| | 9479bb8a5b | |
| | 88a0002f7e | |
| | 6e9d57a953 | |
| | 3b7d1f7be2 | |
| | 1f517499ef | |
| | b3f5dd73da | |
| | a37b858993 | |
| | 09c56c8ba8 | |
| | 6e3349b55c | |
| | 2162837416 | |
| | b9f6a908f7 | |
| | 4b5551d230 | |
| | e1505b7143 | |
| | 6611983038 | |
| | d7ca10e22f | |
| | dc3c8f79c4 | |
| | 3ee477fc5b | |
| | 25834afe9a | |
| | 7caaf434e9 | |
| | edf13bd91d | |
| | 20dddd1819 | |
| | 8ec4c0abb3 | |
| | ab45d20630 | |
| | 21841af612 | |
| | 98da9b943c | |
| | 6a1bc7d780 | |
| | bdfdcebafb | |
| | 1e4935cf3e | |
| | e63608fced | |
| | d6c1b54289 | |
| | 594b03c275 | |
| | 18e4da1903 | |
| | 7a199276fb | |
| | 63c4d8f68f | |
| | afcdd90b77 | |
| | ae6ad1c04a | |
| | 301ec28a65 | |
| | 5b842ade1d | |
| | cf947fee17 | |
| | 73f0594704 | |
| | 2daf7ed02e | |
| | 38e8fea828 | |
| | 536dc8653b | |
| | a705dfc63a | |
| | 9fba9de0b5 | |
| | 6c3005e412 | |
| | 40b310c208 | |
| | edb7214e6c | |
| | 48b0b7fe6d | |
| | e0cbef0dce | |
| | 14832c5547 | |
`.github/CODEOWNERS` (vendored, 4 changes)

```diff
@@ -3,3 +3,7 @@
 # Tests
 /test/expectations.txt @Jarred-Sumner
+
+# Types
+*.d.ts @alii
+/packages/bun-types/ @alii
 
```
`.github/actions/bump/action.yml` (vendored, 2 changes)

```diff
@@ -25,7 +25,7 @@ runs:
          echo "version=$LATEST" >> $GITHUB_OUTPUT
          echo "message=$MESSAGE" >> $GITHUB_OUTPUT
    - name: Create Pull Request
-     uses: peter-evans/create-pull-request@v4
+     uses: peter-evans/create-pull-request@v7
      with:
        add-paths: |
          CMakeLists.txt
```
`.github/workflows/auto-assign-types.yml` (vendored, new file, +19)

```yaml
name: Auto Assign Types Issues

on:
  issues:
    types: [labeled]

jobs:
  auto-assign:
    runs-on: ubuntu-latest
    if: github.event.label.name == 'types'
    permissions:
      issues: write
    steps:
      - name: Assign to alii
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          GH_REPO: ${{ github.repository }}
        run: |
          gh issue edit ${{ github.event.issue.number }} --add-assignee alii
```
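The step shells out to the `gh` CLI, while the repository's other automation (see `labeled.yml` below) drives the same API through `actions/github-script`. For comparison, a hedged sketch of the equivalent assignment written that way; this is not what the workflow above uses:

```ts
// Hypothetical actions/github-script equivalent of the `gh issue edit` step.
// `github` (an Octokit client) and `context` are injected by the action at runtime.
await github.rest.issues.addAssignees({
  owner: context.repo.owner,
  repo: context.repo.repo,
  issue_number: context.issue.number,
  assignees: ["alii"],
});
```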
`.github/workflows/labeled.yml` (vendored, 63 changes)

````diff
@@ -105,11 +105,16 @@ jobs:
        env:
          GITHUB_ISSUE_BODY: ${{ github.event.issue.body }}
          GITHUB_ISSUE_TITLE: ${{ github.event.issue.title }}
          GITHUB_ISSUE_NUMBER: ${{ github.event.issue.number }}
        shell: bash
        run: |
          LABELS=$(bun scripts/read-issue.ts)
          bun scripts/is-outdated.ts
+
+          # Check for patterns that should close the issue
+          CLOSE_ACTION=$(bun scripts/handle-crash-patterns.ts)
+          echo "close-action=$CLOSE_ACTION" >> $GITHUB_OUTPUT
+
          if [[ -f "is-outdated.txt" ]]; then
            echo "is-outdated=true" >> $GITHUB_OUTPUT
          fi
@@ -118,6 +123,10 @@ jobs:
            echo "outdated=$(cat outdated.txt)" >> $GITHUB_OUTPUT
          fi

+          if [[ -f "is-standalone.txt" ]]; then
+            echo "is-standalone=true" >> $GITHUB_OUTPUT
+          fi
+
          if [[ -f "is-very-outdated.txt" ]]; then
            echo "is-very-outdated=true" >> $GITHUB_OUTPUT
            LABELS="$LABELS,old-version"
@@ -127,9 +136,32 @@ jobs:

          echo "latest=$(cat LATEST)" >> $GITHUB_OUTPUT
          echo "labels=$LABELS" >> $GITHUB_OUTPUT
-          rm -rf is-outdated.txt outdated.txt latest.txt is-very-outdated.txt
+          rm -rf is-outdated.txt outdated.txt latest.txt is-very-outdated.txt is-standalone.txt
+      - name: Close issue if pattern detected
+        if: github.event.label.name == 'crash' && fromJson(steps.add-labels.outputs.close-action).close == true
+        uses: actions/github-script@v7
+        with:
+          script: |
+            const closeAction = JSON.parse('${{ steps.add-labels.outputs.close-action }}');
+
+            // Comment with the reason
+            await github.rest.issues.createComment({
+              owner: context.repo.owner,
+              repo: context.repo.repo,
+              issue_number: context.issue.number,
+              body: closeAction.comment
+            });
+
+            // Close the issue
+            await github.rest.issues.update({
+              owner: context.repo.owner,
+              repo: context.repo.repo,
+              issue_number: context.issue.number,
+              state: 'closed',
+              state_reason: closeAction.reason
+            });
      - name: Generate comment text with Sentry Link
-        if: github.event.label.name == 'crash'
+        if: github.event.label.name == 'crash' && fromJson(steps.add-labels.outputs.close-action).close != true
        # ignore if fail
        continue-on-error: true
        id: generate-comment-text
@@ -163,8 +195,17 @@ jobs:
          token: ${{ secrets.GITHUB_TOKEN }}
          issue-number: ${{ github.event.issue.number }}
          labels: ${{ steps.add-labels.outputs.labels }}
+      - name: Comment outdated (standalone executable)
+        if: steps.add-labels.outputs.is-outdated == 'true' && steps.add-labels.outputs.is-standalone == 'true' && github.event.label.name == 'crash' && steps.generate-comment-text.outputs.sentry-link == ''
+        uses: actions-cool/issues-helper@v3
+        with:
+          actions: "create-comment"
+          token: ${{ secrets.GITHUB_TOKEN }}
+          issue-number: ${{ github.event.issue.number }}
+          body: |
+            @${{ github.event.issue.user.login }}, the latest version of Bun is v${{ steps.add-labels.outputs.latest }}, but the standalone executable is running Bun v${{ steps.add-labels.outputs.outdated }}. When the CLI using Bun's single-file executable next updates it might be fixed.
      - name: Comment outdated
-        if: steps.add-labels.outputs.is-outdated == 'true' && github.event.label.name == 'crash' && steps.generate-comment-text.outputs.sentry-link == ''
+        if: steps.add-labels.outputs.is-outdated == 'true' && steps.add-labels.outputs.is-standalone != 'true' && github.event.label.name == 'crash' && steps.generate-comment-text.outputs.sentry-link == ''
        uses: actions-cool/issues-helper@v3
        with:
          actions: "create-comment"
@@ -178,8 +219,22 @@ jobs:
            ```sh
            bun upgrade
            ```
+      - name: Comment with Sentry Link and outdated version (standalone executable)
+        if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated == 'true' && steps.add-labels.outputs.is-standalone == 'true'
+        uses: actions-cool/issues-helper@v3
+        with:
+          actions: "create-comment"
+          token: ${{ secrets.GITHUB_TOKEN }}
+          issue-number: ${{ github.event.issue.number }}
+          body: |
+            @${{ github.event.issue.user.login }}, thank you for reporting this crash. The latest version of Bun is v${{ steps.add-labels.outputs.latest }}, but the standalone executable is running Bun v${{ steps.add-labels.outputs.outdated }}. When the CLI using Bun's single-file executable next updates it might be fixed.
+
+            For Bun's internal tracking, this issue is [${{ steps.generate-comment-text.outputs.sentry-id }}](${{ steps.generate-comment-text.outputs.sentry-link }}).
+
+            <!-- sentry-id: ${{ steps.generate-comment-text.outputs.sentry-id }} -->
+            <!-- sentry-link: ${{ steps.generate-comment-text.outputs.sentry-link }} -->
      - name: Comment with Sentry Link and outdated version
-        if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated == 'true'
+        if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated == 'true' && steps.add-labels.outputs.is-standalone != 'true'
        uses: actions-cool/issues-helper@v3
        with:
          actions: "create-comment"
````
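`scripts/handle-crash-patterns.ts` itself is not part of this diff, but the expressions above pin down its contract: it prints a JSON object to stdout with a `close` flag plus the `comment` and `reason` consumed when closing. A minimal sketch of that contract; the pattern shown is hypothetical:

```ts
// Sketch of the output contract implied by labeled.yml above; the real
// scripts/handle-crash-patterns.ts is not shown in this diff.
interface CloseAction {
  close: boolean;
  comment?: string; // posted via github.rest.issues.createComment
  reason?: "completed" | "not_planned"; // becomes state_reason on close
}

const body = process.env.GITHUB_ISSUE_BODY ?? "";

// Hypothetical example: auto-close crash reports that match a known pattern.
const action: CloseAction = body.includes("bun-baseline-canary")
  ? {
      close: true,
      comment:
        "This crash comes from an unsupported canary build; please retry on the latest release.",
      reason: "not_planned",
    }
  : { close: false };

// The workflow captures stdout into the `close-action` step output.
console.log(JSON.stringify(action));
```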
`.github/workflows/on-submodule-update.yml` (vendored, deleted, 89 lines)

```yaml
name: Comment on updated submodule

on:
  pull_request_target:
    paths:
      - "src/generated_versions_list.zig"
      - ".github/workflows/on-submodule-update.yml"

jobs:
  comment:
    name: Comment
    runs-on: ubuntu-latest
    if: ${{ github.repository_owner == 'oven-sh' }}
    permissions:
      contents: read
      pull-requests: write
      issues: write
    steps:
      - name: Checkout current
        uses: actions/checkout@v4
        with:
          sparse-checkout: |
            src
      - name: Hash generated versions list
        id: hash
        run: |
          echo "hash=$(sha256sum src/generated_versions_list.zig | cut -d ' ' -f 1)" >> $GITHUB_OUTPUT
      - name: Checkout base
        uses: actions/checkout@v4
        with:
          ref: ${{ github.base_ref }}
          sparse-checkout: |
            src
      - name: Hash base
        id: base
        run: |
          echo "base=$(sha256sum src/generated_versions_list.zig | cut -d ' ' -f 1)" >> $GITHUB_OUTPUT
      - name: Compare
        id: compare
        run: |
          if [ "${{ steps.hash.outputs.hash }}" != "${{ steps.base.outputs.base }}" ]; then
            echo "changed=true" >> $GITHUB_OUTPUT
          else
            echo "changed=false" >> $GITHUB_OUTPUT
          fi
      - name: Find Comment
        id: comment
        uses: peter-evans/find-comment@v3
        with:
          issue-number: ${{ github.event.pull_request.number }}
          comment-author: github-actions[bot]
          body-includes: <!-- generated-comment submodule-updated -->
      - name: Write Warning Comment
        uses: peter-evans/create-or-update-comment@v4
        if: steps.compare.outputs.changed == 'true'
        with:
          comment-id: ${{ steps.comment.outputs.comment-id }}
          issue-number: ${{ github.event.pull_request.number }}
          edit-mode: replace
          body: |
            ⚠️ **Warning:** @${{ github.actor }}, this PR has changes to submodule versions.

            If this change was intentional, please ignore this message. If not, please undo changes to submodules and rebase your branch.

            <!-- generated-comment submodule-updated -->
      - name: Add labels
        uses: actions-cool/issues-helper@v3
        if: steps.compare.outputs.changed == 'true'
        with:
          actions: "add-labels"
          token: ${{ secrets.GITHUB_TOKEN }}
          issue-number: ${{ github.event.pull_request.number }}
          labels: "changed-submodules"
      - name: Remove labels
        uses: actions-cool/issues-helper@v3
        if: steps.compare.outputs.changed == 'false'
        with:
          actions: "remove-labels"
          token: ${{ secrets.GITHUB_TOKEN }}
          issue-number: ${{ github.event.pull_request.number }}
          labels: "changed-submodules"
      - name: Delete outdated comment
        uses: actions-cool/issues-helper@v3
        if: steps.compare.outputs.changed == 'false' && steps.comment.outputs.comment-id != ''
        with:
          actions: "delete-comment"
          token: ${{ secrets.GITHUB_TOKEN }}
          issue-number: ${{ github.event.pull_request.number }}
          comment-id: ${{ steps.comment.outputs.comment-id }}
```
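The deleted workflow's core check is simple to restate: hash `src/generated_versions_list.zig` on the PR head and on the base branch, then compare. A local sketch of the same comparison using Bun; it assumes the base branch is `main` and that the file still exists on both refs:

```ts
// Rough local equivalent of the deleted workflow's changed/unchanged check.
import { $ } from "bun";

const sha256 = (text: string) =>
  new Bun.CryptoHasher("sha256").update(text).digest("hex");

const headHash = sha256(await Bun.file("src/generated_versions_list.zig").text());
// Assumption: comparing against `main`; the workflow used ${{ github.base_ref }}.
const baseHash = sha256(await $`git show main:src/generated_versions_list.zig`.text());

console.log(headHash === baseHash ? "changed=false" : "changed=true");
```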
`.github/workflows/update-cares.yml` (vendored, 2 changes)

```diff
@@ -80,7 +80,7 @@ jobs:

      - name: Create Pull Request
        if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
-        uses: peter-evans/create-pull-request@v4
+        uses: peter-evans/create-pull-request@v7
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          add-paths: |
```
`.github/workflows/update-hdrhistogram.yml` (vendored, 4 changes; one is whitespace-only)

```diff
@@ -55,7 +55,7 @@ jobs:
            echo "Error: Could not fetch SHA for tag $LATEST_TAG"
            exit 1
          fi
-
+
          # Try to get commit SHA from tag object (for annotated tags)
          # If it fails, assume it's a lightweight tag pointing directly to commit
          LATEST_SHA=$(curl -sL "https://api.github.com/repos/HdrHistogram/HdrHistogram_c/git/tags/$LATEST_TAG_SHA" 2>/dev/null | jq -r '.object.sha // empty')
@@ -83,7 +83,7 @@ jobs:

      - name: Create Pull Request
        if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
-        uses: peter-evans/create-pull-request@v4
+        uses: peter-evans/create-pull-request@v7
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          add-paths: |
```
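All of these update workflows share one subtlety: a ref under `refs/tags/` can point either at a tag object (an annotated tag) or directly at a commit (a lightweight tag), so resolving a tag to a commit SHA may take two API calls. A TypeScript restatement of the shell logic above, using the same GitHub API endpoints; the repo and tag names in the example invocation are illustrative:

```ts
// Resolve a tag name to a commit SHA, handling annotated and lightweight tags.
// Mirrors the curl/jq logic in the update-* workflows.
async function tagToCommitSha(repo: string, tag: string): Promise<string> {
  const refRes = await fetch(`https://api.github.com/repos/${repo}/git/refs/tags/${tag}`);
  const ref: any = await refRes.json();

  if (ref.object.type === "tag") {
    // Annotated tag: one more hop through the tag object to reach the commit.
    const tagRes = await fetch(`https://api.github.com/repos/${repo}/git/tags/${ref.object.sha}`);
    return ((await tagRes.json()) as any).object.sha;
  }

  // Lightweight tag: the ref already points at the commit.
  return ref.object.sha;
}

// Example invocation (tag name is illustrative):
console.log(await tagToCommitSha("HdrHistogram/HdrHistogram_c", "some-tag"));
```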
`.github/workflows/update-highway.yml` (vendored, 4 changes; one is whitespace-only)

```diff
@@ -58,7 +58,7 @@ jobs:

          TAG_OBJECT_SHA=$(echo "$TAG_REF" | jq -r '.object.sha')
          TAG_OBJECT_TYPE=$(echo "$TAG_REF" | jq -r '.object.type')
-
+
          if [ -z "$TAG_OBJECT_SHA" ] || [ "$TAG_OBJECT_SHA" = "null" ]; then
            echo "Error: Could not fetch SHA for tag $LATEST_TAG"
            exit 1
@@ -99,7 +99,7 @@ jobs:

      - name: Create Pull Request
        if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
-        uses: peter-evans/create-pull-request@v4
+        uses: peter-evans/create-pull-request@v7
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          add-paths: |
```
`.github/workflows/update-libarchive.yml` (vendored, 2 changes)

```diff
@@ -80,7 +80,7 @@ jobs:

      - name: Create Pull Request
        if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
-        uses: peter-evans/create-pull-request@v4
+        uses: peter-evans/create-pull-request@v7
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          add-paths: |
```
`.github/workflows/update-libdeflate.yml` (vendored, 2 changes)

```diff
@@ -80,7 +80,7 @@ jobs:

      - name: Create Pull Request
        if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
-        uses: peter-evans/create-pull-request@v4
+        uses: peter-evans/create-pull-request@v7
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          add-paths: |
```
`.github/workflows/update-lolhtml.yml` (vendored, 6 changes; two are whitespace-only)

```diff
@@ -55,12 +55,12 @@ jobs:
          TAG_REF_RESPONSE=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/refs/tags/$LATEST_TAG")
          LATEST_TAG_SHA=$(echo "$TAG_REF_RESPONSE" | jq -r '.object.sha')
          TAG_OBJECT_TYPE=$(echo "$TAG_REF_RESPONSE" | jq -r '.object.type')
-
+
          if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
            echo "Error: Could not fetch SHA for tag $LATEST_TAG"
            exit 1
          fi
-
+
          if [ "$TAG_OBJECT_TYPE" = "tag" ]; then
            # This is an annotated tag, we need to get the commit it points to
            LATEST_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
@@ -92,7 +92,7 @@ jobs:

      - name: Create Pull Request
        if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
-        uses: peter-evans/create-pull-request@v4
+        uses: peter-evans/create-pull-request@v7
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          add-paths: |
```
`.github/workflows/update-lshpack.yml` (vendored, 4 changes; one is whitespace-only)

```diff
@@ -59,7 +59,7 @@ jobs:

          LATEST_TAG_SHA=$(echo "$TAG_REF" | jq -r '.object.sha')
          TAG_TYPE=$(echo "$TAG_REF" | jq -r '.object.type')
-
+
          if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
            echo "Error: Could not fetch SHA for tag $LATEST_TAG"
            exit 1
@@ -97,7 +97,7 @@ jobs:

      - name: Create Pull Request
        if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
-        uses: peter-evans/create-pull-request@v4
+        uses: peter-evans/create-pull-request@v7
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          add-paths: |
```
`.github/workflows/update-sqlite3.yml` (vendored, 2 changes)

```diff
@@ -91,7 +91,7 @@ jobs:

      - name: Create Pull Request
        if: success() && steps.check-version.outputs.current_num < steps.check-version.outputs.latest_num
-        uses: peter-evans/create-pull-request@v4
+        uses: peter-evans/create-pull-request@v7
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          add-paths: |
```
`.github/workflows/update-vendor.yml` (vendored, new file, +79)

```yaml
name: Update vendor

on:
  schedule:
    - cron: "0 4 * * 0"
  workflow_dispatch:

jobs:
  check-update:
    runs-on: ubuntu-latest
    permissions:
      contents: write
      pull-requests: write

    strategy:
      matrix:
        package:
          - elysia

    steps:
      - uses: actions/checkout@v4
      - uses: oven-sh/setup-bun@v2

      - name: Check version
        id: check-version
        run: |
          set -euo pipefail

          # Extract the commit hash from the line after COMMIT
          current=$(bun -p '(await Bun.file("test/vendor.json").json()).filter(v=>v.package===process.argv[1])[0].tag' ${{ matrix.package }})
          repository=$(bun -p '(await Bun.file("test/vendor.json").json()).filter(v=>v.package===process.argv[1])[0].repository' ${{ matrix.package }} | cut -d'/' -f4,5)

          if [ -z "$current" ]; then
            echo "Error: Could not find COMMIT line in test/vendor.json"
            exit 1
          fi

          echo "current=$current" >> $GITHUB_OUTPUT
          echo "repository=$repository" >> $GITHUB_OUTPUT

          LATEST_RELEASE=$(curl -sL https://api.github.com/repos/${repository}/releases/latest)
          if [ -z "$LATEST_RELEASE" ]; then
            echo "Error: Failed to fetch latest release from GitHub API"
            exit 1
          fi

          LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
          if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
            echo "Error: Could not extract tag name from GitHub API response"
            exit 1
          fi

          echo "latest=$LATEST_TAG" >> $GITHUB_OUTPUT

      - name: Update version if needed
        if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
        run: |
          set -euo pipefail
          bun -e 'await Bun.write("test/vendor.json", JSON.stringify((await Bun.file("test/vendor.json").json()).map(v=>{if(v.package===process.argv[1])v.tag=process.argv[2];return v;}), null, 2) + "\n")' ${{ matrix.package }} ${{ steps.check-version.outputs.latest }}

      - name: Create Pull Request
        if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
        uses: peter-evans/create-pull-request@v7
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          add-paths: |
            test/vendor.json
          commit-message: "deps: update ${{ matrix.package }} to ${{ steps.check-version.outputs.latest }} (${{ steps.check-version.outputs.latest }})"
          title: "deps: update ${{ matrix.package }} to ${{ steps.check-version.outputs.latest }}"
          delete-branch: true
          branch: deps/update-${{ matrix.package }}-${{ github.run_number }}
          body: |
            ## What does this PR do?

            Updates ${{ matrix.package }} to version ${{ steps.check-version.outputs.latest }}

            Compare: https://github.com/${{ steps.check-version.outputs.repository }}/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}

            Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-vendor.yml)
```
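The two `bun -p` one-liners in the check step are dense; unpacked into an equivalent standalone script (same `test/vendor.json` shape the workflow assumes: an array of `{ package, repository, tag }` entries), the logic reads:

```ts
// Unpacked equivalent of the workflow's `bun -p` / `bun -e` one-liners.
// Usage: bun update-vendor-tag.ts <package> [newTag]   (script name is illustrative)
interface VendorEntry {
  package: string;
  repository: string; // a full URL, e.g. "https://github.com/<owner>/<repo>"
  tag: string;
}

const [pkg, newTag] = process.argv.slice(2);
const vendors: VendorEntry[] = await Bun.file("test/vendor.json").json();

const entry = vendors.find(v => v.package === pkg);
if (!entry) throw new Error(`No entry for ${pkg} in test/vendor.json`);

// "owner/repo" is URL fields 4 and 5, matching the `cut -d'/' -f4,5` above.
const repository = entry.repository.split("/").slice(3, 5).join("/");
console.log(`current=${entry.tag} repository=${repository}`);

if (newTag) {
  entry.tag = newTag; // the same in-place update the workflow performs
  await Bun.write("test/vendor.json", JSON.stringify(vendors, null, 2) + "\n");
}
```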
`.github/workflows/update-zstd.yml` (vendored, 2 changes)

```diff
@@ -80,7 +80,7 @@ jobs:

      - name: Create Pull Request
        if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
-        uses: peter-evans/create-pull-request@v4
+        uses: peter-evans/create-pull-request@v7
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          add-paths: |
```
`.vscode/launch.json` (generated, vendored, 11 changes)

```diff
@@ -25,6 +25,9 @@
        // "BUN_JSC_validateExceptionChecks": "1",
+        // "BUN_JSC_dumpSimulatedThrows": "1",
        // "BUN_JSC_unexpectedExceptionStackTraceLimit": "20",
+        // "BUN_DESTRUCT_VM_ON_EXIT": "1",
        // "ASAN_OPTIONS": "allow_user_segv_handler=1:disable_coredump=0:detect_leaks=1",
+        // "LSAN_OPTIONS": "malloc_context_size=100:print_suppressions=1:suppressions=${workspaceFolder}/test/leaksan.supp",
      },
      "console": "internalConsole",
      "sourceMap": {
@@ -57,11 +60,17 @@
      "name": "bun run [file]",
      "program": "${workspaceFolder}/build/debug/bun-debug",
      "args": ["${file}"],
-      "cwd": "${fileDirname}",
+      "cwd": "${workspaceFolder}",
      "env": {
        "FORCE_COLOR": "0",
        "BUN_DEBUG_QUIET_LOGS": "1",
        "BUN_GARBAGE_COLLECTOR_LEVEL": "2",
+        // "BUN_JSC_validateExceptionChecks": "1",
+        // "BUN_JSC_dumpSimulatedThrows": "1",
+        // "BUN_JSC_unexpectedExceptionStackTraceLimit": "20",
+        // "BUN_DESTRUCT_VM_ON_EXIT": "1",
+        // "ASAN_OPTIONS": "allow_user_segv_handler=1:disable_coredump=0:detect_leaks=1",
+        // "LSAN_OPTIONS": "malloc_context_size=100:print_suppressions=1:suppressions=${workspaceFolder}/test/leaksan.supp",
      },
      "console": "internalConsole",
      "sourceMap": {
```
`CLAUDE.md` (10 changes)

```diff
@@ -4,18 +4,14 @@ This is the Bun repository - an all-in-one JavaScript runtime & toolkit designed

 ### Build Commands

-- **Build debug version**: `bun bd`
+- **Build Bun**: `bun bd`
   - Creates a debug build at `./build/debug/bun-debug`
-  - **CRITICAL**: DO NOT set a build timeout. Compilation takes ~5 minutes. Be patient.
+  - **CRITICAL**: no need for a timeout, the build is really fast!
 - **Run tests with your debug build**: `bun bd test <test-file>`
   - **CRITICAL**: Never use `bun test` directly - it won't include your changes
 - **Run any command with debug build**: `bun bd <command>`

-### Other Build Variants
-
-- `bun run build:release` - Release build
-
-Address sanitizer is enabled by default in debug builds of Bun.
+Tip: Bun is already installed and in $PATH. The `bd` subcommand is a package.json script.

 ## Testing
```
```diff
@@ -31,6 +31,11 @@ include(SetupCcache)
 parse_package_json(VERSION_VARIABLE DEFAULT_VERSION)
 optionx(VERSION STRING "The version of Bun" DEFAULT ${DEFAULT_VERSION})
 project(Bun VERSION ${VERSION})
+
+# Bun uses C++23, which is compatible with BoringSSL's C++17 requirement
+set(CMAKE_CXX_STANDARD 23)
+set(CMAKE_CXX_STANDARD_REQUIRED ON)
+
 include(Options)
 include(CompilerFlags)
@@ -43,6 +48,9 @@ include(SetupEsbuild)
 include(SetupZig)
 include(SetupRust)
+
+# Generate dependency versions header
+include(GenerateDependencyVersions)

 # --- Targets ---

 include(BuildBun)
```
```diff
@@ -13,7 +13,10 @@
   },
   {
     "output": "JavaScriptSources.txt",
-    "paths": ["src/js/**/*.{js,ts}"]
+    "paths": [
+      "src/js/**/*.{js,ts}",
+      "src/install/PackageManager/scanner-entry.ts"
+    ]
   },
   {
     "output": "JavaScriptCodegenSources.txt",
```
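The `paths` entries here are glob patterns, and `src/install/PackageManager/scanner-entry.ts` needs its own entry because it lives outside `src/js/**`. A quick way to see what a pattern actually matches, sketched with Bun's glob API:

```ts
// List the files a codegen glob matches, scanning from the repository root.
import { Glob } from "bun";

const glob = new Glob("src/js/**/*.{js,ts}");
for await (const file of glob.scan(".")) {
  console.log(file);
}
// src/install/PackageManager/scanner-entry.ts falls outside this pattern,
// which is why the diff adds it as an explicit second path.
```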
A 506-line list of C++ source files is deleted outright (`@@ -1,506 +0,0 @@`):

```
packages/bun-usockets/src/crypto/root_certs.cpp
packages/bun-usockets/src/crypto/sni_tree.cpp
src/bake/BakeGlobalObject.cpp
src/bake/BakeProduction.cpp
src/bake/BakeSourceProvider.cpp
src/bun.js/bindings/ActiveDOMCallback.cpp
src/bun.js/bindings/AsymmetricKeyValue.cpp
src/bun.js/bindings/AsyncContextFrame.cpp
src/bun.js/bindings/Base64Helpers.cpp
src/bun.js/bindings/bindings.cpp
src/bun.js/bindings/blob.cpp
src/bun.js/bindings/bun-simdutf.cpp
src/bun.js/bindings/bun-spawn.cpp
src/bun.js/bindings/BunClientData.cpp
src/bun.js/bindings/BunCommonStrings.cpp
src/bun.js/bindings/BunDebugger.cpp
src/bun.js/bindings/BunGCOutputConstraint.cpp
src/bun.js/bindings/BunGlobalScope.cpp
src/bun.js/bindings/BunHttp2CommonStrings.cpp
src/bun.js/bindings/BunInjectedScriptHost.cpp
src/bun.js/bindings/BunInspector.cpp
src/bun.js/bindings/BunJSCEventLoop.cpp
src/bun.js/bindings/BunObject.cpp
src/bun.js/bindings/BunPlugin.cpp
src/bun.js/bindings/BunProcess.cpp
src/bun.js/bindings/BunString.cpp
src/bun.js/bindings/BunWorkerGlobalScope.cpp
src/bun.js/bindings/c-bindings.cpp
src/bun.js/bindings/CallSite.cpp
src/bun.js/bindings/CallSitePrototype.cpp
src/bun.js/bindings/CatchScopeBinding.cpp
src/bun.js/bindings/CodeCoverage.cpp
src/bun.js/bindings/ConsoleObject.cpp
src/bun.js/bindings/Cookie.cpp
src/bun.js/bindings/CookieMap.cpp
src/bun.js/bindings/coroutine.cpp
src/bun.js/bindings/CPUFeatures.cpp
src/bun.js/bindings/decodeURIComponentSIMD.cpp
src/bun.js/bindings/DOMException.cpp
src/bun.js/bindings/DOMFormData.cpp
src/bun.js/bindings/DOMURL.cpp
src/bun.js/bindings/DOMWrapperWorld.cpp
src/bun.js/bindings/DoubleFormatter.cpp
src/bun.js/bindings/EncodeURIComponent.cpp
src/bun.js/bindings/EncodingTables.cpp
src/bun.js/bindings/ErrorCode.cpp
src/bun.js/bindings/ErrorStackFrame.cpp
src/bun.js/bindings/ErrorStackTrace.cpp
src/bun.js/bindings/EventLoopTaskNoContext.cpp
src/bun.js/bindings/ExposeNodeModuleGlobals.cpp
src/bun.js/bindings/ffi.cpp
src/bun.js/bindings/helpers.cpp
src/bun.js/bindings/highway_strings.cpp
src/bun.js/bindings/HTMLEntryPoint.cpp
src/bun.js/bindings/ImportMetaObject.cpp
src/bun.js/bindings/inlines.cpp
src/bun.js/bindings/InspectorBunFrontendDevServerAgent.cpp
src/bun.js/bindings/InspectorHTTPServerAgent.cpp
src/bun.js/bindings/InspectorLifecycleAgent.cpp
src/bun.js/bindings/InspectorTestReporterAgent.cpp
src/bun.js/bindings/InternalForTesting.cpp
src/bun.js/bindings/InternalModuleRegistry.cpp
src/bun.js/bindings/IPC.cpp
src/bun.js/bindings/isBuiltinModule.cpp
src/bun.js/bindings/JS2Native.cpp
src/bun.js/bindings/JSBigIntBinding.cpp
src/bun.js/bindings/JSBuffer.cpp
src/bun.js/bindings/JSBufferEncodingType.cpp
src/bun.js/bindings/JSBufferList.cpp
src/bun.js/bindings/JSBundlerPlugin.cpp
src/bun.js/bindings/JSBunRequest.cpp
src/bun.js/bindings/JSCommonJSExtensions.cpp
src/bun.js/bindings/JSCommonJSModule.cpp
src/bun.js/bindings/JSCTaskScheduler.cpp
src/bun.js/bindings/JSCTestingHelpers.cpp
src/bun.js/bindings/JSDOMExceptionHandling.cpp
src/bun.js/bindings/JSDOMFile.cpp
src/bun.js/bindings/JSDOMGlobalObject.cpp
src/bun.js/bindings/JSDOMWrapper.cpp
src/bun.js/bindings/JSDOMWrapperCache.cpp
src/bun.js/bindings/JSEnvironmentVariableMap.cpp
src/bun.js/bindings/JSFFIFunction.cpp
src/bun.js/bindings/JSMockFunction.cpp
src/bun.js/bindings/JSNextTickQueue.cpp
src/bun.js/bindings/JSNodePerformanceHooksHistogram.cpp
src/bun.js/bindings/JSNodePerformanceHooksHistogramConstructor.cpp
src/bun.js/bindings/JSNodePerformanceHooksHistogramPrototype.cpp
src/bun.js/bindings/JSPropertyIterator.cpp
src/bun.js/bindings/JSS3File.cpp
src/bun.js/bindings/JSSecrets.cpp
src/bun.js/bindings/JSSocketAddressDTO.cpp
src/bun.js/bindings/JSStringDecoder.cpp
src/bun.js/bindings/JSWrappingFunction.cpp
src/bun.js/bindings/JSX509Certificate.cpp
src/bun.js/bindings/JSX509CertificateConstructor.cpp
src/bun.js/bindings/JSX509CertificatePrototype.cpp
src/bun.js/bindings/linux_perf_tracing.cpp
src/bun.js/bindings/MarkedArgumentBufferBinding.cpp
src/bun.js/bindings/MarkingConstraint.cpp
src/bun.js/bindings/ModuleLoader.cpp
src/bun.js/bindings/napi_external.cpp
src/bun.js/bindings/napi_finalizer.cpp
src/bun.js/bindings/napi_handle_scope.cpp
src/bun.js/bindings/napi_type_tag.cpp
src/bun.js/bindings/napi.cpp
src/bun.js/bindings/NapiClass.cpp
src/bun.js/bindings/NapiRef.cpp
src/bun.js/bindings/NapiWeakValue.cpp
src/bun.js/bindings/ncrpyto_engine.cpp
src/bun.js/bindings/ncrypto.cpp
src/bun.js/bindings/node/crypto/CryptoDhJob.cpp
src/bun.js/bindings/node/crypto/CryptoGenDhKeyPair.cpp
src/bun.js/bindings/node/crypto/CryptoGenDsaKeyPair.cpp
src/bun.js/bindings/node/crypto/CryptoGenEcKeyPair.cpp
src/bun.js/bindings/node/crypto/CryptoGenKeyPair.cpp
src/bun.js/bindings/node/crypto/CryptoGenNidKeyPair.cpp
src/bun.js/bindings/node/crypto/CryptoGenRsaKeyPair.cpp
src/bun.js/bindings/node/crypto/CryptoHkdf.cpp
src/bun.js/bindings/node/crypto/CryptoKeygen.cpp
src/bun.js/bindings/node/crypto/CryptoKeys.cpp
src/bun.js/bindings/node/crypto/CryptoPrimes.cpp
src/bun.js/bindings/node/crypto/CryptoSignJob.cpp
src/bun.js/bindings/node/crypto/CryptoUtil.cpp
src/bun.js/bindings/node/crypto/JSCipher.cpp
src/bun.js/bindings/node/crypto/JSCipherConstructor.cpp
src/bun.js/bindings/node/crypto/JSCipherPrototype.cpp
src/bun.js/bindings/node/crypto/JSDiffieHellman.cpp
src/bun.js/bindings/node/crypto/JSDiffieHellmanConstructor.cpp
src/bun.js/bindings/node/crypto/JSDiffieHellmanGroup.cpp
src/bun.js/bindings/node/crypto/JSDiffieHellmanGroupConstructor.cpp
src/bun.js/bindings/node/crypto/JSDiffieHellmanGroupPrototype.cpp
src/bun.js/bindings/node/crypto/JSDiffieHellmanPrototype.cpp
src/bun.js/bindings/node/crypto/JSECDH.cpp
src/bun.js/bindings/node/crypto/JSECDHConstructor.cpp
src/bun.js/bindings/node/crypto/JSECDHPrototype.cpp
src/bun.js/bindings/node/crypto/JSHash.cpp
src/bun.js/bindings/node/crypto/JSHmac.cpp
src/bun.js/bindings/node/crypto/JSKeyObject.cpp
src/bun.js/bindings/node/crypto/JSKeyObjectConstructor.cpp
src/bun.js/bindings/node/crypto/JSKeyObjectPrototype.cpp
src/bun.js/bindings/node/crypto/JSPrivateKeyObject.cpp
src/bun.js/bindings/node/crypto/JSPrivateKeyObjectConstructor.cpp
src/bun.js/bindings/node/crypto/JSPrivateKeyObjectPrototype.cpp
src/bun.js/bindings/node/crypto/JSPublicKeyObject.cpp
src/bun.js/bindings/node/crypto/JSPublicKeyObjectConstructor.cpp
src/bun.js/bindings/node/crypto/JSPublicKeyObjectPrototype.cpp
src/bun.js/bindings/node/crypto/JSSecretKeyObject.cpp
src/bun.js/bindings/node/crypto/JSSecretKeyObjectConstructor.cpp
src/bun.js/bindings/node/crypto/JSSecretKeyObjectPrototype.cpp
src/bun.js/bindings/node/crypto/JSSign.cpp
src/bun.js/bindings/node/crypto/JSVerify.cpp
src/bun.js/bindings/node/crypto/KeyObject.cpp
src/bun.js/bindings/node/crypto/node_crypto_binding.cpp
src/bun.js/bindings/node/http/JSConnectionsList.cpp
src/bun.js/bindings/node/http/JSConnectionsListConstructor.cpp
src/bun.js/bindings/node/http/JSConnectionsListPrototype.cpp
src/bun.js/bindings/node/http/JSHTTPParser.cpp
src/bun.js/bindings/node/http/JSHTTPParserConstructor.cpp
src/bun.js/bindings/node/http/JSHTTPParserPrototype.cpp
src/bun.js/bindings/node/http/NodeHTTPParser.cpp
src/bun.js/bindings/node/NodeTimers.cpp
src/bun.js/bindings/NodeAsyncHooks.cpp
src/bun.js/bindings/NodeDirent.cpp
src/bun.js/bindings/NodeFetch.cpp
src/bun.js/bindings/NodeFSStatBinding.cpp
src/bun.js/bindings/NodeFSStatFSBinding.cpp
src/bun.js/bindings/NodeHTTP.cpp
src/bun.js/bindings/NodeTimerObject.cpp
src/bun.js/bindings/NodeTLS.cpp
src/bun.js/bindings/NodeURL.cpp
src/bun.js/bindings/NodeValidator.cpp
src/bun.js/bindings/NodeVM.cpp
src/bun.js/bindings/NodeVMModule.cpp
src/bun.js/bindings/NodeVMScript.cpp
src/bun.js/bindings/NodeVMSourceTextModule.cpp
src/bun.js/bindings/NodeVMSyntheticModule.cpp
src/bun.js/bindings/NoOpForTesting.cpp
src/bun.js/bindings/ObjectBindings.cpp
src/bun.js/bindings/objects.cpp
src/bun.js/bindings/OsBinding.cpp
src/bun.js/bindings/Path.cpp
src/bun.js/bindings/ProcessBindingBuffer.cpp
src/bun.js/bindings/ProcessBindingConstants.cpp
src/bun.js/bindings/ProcessBindingFs.cpp
src/bun.js/bindings/ProcessBindingHTTPParser.cpp
src/bun.js/bindings/ProcessBindingNatives.cpp
src/bun.js/bindings/ProcessBindingTTYWrap.cpp
src/bun.js/bindings/ProcessBindingUV.cpp
src/bun.js/bindings/ProcessIdentifier.cpp
src/bun.js/bindings/RegularExpression.cpp
src/bun.js/bindings/S3Error.cpp
src/bun.js/bindings/ScriptExecutionContext.cpp
src/bun.js/bindings/SecretsDarwin.cpp
src/bun.js/bindings/SecretsLinux.cpp
src/bun.js/bindings/SecretsWindows.cpp
src/bun.js/bindings/Serialization.cpp
src/bun.js/bindings/ServerRouteList.cpp
src/bun.js/bindings/spawn.cpp
src/bun.js/bindings/SQLClient.cpp
src/bun.js/bindings/sqlite/JSSQLStatement.cpp
src/bun.js/bindings/StringBuilderBinding.cpp
src/bun.js/bindings/stripANSI.cpp
src/bun.js/bindings/Strong.cpp
src/bun.js/bindings/TextCodec.cpp
src/bun.js/bindings/TextCodecCJK.cpp
src/bun.js/bindings/TextCodecReplacement.cpp
src/bun.js/bindings/TextCodecSingleByte.cpp
src/bun.js/bindings/TextCodecUserDefined.cpp
src/bun.js/bindings/TextCodecWrapper.cpp
src/bun.js/bindings/TextEncoding.cpp
src/bun.js/bindings/TextEncodingRegistry.cpp
src/bun.js/bindings/Uint8Array.cpp
src/bun.js/bindings/Undici.cpp
src/bun.js/bindings/URLDecomposition.cpp
src/bun.js/bindings/URLSearchParams.cpp
src/bun.js/bindings/UtilInspect.cpp
src/bun.js/bindings/v8/node.cpp
src/bun.js/bindings/v8/shim/Function.cpp
src/bun.js/bindings/v8/shim/FunctionTemplate.cpp
src/bun.js/bindings/v8/shim/GlobalInternals.cpp
src/bun.js/bindings/v8/shim/Handle.cpp
src/bun.js/bindings/v8/shim/HandleScopeBuffer.cpp
src/bun.js/bindings/v8/shim/InternalFieldObject.cpp
src/bun.js/bindings/v8/shim/Map.cpp
src/bun.js/bindings/v8/shim/ObjectTemplate.cpp
src/bun.js/bindings/v8/shim/Oddball.cpp
src/bun.js/bindings/v8/shim/TaggedPointer.cpp
src/bun.js/bindings/v8/v8_api_internal.cpp
src/bun.js/bindings/v8/v8_internal.cpp
src/bun.js/bindings/v8/V8Array.cpp
src/bun.js/bindings/v8/V8Boolean.cpp
src/bun.js/bindings/v8/V8Context.cpp
src/bun.js/bindings/v8/V8EscapableHandleScope.cpp
src/bun.js/bindings/v8/V8EscapableHandleScopeBase.cpp
src/bun.js/bindings/v8/V8External.cpp
src/bun.js/bindings/v8/V8Function.cpp
src/bun.js/bindings/v8/V8FunctionCallbackInfo.cpp
src/bun.js/bindings/v8/V8FunctionTemplate.cpp
src/bun.js/bindings/v8/V8HandleScope.cpp
src/bun.js/bindings/v8/V8Isolate.cpp
src/bun.js/bindings/v8/V8Local.cpp
src/bun.js/bindings/v8/V8Maybe.cpp
src/bun.js/bindings/v8/V8Number.cpp
src/bun.js/bindings/v8/V8Object.cpp
src/bun.js/bindings/v8/V8ObjectTemplate.cpp
src/bun.js/bindings/v8/V8String.cpp
src/bun.js/bindings/v8/V8Template.cpp
src/bun.js/bindings/v8/V8Value.cpp
src/bun.js/bindings/Weak.cpp
src/bun.js/bindings/webcore/AbortController.cpp
src/bun.js/bindings/webcore/AbortSignal.cpp
src/bun.js/bindings/webcore/ActiveDOMObject.cpp
src/bun.js/bindings/webcore/BroadcastChannel.cpp
src/bun.js/bindings/webcore/BunBroadcastChannelRegistry.cpp
src/bun.js/bindings/webcore/CloseEvent.cpp
src/bun.js/bindings/webcore/CommonAtomStrings.cpp
src/bun.js/bindings/webcore/ContextDestructionObserver.cpp
src/bun.js/bindings/webcore/CustomEvent.cpp
src/bun.js/bindings/webcore/CustomEventCustom.cpp
src/bun.js/bindings/webcore/DOMJITHelpers.cpp
src/bun.js/bindings/webcore/ErrorCallback.cpp
src/bun.js/bindings/webcore/ErrorEvent.cpp
src/bun.js/bindings/webcore/Event.cpp
src/bun.js/bindings/webcore/EventContext.cpp
src/bun.js/bindings/webcore/EventDispatcher.cpp
src/bun.js/bindings/webcore/EventEmitter.cpp
src/bun.js/bindings/webcore/EventFactory.cpp
src/bun.js/bindings/webcore/EventListenerMap.cpp
src/bun.js/bindings/webcore/EventNames.cpp
src/bun.js/bindings/webcore/EventPath.cpp
src/bun.js/bindings/webcore/EventTarget.cpp
src/bun.js/bindings/webcore/EventTargetConcrete.cpp
src/bun.js/bindings/webcore/EventTargetFactory.cpp
src/bun.js/bindings/webcore/FetchHeaders.cpp
src/bun.js/bindings/webcore/HeaderFieldTokenizer.cpp
src/bun.js/bindings/webcore/HTTPHeaderField.cpp
src/bun.js/bindings/webcore/HTTPHeaderIdentifiers.cpp
src/bun.js/bindings/webcore/HTTPHeaderMap.cpp
src/bun.js/bindings/webcore/HTTPHeaderNames.cpp
src/bun.js/bindings/webcore/HTTPHeaderStrings.cpp
src/bun.js/bindings/webcore/HTTPHeaderValues.cpp
src/bun.js/bindings/webcore/HTTPParsers.cpp
src/bun.js/bindings/webcore/IdentifierEventListenerMap.cpp
src/bun.js/bindings/webcore/InternalWritableStream.cpp
src/bun.js/bindings/webcore/JSAbortAlgorithm.cpp
src/bun.js/bindings/webcore/JSAbortController.cpp
src/bun.js/bindings/webcore/JSAbortSignal.cpp
src/bun.js/bindings/webcore/JSAbortSignalCustom.cpp
src/bun.js/bindings/webcore/JSAddEventListenerOptions.cpp
src/bun.js/bindings/webcore/JSBroadcastChannel.cpp
src/bun.js/bindings/webcore/JSByteLengthQueuingStrategy.cpp
src/bun.js/bindings/webcore/JSCallbackData.cpp
src/bun.js/bindings/webcore/JSCloseEvent.cpp
src/bun.js/bindings/webcore/JSCookie.cpp
src/bun.js/bindings/webcore/JSCookieMap.cpp
src/bun.js/bindings/webcore/JSCountQueuingStrategy.cpp
src/bun.js/bindings/webcore/JSCustomEvent.cpp
src/bun.js/bindings/webcore/JSDOMBindingInternalsBuiltins.cpp
src/bun.js/bindings/webcore/JSDOMBuiltinConstructorBase.cpp
src/bun.js/bindings/webcore/JSDOMConstructorBase.cpp
src/bun.js/bindings/webcore/JSDOMConvertDate.cpp
src/bun.js/bindings/webcore/JSDOMConvertNumbers.cpp
src/bun.js/bindings/webcore/JSDOMConvertStrings.cpp
src/bun.js/bindings/webcore/JSDOMConvertWebGL.cpp
src/bun.js/bindings/webcore/JSDOMException.cpp
src/bun.js/bindings/webcore/JSDOMFormData.cpp
src/bun.js/bindings/webcore/JSDOMGuardedObject.cpp
src/bun.js/bindings/webcore/JSDOMIterator.cpp
src/bun.js/bindings/webcore/JSDOMOperation.cpp
src/bun.js/bindings/webcore/JSDOMPromise.cpp
src/bun.js/bindings/webcore/JSDOMPromiseDeferred.cpp
src/bun.js/bindings/webcore/JSDOMURL.cpp
src/bun.js/bindings/webcore/JSErrorCallback.cpp
src/bun.js/bindings/webcore/JSErrorEvent.cpp
src/bun.js/bindings/webcore/JSErrorEventCustom.cpp
src/bun.js/bindings/webcore/JSErrorHandler.cpp
src/bun.js/bindings/webcore/JSEvent.cpp
src/bun.js/bindings/webcore/JSEventCustom.cpp
src/bun.js/bindings/webcore/JSEventDOMJIT.cpp
src/bun.js/bindings/webcore/JSEventEmitter.cpp
src/bun.js/bindings/webcore/JSEventEmitterCustom.cpp
src/bun.js/bindings/webcore/JSEventInit.cpp
src/bun.js/bindings/webcore/JSEventListener.cpp
src/bun.js/bindings/webcore/JSEventListenerOptions.cpp
src/bun.js/bindings/webcore/JSEventModifierInit.cpp
src/bun.js/bindings/webcore/JSEventTarget.cpp
src/bun.js/bindings/webcore/JSEventTargetCustom.cpp
src/bun.js/bindings/webcore/JSEventTargetNode.cpp
src/bun.js/bindings/webcore/JSFetchHeaders.cpp
src/bun.js/bindings/webcore/JSMessageChannel.cpp
src/bun.js/bindings/webcore/JSMessageChannelCustom.cpp
src/bun.js/bindings/webcore/JSMessageEvent.cpp
src/bun.js/bindings/webcore/JSMessageEventCustom.cpp
src/bun.js/bindings/webcore/JSMessagePort.cpp
src/bun.js/bindings/webcore/JSMessagePortCustom.cpp
src/bun.js/bindings/webcore/JSMIMEBindings.cpp
src/bun.js/bindings/webcore/JSMIMEParams.cpp
src/bun.js/bindings/webcore/JSMIMEType.cpp
src/bun.js/bindings/webcore/JSPerformance.cpp
src/bun.js/bindings/webcore/JSPerformanceEntry.cpp
src/bun.js/bindings/webcore/JSPerformanceEntryCustom.cpp
src/bun.js/bindings/webcore/JSPerformanceMark.cpp
src/bun.js/bindings/webcore/JSPerformanceMarkOptions.cpp
src/bun.js/bindings/webcore/JSPerformanceMeasure.cpp
src/bun.js/bindings/webcore/JSPerformanceMeasureOptions.cpp
src/bun.js/bindings/webcore/JSPerformanceObserver.cpp
src/bun.js/bindings/webcore/JSPerformanceObserverCallback.cpp
src/bun.js/bindings/webcore/JSPerformanceObserverCustom.cpp
src/bun.js/bindings/webcore/JSPerformanceObserverEntryList.cpp
src/bun.js/bindings/webcore/JSPerformanceResourceTiming.cpp
src/bun.js/bindings/webcore/JSPerformanceServerTiming.cpp
src/bun.js/bindings/webcore/JSPerformanceTiming.cpp
src/bun.js/bindings/webcore/JSReadableByteStreamController.cpp
src/bun.js/bindings/webcore/JSReadableStream.cpp
src/bun.js/bindings/webcore/JSReadableStreamBYOBReader.cpp
src/bun.js/bindings/webcore/JSReadableStreamBYOBRequest.cpp
src/bun.js/bindings/webcore/JSReadableStreamDefaultController.cpp
src/bun.js/bindings/webcore/JSReadableStreamDefaultReader.cpp
src/bun.js/bindings/webcore/JSReadableStreamSink.cpp
src/bun.js/bindings/webcore/JSReadableStreamSource.cpp
src/bun.js/bindings/webcore/JSReadableStreamSourceCustom.cpp
src/bun.js/bindings/webcore/JSStructuredSerializeOptions.cpp
src/bun.js/bindings/webcore/JSTextDecoderStream.cpp
src/bun.js/bindings/webcore/JSTextEncoder.cpp
src/bun.js/bindings/webcore/JSTextEncoderStream.cpp
src/bun.js/bindings/webcore/JSTransformStream.cpp
src/bun.js/bindings/webcore/JSTransformStreamDefaultController.cpp
src/bun.js/bindings/webcore/JSURLSearchParams.cpp
src/bun.js/bindings/webcore/JSWasmStreamingCompiler.cpp
src/bun.js/bindings/webcore/JSWebSocket.cpp
src/bun.js/bindings/webcore/JSWorker.cpp
src/bun.js/bindings/webcore/JSWorkerOptions.cpp
src/bun.js/bindings/webcore/JSWritableStream.cpp
src/bun.js/bindings/webcore/JSWritableStreamDefaultController.cpp
src/bun.js/bindings/webcore/JSWritableStreamDefaultWriter.cpp
src/bun.js/bindings/webcore/JSWritableStreamSink.cpp
src/bun.js/bindings/webcore/MessageChannel.cpp
src/bun.js/bindings/webcore/MessageEvent.cpp
src/bun.js/bindings/webcore/MessagePort.cpp
src/bun.js/bindings/webcore/MessagePortChannel.cpp
src/bun.js/bindings/webcore/MessagePortChannelProvider.cpp
src/bun.js/bindings/webcore/MessagePortChannelProviderImpl.cpp
src/bun.js/bindings/webcore/MessagePortChannelRegistry.cpp
src/bun.js/bindings/webcore/NetworkLoadMetrics.cpp
src/bun.js/bindings/webcore/Performance.cpp
src/bun.js/bindings/webcore/PerformanceEntry.cpp
src/bun.js/bindings/webcore/PerformanceMark.cpp
src/bun.js/bindings/webcore/PerformanceMeasure.cpp
src/bun.js/bindings/webcore/PerformanceObserver.cpp
src/bun.js/bindings/webcore/PerformanceObserverEntryList.cpp
src/bun.js/bindings/webcore/PerformanceResourceTiming.cpp
src/bun.js/bindings/webcore/PerformanceServerTiming.cpp
src/bun.js/bindings/webcore/PerformanceTiming.cpp
src/bun.js/bindings/webcore/PerformanceUserTiming.cpp
src/bun.js/bindings/webcore/ReadableStream.cpp
src/bun.js/bindings/webcore/ReadableStreamDefaultController.cpp
src/bun.js/bindings/webcore/ReadableStreamSink.cpp
src/bun.js/bindings/webcore/ReadableStreamSource.cpp
src/bun.js/bindings/webcore/ResourceTiming.cpp
src/bun.js/bindings/webcore/RFC7230.cpp
src/bun.js/bindings/webcore/SerializedScriptValue.cpp
src/bun.js/bindings/webcore/ServerTiming.cpp
src/bun.js/bindings/webcore/ServerTimingParser.cpp
src/bun.js/bindings/webcore/StructuredClone.cpp
src/bun.js/bindings/webcore/TextEncoder.cpp
src/bun.js/bindings/webcore/WebCoreTypedArrayController.cpp
src/bun.js/bindings/webcore/WebSocket.cpp
src/bun.js/bindings/webcore/Worker.cpp
src/bun.js/bindings/webcore/WritableStream.cpp
src/bun.js/bindings/webcrypto/CommonCryptoDERUtilities.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithm.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_CBC.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_CBCOpenSSL.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_CFB.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_CFBOpenSSL.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_CTR.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_CTROpenSSL.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_GCM.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_GCMOpenSSL.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_KW.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_KWOpenSSL.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmECDH.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmECDHOpenSSL.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmECDSA.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmECDSAOpenSSL.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmEd25519.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmHKDF.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmHKDFOpenSSL.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmHMAC.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmHMACOpenSSL.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmPBKDF2.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmPBKDF2OpenSSL.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmRegistry.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmRegistryOpenSSL.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmRSA_OAEP.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmRSA_OAEPOpenSSL.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmRSA_PSS.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmRSA_PSSOpenSSL.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmRSAES_PKCS1_v1_5.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmRSAES_PKCS1_v1_5OpenSSL.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmRSASSA_PKCS1_v1_5.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmRSASSA_PKCS1_v1_5OpenSSL.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA1.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA224.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA256.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA384.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA512.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmX25519.cpp
src/bun.js/bindings/webcrypto/CryptoDigest.cpp
src/bun.js/bindings/webcrypto/CryptoKey.cpp
src/bun.js/bindings/webcrypto/CryptoKeyAES.cpp
src/bun.js/bindings/webcrypto/CryptoKeyEC.cpp
src/bun.js/bindings/webcrypto/CryptoKeyECOpenSSL.cpp
src/bun.js/bindings/webcrypto/CryptoKeyHMAC.cpp
src/bun.js/bindings/webcrypto/CryptoKeyOKP.cpp
src/bun.js/bindings/webcrypto/CryptoKeyOKPOpenSSL.cpp
src/bun.js/bindings/webcrypto/CryptoKeyRaw.cpp
src/bun.js/bindings/webcrypto/CryptoKeyRSA.cpp
src/bun.js/bindings/webcrypto/CryptoKeyRSAComponents.cpp
src/bun.js/bindings/webcrypto/CryptoKeyRSAOpenSSL.cpp
src/bun.js/bindings/webcrypto/JSAesCbcCfbParams.cpp
src/bun.js/bindings/webcrypto/JSAesCtrParams.cpp
src/bun.js/bindings/webcrypto/JSAesGcmParams.cpp
src/bun.js/bindings/webcrypto/JSAesKeyParams.cpp
src/bun.js/bindings/webcrypto/JSCryptoAesKeyAlgorithm.cpp
src/bun.js/bindings/webcrypto/JSCryptoAlgorithmParameters.cpp
src/bun.js/bindings/webcrypto/JSCryptoEcKeyAlgorithm.cpp
src/bun.js/bindings/webcrypto/JSCryptoHmacKeyAlgorithm.cpp
src/bun.js/bindings/webcrypto/JSCryptoKey.cpp
src/bun.js/bindings/webcrypto/JSCryptoKeyAlgorithm.cpp
src/bun.js/bindings/webcrypto/JSCryptoKeyPair.cpp
src/bun.js/bindings/webcrypto/JSCryptoKeyUsage.cpp
src/bun.js/bindings/webcrypto/JSCryptoRsaHashedKeyAlgorithm.cpp
src/bun.js/bindings/webcrypto/JSCryptoRsaKeyAlgorithm.cpp
src/bun.js/bindings/webcrypto/JSEcdhKeyDeriveParams.cpp
src/bun.js/bindings/webcrypto/JSEcdsaParams.cpp
src/bun.js/bindings/webcrypto/JSEcKeyParams.cpp
src/bun.js/bindings/webcrypto/JSHkdfParams.cpp
src/bun.js/bindings/webcrypto/JSHmacKeyParams.cpp
src/bun.js/bindings/webcrypto/JSJsonWebKey.cpp
src/bun.js/bindings/webcrypto/JSPbkdf2Params.cpp
src/bun.js/bindings/webcrypto/JSRsaHashedImportParams.cpp
src/bun.js/bindings/webcrypto/JSRsaHashedKeyGenParams.cpp
src/bun.js/bindings/webcrypto/JSRsaKeyGenParams.cpp
src/bun.js/bindings/webcrypto/JSRsaOaepParams.cpp
src/bun.js/bindings/webcrypto/JSRsaOtherPrimesInfo.cpp
src/bun.js/bindings/webcrypto/JSRsaPssParams.cpp
src/bun.js/bindings/webcrypto/JSSubtleCrypto.cpp
src/bun.js/bindings/webcrypto/JSX25519Params.cpp
src/bun.js/bindings/webcrypto/OpenSSLUtilities.cpp
src/bun.js/bindings/webcrypto/PhonyWorkQueue.cpp
src/bun.js/bindings/webcrypto/SerializedCryptoKeyWrapOpenSSL.cpp
src/bun.js/bindings/webcrypto/SubtleCrypto.cpp
src/bun.js/bindings/workaround-missing-symbols.cpp
src/bun.js/bindings/wtf-bindings.cpp
src/bun.js/bindings/ZigGeneratedCode.cpp
src/bun.js/bindings/ZigGlobalObject.cpp
src/bun.js/bindings/ZigSourceProvider.cpp
src/bun.js/modules/NodeModuleModule.cpp
src/bun.js/modules/NodeTTYModule.cpp
src/bun.js/modules/NodeUtilTypesModule.cpp
src/bun.js/modules/ObjectModule.cpp
src/deps/libuwsockets.cpp
src/io/io_darwin.cpp
src/vm/Semaphore.cpp
src/vm/SigintWatcher.cpp
```
```diff
@@ -4,7 +4,7 @@ register_repository(
   REPOSITORY
     oven-sh/boringssl
   COMMIT
-    7a5d984c69b0c34c4cbb56c6812eaa5b9bef485c
+    f1ffd9e83d4f5c28a9c70d73f9a4e6fcf310062f
 )

 register_cmake_command(
```
```diff
@@ -636,6 +636,7 @@ register_command(
   SOURCES
     ${BUN_ZIG_SOURCES}
     ${BUN_ZIG_GENERATED_SOURCES}
+    ${CWD}/src/install/PackageManager/scanner-entry.ts # Is there a better way to do this?
 )

 set_property(TARGET bun-zig PROPERTY JOB_POOL compile_pool)
```
```diff
@@ -1125,6 +1126,9 @@ endif()
 include_directories(${WEBKIT_INCLUDE_PATH})

+# Include the generated dependency versions header
+include_directories(${CMAKE_BINARY_DIR})
+
 if(NOT WEBKIT_LOCAL AND NOT APPLE)
   include_directories(${WEBKIT_INCLUDE_PATH}/wtf/unicode)
 endif()
```
```diff
@@ -4,7 +4,7 @@ register_repository(
   REPOSITORY
     HdrHistogram/HdrHistogram_c
   COMMIT
-    8dcce8f68512fca460b171bccc3a5afce0048779
+    be60a9987ee48d0abf0d7b6a175bad8d6c1585d1
 )

 register_cmake_command(
```
209
cmake/tools/GenerateDependencyVersions.cmake
Normal file
209
cmake/tools/GenerateDependencyVersions.cmake
Normal file
@@ -0,0 +1,209 @@
# GenerateDependencyVersions.cmake
# Generates a header file with all dependency versions

# Function to extract version from git tree object
function(get_git_tree_hash dep_name output_var)
  execute_process(
    COMMAND git rev-parse HEAD:./src/deps/${dep_name}
    WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}"
    OUTPUT_VARIABLE commit_hash
    OUTPUT_STRIP_TRAILING_WHITESPACE
    ERROR_QUIET
    RESULT_VARIABLE result
  )
  if(result EQUAL 0 AND commit_hash)
    set(${output_var} "${commit_hash}" PARENT_SCOPE)
  else()
    set(${output_var} "unknown" PARENT_SCOPE)
  endif()
endfunction()

# Function to extract version from header file using regex
function(extract_version_from_header header_file regex_pattern output_var)
  if(EXISTS "${header_file}")
    file(STRINGS "${header_file}" version_line REGEX "${regex_pattern}")
    if(version_line)
      string(REGEX MATCH "${regex_pattern}" _match "${version_line}")
      if(CMAKE_MATCH_1)
        set(${output_var} "${CMAKE_MATCH_1}" PARENT_SCOPE)
      else()
        set(${output_var} "unknown" PARENT_SCOPE)
      endif()
    else()
      set(${output_var} "unknown" PARENT_SCOPE)
    endif()
  else()
    set(${output_var} "unknown" PARENT_SCOPE)
  endif()
endfunction()

# Main function to generate the header file
function(generate_dependency_versions_header)
  set(DEPS_PATH "${CMAKE_SOURCE_DIR}/src/deps")
  set(VENDOR_PATH "${CMAKE_SOURCE_DIR}/vendor")

  # Initialize version variables
  set(DEPENDENCY_VERSIONS "")

  # WebKit version (from SetupWebKit.cmake or command line)
  if(WEBKIT_VERSION)
    set(WEBKIT_VERSION_STR "${WEBKIT_VERSION}")
  else()
    set(WEBKIT_VERSION_STR "0ddf6f47af0a9782a354f61e06d7f83d097d9f84")
  endif()
  list(APPEND DEPENDENCY_VERSIONS "WEBKIT" "${WEBKIT_VERSION_STR}")

  # Track input files so CMake reconfigures when they change
  set_property(DIRECTORY APPEND PROPERTY CMAKE_CONFIGURE_DEPENDS
    "${CMAKE_SOURCE_DIR}/package.json"
    "${VENDOR_PATH}/libdeflate/libdeflate.h"
    "${VENDOR_PATH}/zlib/zlib.h"
    "${DEPS_PATH}/zstd/lib/zstd.h"
  )

  # Hardcoded dependency versions (previously from generated_versions_list.zig)
  # These are the commit hashes/tree objects for each dependency
  list(APPEND DEPENDENCY_VERSIONS "BORINGSSL" "29a2cd359458c9384694b75456026e4b57e3e567")
  list(APPEND DEPENDENCY_VERSIONS "C_ARES" "d1722e6e8acaf10eb73fa995798a9cd421d9f85e")
  list(APPEND DEPENDENCY_VERSIONS "LIBARCHIVE" "898dc8319355b7e985f68a9819f182aaed61b53a")
  list(APPEND DEPENDENCY_VERSIONS "LIBDEFLATE_HASH" "dc76454a39e7e83b68c3704b6e3784654f8d5ac5")
  list(APPEND DEPENDENCY_VERSIONS "LOLHTML" "8d4c273ded322193d017042d1f48df2766b0f88b")
  list(APPEND DEPENDENCY_VERSIONS "LSHPACK" "3d0f1fc1d6e66a642e7a98c55deb38aa986eb4b0")
  list(APPEND DEPENDENCY_VERSIONS "MIMALLOC" "4c283af60cdae205df5a872530c77e2a6a307d43")
  list(APPEND DEPENDENCY_VERSIONS "PICOHTTPPARSER" "066d2b1e9ab820703db0837a7255d92d30f0c9f5")
  list(APPEND DEPENDENCY_VERSIONS "TINYCC" "ab631362d839333660a265d3084d8ff060b96753")
  list(APPEND DEPENDENCY_VERSIONS "ZLIB_HASH" "886098f3f339617b4243b286f5ed364b9989e245")
  list(APPEND DEPENDENCY_VERSIONS "ZSTD_HASH" "794ea1b0afca0f020f4e57b6732332231fb23c70")

  # Extract semantic versions from header files where available
  extract_version_from_header(
    "${VENDOR_PATH}/libdeflate/libdeflate.h"
    "#define LIBDEFLATE_VERSION_STRING[ \t]+\"([0-9\\.]+)\""
    LIBDEFLATE_VERSION_STRING
  )
  list(APPEND DEPENDENCY_VERSIONS "LIBDEFLATE_VERSION" "${LIBDEFLATE_VERSION_STRING}")

  extract_version_from_header(
    "${VENDOR_PATH}/zlib/zlib.h"
    "#define[ \t]+ZLIB_VERSION[ \t]+\"([^\"]+)\""
    ZLIB_VERSION_STRING
  )
  list(APPEND DEPENDENCY_VERSIONS "ZLIB_VERSION" "${ZLIB_VERSION_STRING}")

  extract_version_from_header(
    "${DEPS_PATH}/zstd/lib/zstd.h"
    "#define[ \t]+ZSTD_VERSION_STRING[ \t]+\"([^\"]+)\""
    ZSTD_VERSION_STRING
  )
  list(APPEND DEPENDENCY_VERSIONS "ZSTD_VERSION" "${ZSTD_VERSION_STRING}")

  # Bun version from package.json
  if(EXISTS "${CMAKE_SOURCE_DIR}/package.json")
    file(READ "${CMAKE_SOURCE_DIR}/package.json" PACKAGE_JSON)
    string(REGEX MATCH "\"version\"[ \t]*:[ \t]*\"([^\"]+)\"" _ ${PACKAGE_JSON})
    if(CMAKE_MATCH_1)
      set(BUN_VERSION_STRING "${CMAKE_MATCH_1}")
    else()
      set(BUN_VERSION_STRING "unknown")
    endif()
  else()
    set(BUN_VERSION_STRING "${VERSION}")
  endif()
  list(APPEND DEPENDENCY_VERSIONS "BUN_VERSION" "${BUN_VERSION_STRING}")

  # Node.js compatibility version (hardcoded as in the current implementation)
  set(NODEJS_COMPAT_VERSION "22.12.0")
  list(APPEND DEPENDENCY_VERSIONS "NODEJS_COMPAT_VERSION" "${NODEJS_COMPAT_VERSION}")

  # Get Bun's git SHA for uws/usockets versions (they use Bun's own SHA)
  execute_process(
    COMMAND git rev-parse HEAD
    WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}"
    OUTPUT_VARIABLE BUN_GIT_SHA
    OUTPUT_STRIP_TRAILING_WHITESPACE
    ERROR_QUIET
  )
  if(NOT BUN_GIT_SHA)
    set(BUN_GIT_SHA "unknown")
  endif()
  list(APPEND DEPENDENCY_VERSIONS "UWS" "${BUN_GIT_SHA}")
  list(APPEND DEPENDENCY_VERSIONS "USOCKETS" "${BUN_GIT_SHA}")

  # Zig version - hardcoded for now, can be updated as needed
  # This should match the version of Zig used to build Bun
  list(APPEND DEPENDENCY_VERSIONS "ZIG" "0.14.1")

  # Generate the header file content
  set(HEADER_CONTENT "// This file is auto-generated by CMake. Do not edit manually.\n")
  string(APPEND HEADER_CONTENT "#ifndef BUN_DEPENDENCY_VERSIONS_H\n")
  string(APPEND HEADER_CONTENT "#define BUN_DEPENDENCY_VERSIONS_H\n\n")
  string(APPEND HEADER_CONTENT "#ifdef __cplusplus\n")
  string(APPEND HEADER_CONTENT "extern \"C\" {\n")
  string(APPEND HEADER_CONTENT "#endif\n\n")
  string(APPEND HEADER_CONTENT "// Dependency versions\n")

  # Process the version list
  list(LENGTH DEPENDENCY_VERSIONS num_versions)
  math(EXPR last_idx "${num_versions} - 1")
  set(i 0)
  while(i LESS num_versions)
    list(GET DEPENDENCY_VERSIONS ${i} name)
    math(EXPR value_idx "${i} + 1")
    if(value_idx LESS num_versions)
      list(GET DEPENDENCY_VERSIONS ${value_idx} value)
      # Only emit #define if value is not "unknown"
      if(NOT "${value}" STREQUAL "unknown")
        string(APPEND HEADER_CONTENT "#define BUN_DEP_${name} \"${value}\"\n")
      endif()
    endif()
    math(EXPR i "${i} + 2")
  endwhile()

  string(APPEND HEADER_CONTENT "\n")
  string(APPEND HEADER_CONTENT "// C string constants for easy access\n")

  # Create C string constants
  set(i 0)
  while(i LESS num_versions)
    list(GET DEPENDENCY_VERSIONS ${i} name)
    math(EXPR value_idx "${i} + 1")
    if(value_idx LESS num_versions)
      list(GET DEPENDENCY_VERSIONS ${value_idx} value)
      # Only emit constant if value is not "unknown"
      if(NOT "${value}" STREQUAL "unknown")
        string(APPEND HEADER_CONTENT "static const char* const BUN_VERSION_${name} = \"${value}\";\n")
      endif()
    endif()
    math(EXPR i "${i} + 2")
  endwhile()

  string(APPEND HEADER_CONTENT "\n#ifdef __cplusplus\n")
  string(APPEND HEADER_CONTENT "}\n")
  string(APPEND HEADER_CONTENT "#endif\n\n")
  string(APPEND HEADER_CONTENT "#endif // BUN_DEPENDENCY_VERSIONS_H\n")

  # Write the header file
  set(OUTPUT_FILE "${CMAKE_BINARY_DIR}/bun_dependency_versions.h")
  file(WRITE "${OUTPUT_FILE}" "${HEADER_CONTENT}")

  message(STATUS "Generated dependency versions header: ${OUTPUT_FILE}")

  # Also create a more detailed version for debugging
  set(DEBUG_OUTPUT_FILE "${CMAKE_BINARY_DIR}/bun_dependency_versions_debug.txt")
  set(DEBUG_CONTENT "Bun Dependency Versions\n")
  string(APPEND DEBUG_CONTENT "=======================\n\n")
  set(i 0)
  while(i LESS num_versions)
    list(GET DEPENDENCY_VERSIONS ${i} name)
    math(EXPR value_idx "${i} + 1")
    if(value_idx LESS num_versions)
      list(GET DEPENDENCY_VERSIONS ${value_idx} value)
      string(APPEND DEBUG_CONTENT "${name}: ${value}\n")
    endif()
    math(EXPR i "${i} + 2")
  endwhile()
  file(WRITE "${DEBUG_OUTPUT_FILE}" "${DEBUG_CONTENT}")
endfunction()

# Call the function to generate the header
generate_dependency_versions_header()
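For illustration, the header this function writes to `${CMAKE_BINARY_DIR}/bun_dependency_versions.h` would look roughly like the following (a sketch derived from the string-building above, showing only two entries; not actual build output):

```c
// This file is auto-generated by CMake. Do not edit manually.
#ifndef BUN_DEPENDENCY_VERSIONS_H
#define BUN_DEPENDENCY_VERSIONS_H

#ifdef __cplusplus
extern "C" {
#endif

// Dependency versions
#define BUN_DEP_WEBKIT "0ddf6f47af0a9782a354f61e06d7f83d097d9f84"
#define BUN_DEP_ZIG "0.14.1"
/* ...one #define per (name, value) pair whose value is not "unknown"... */

// C string constants for easy access
static const char* const BUN_VERSION_WEBKIT = "0ddf6f47af0a9782a354f61e06d7f83d097d9f84";
static const char* const BUN_VERSION_ZIG = "0.14.1";

#ifdef __cplusplus
}
#endif

#endif // BUN_DEPENDENCY_VERSIONS_H
```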
@@ -2,7 +2,7 @@ option(WEBKIT_VERSION "The version of WebKit to use")
option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading")

if(NOT WEBKIT_VERSION)
-  set(WEBKIT_VERSION f474428677de1fafaf13bb3b9a050fe3504dda25)
+  set(WEBKIT_VERSION 495c25e24927ba03277ae225cd42811588d03ff8)
endif()

string(SUBSTRING ${WEBKIT_VERSION} 0 16 WEBKIT_VERSION_PREFIX)
@@ -604,13 +604,12 @@ const db = new SQL({
  connectionTimeout: 30, // Timeout when establishing new connections

  // SSL/TLS options
  ssl: "prefer", // or "disable", "require", "verify-ca", "verify-full"
-  // tls: {
-  //   rejectUnauthorized: true,
-  //   ca: "path/to/ca.pem",
-  //   key: "path/to/key.pem",
-  //   cert: "path/to/cert.pem",
-  // },
+  tls: {
+    rejectUnauthorized: true,
+    ca: "path/to/ca.pem",
+    key: "path/to/key.pem",
+    cert: "path/to/cert.pem",
+  },

  // Callbacks
  onconnect: client => {
@@ -184,6 +184,45 @@ const { database, redis } = require("./config.yaml");
console.log(database.port); // 5432
```

### TypeScript Support

While Bun can import YAML files directly, TypeScript doesn't know the types of your YAML files by default. To add TypeScript support for your YAML imports, create a declaration file with `.d.ts` appended to the YAML filename (e.g., `config.yaml` → `config.yaml.d.ts`):

```yaml#config.yaml
features: "advanced"
server:
  host: localhost
  port: 3000
```

```ts#config.yaml.d.ts
const contents: {
  features: string;
  server: {
    host: string;
    port: number;
  };
};

export = contents;
```

Now TypeScript will provide proper type checking and auto-completion:

```ts#app.ts
import config from "./config.yaml";

// TypeScript knows the types!
config.server.port; // number
config.server.host; // string
config.features; // string

// TypeScript will catch errors
config.server.unknown; // Error: Property 'unknown' does not exist
```

This approach works for both ES modules and CommonJS, giving you full type safety while Bun continues to handle the actual YAML parsing at runtime.
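For CommonJS consumers, the same declaration applies; a minimal sketch (the `export =` form types the `require`-style import as well):

```ts#app.cts
import config = require("./config.yaml");

console.log(config.server.port); // number
console.log(config.features); // string
```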
## Hot Reloading with YAML

One of the most powerful features of Bun's YAML support is hot reloading. When you run your application with `bun --hot`, changes to YAML files are automatically detected and reloaded without closing connections.
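As a sketch of how this plays out (the config file and field below are hypothetical), a server started with `bun --hot app.ts` picks up YAML edits on the next request:

```ts#app.ts
import limits from "./limits.yaml"; // hypothetical config file

Bun.serve({
  port: 3000,
  fetch() {
    // With `bun --hot`, edits to limits.yaml show up here without a restart.
    return new Response(`rate limit: ${limits.rateLimit}`);
  },
});
```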
@@ -733,6 +733,10 @@ Whether to enable minification. Default `false`.
When targeting `bun`, identifiers will be minified by default.
{% /callout %}

{% callout %}
When `minify.syntax` is enabled, unused function and class expression names are removed unless `minify.keepNames` is set to `true` or the `--keep-names` flag is used.
{% /callout %}

To enable all minification options:

{% codetabs group="a" %}

@@ -763,12 +767,16 @@ await Bun.build({
    whitespace: true,
    identifiers: true,
    syntax: true,
    keepNames: false, // default
  },
})
```

```bash#CLI
$ bun build ./index.tsx --outdir ./out --minify-whitespace --minify-identifiers --minify-syntax

# To preserve function and class names during minification:
$ bun build ./index.tsx --outdir ./out --minify --keep-names
```

{% /codetabs %}
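To make the `keepNames` effect concrete, a small sketch (behavior inferred from the callout above; exact output depends on the minifier):

```ts
// Bundle with: bun build entry.ts --outdir out --minify
const handlers = [function validate() {}, function submit() {}];

// The inner names are "unused" bindings, so syntax minification may strip them,
// turning each .name into "". With --keep-names (or minify.keepNames: true),
// this still logs ["validate", "submit"].
console.log(handlers.map(h => h.name));
```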
@@ -1553,6 +1561,7 @@ interface BuildConfig {
    whitespace?: boolean;
    syntax?: boolean;
    identifiers?: boolean;
+   keepNames?: boolean;
  };
  /**
   * Ignore dead code elimination/tree-shaking annotations such as @__PURE__ and package.json
@@ -9,8 +9,9 @@ $ bun create next-app
✔ What is your project named? … my-app
✔ Would you like to use TypeScript with this project? … No / Yes
✔ Would you like to use ESLint with this project? … No / Yes
+✔ Would you like to use Tailwind CSS? ... No / Yes
✔ Would you like to use `src/` directory with this project? … No / Yes
-✔ Would you like to use experimental `app/` directory with this project? … No / Yes
+✔ Would you like to use App Router? (recommended) ... No / Yes
✔ What import alias would you like configured? … @/*
Creating a new Next.js app in /path/to/my-app.
```
@@ -73,4 +73,30 @@ console.log(data.hobbies); // => ["reading", "coding"]

---

## TypeScript Support

To add TypeScript support for your YAML imports, create a declaration file with `.d.ts` appended to the YAML filename (e.g., `config.yaml` → `config.yaml.d.ts`):

```ts#config.yaml.d.ts
const contents: {
  database: {
    host: string;
    port: number;
    name: string;
  };
  server: {
    port: number;
    timeout: number;
  };
  features: {
    auth: boolean;
    rateLimit: boolean;
  };
};

export = contents;
```

---

See [Docs > API > YAML](https://bun.com/docs/api/yaml) for complete documentation on YAML support in Bun.
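A quick consumer check against that declaration (a sketch; assumes the `config.yaml` described in this guide):

```ts#app.ts
import config from "./config.yaml";

config.database.host; // string
config.server.timeout; // number
config.features.auth; // boolean
```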
@@ -407,6 +407,9 @@ export default {
  page("api/cc", "C Compiler", {
    description: `Build & run native C from JavaScript with Bun's native C compiler API`,
  }), // "`bun:ffi`"),
+ page("api/secrets", "Secrets", {
+   description: `Store and retrieve sensitive credentials securely using the operating system's native credential storage APIs.`,
+ }), // "`Bun.secrets`"),
  page("cli/test", "Testing", {
    description: `Bun's built-in test runner is fast and uses Jest-compatible syntax.`,
  }), // "`bun:test`"),
@@ -521,7 +521,7 @@ When a security scanner is configured:
- Installation is cancelled if fatal issues are found
- Security warnings are displayed during installation

-Learn more about [using and writing security scanners](/docs/install/security).
+Learn more about [using and writing security scanners](/docs/install/security-scanner-api).

### `install.linker`
@@ -756,3 +756,76 @@ Bun implements the following matchers. Full Jest compatibility is on the roadmap
- [`.toThrowErrorMatchingInlineSnapshot()`](https://jestjs.io/docs/expect#tothrowerrormatchinginlinesnapshotinlinesnapshot)

{% /table %}

## TypeScript Type Safety

Bun's test runner provides enhanced TypeScript support with intelligent type checking for your test assertions. The type system helps catch potential bugs at compile time while still allowing flexibility when needed.

### Strict Type Checking by Default

By default, Bun's test matchers enforce strict type checking between the actual value and the expected value:

```ts
import { expect, test } from "bun:test";

test("strict typing", () => {
  const str = "hello";
  const num = 42;

  expect(str).toBe("hello"); // ✅ OK: string to string
  expect(num).toBe(42); // ✅ OK: number to number
  expect(str).toBe(42); // ❌ TypeScript error: string vs number
});
```

This helps catch common mistakes where you might accidentally compare values of different types.

### Relaxed Type Checking with Type Parameters

Sometimes you need more flexibility in your tests, especially when working with:

- Dynamic data from APIs
- Polymorphic functions that can return multiple types
- Generic utility functions
- Migration of existing test suites

For these cases, you can "opt out" of strict type checking by providing an explicit type parameter to matcher methods:

```ts
import { expect, test } from "bun:test";

test("relaxed typing with type parameters", () => {
  const value: unknown = getSomeValue();

  // These would normally cause TypeScript errors, but type parameters allow them:
  expect(value).toBe<number>(42); // No TS error, runtime check still works
  expect(value).toEqual<string>("hello"); // No TS error, runtime check still works
  expect(value).toStrictEqual<boolean>(true); // No TS error, runtime check still works
});

test("useful for dynamic data", () => {
  const apiResponse: any = { status: "success" };

  // Without a type parameter: TypeScript error (any vs string)
  // expect(apiResponse.status).toBe("success");

  // With a type parameter: no TypeScript error, runtime assertion still enforced
  expect(apiResponse.status).toBe<string>("success"); // ✅ OK
});
```

### Migration from Looser Type Systems

If you're migrating from a test framework with looser TypeScript integration, you can use type parameters as a stepping stone:

```ts
// Old Jest test that worked but wasn't type-safe
expect(response.data).toBe(200); // No type error in some setups

// Bun equivalent with explicit typing during migration
expect(response.data).toBe<number>(200); // Explicit about expected type

// Ideal Bun test after refactoring
const statusCode: number = response.data;
expect(statusCode).toBe(200); // Type-safe without explicit parameter
```
@@ -19,3 +19,6 @@ command script import -c bun_pretty_printer.py

command script delete btjs
command alias btjs p {printf("gathering btjs trace...\n");printf("%s\n", (char*)dumpBtjsTrace())}

+# Do not pass SIGHUP on to the child process; it is often not the real error and the stop point will be nonsensical.
+process handle -p false -s false -n true SIGHUP
@@ -1,7 +1,7 @@
{
  "private": true,
  "name": "bun",
- "version": "1.2.22",
+ "version": "1.2.23",
  "workspaces": [
    "./packages/bun-types",
    "./packages/@types/bun"
1  packages/bun-types/bun.d.ts  vendored
@@ -1819,6 +1819,7 @@ declare module "bun" {
    whitespace?: boolean;
    syntax?: boolean;
    identifiers?: boolean;
+   keepNames?: boolean;
  };

  /**
9  packages/bun-types/globals.d.ts  vendored
@@ -1556,6 +1556,15 @@ declare var URL: Bun.__internal.UseLibDomIfAvailable<
  }
>;

+/**
+ * The **`AbortController`** interface represents a controller object that allows you to abort one or more Web requests as and when desired.
+ *
+ * [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortController)
+ */
+interface AbortController {
+  readonly signal: AbortSignal;
+  abort(reason?: any): void;
+}
declare var AbortController: Bun.__internal.UseLibDomIfAvailable<
  "AbortController",
  {
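For reference, the standard usage these globals describe (plain Web API behavior, nothing Bun-specific):

```ts
const controller = new AbortController();
const pending = fetch("https://example.com/", { signal: controller.signal });

// Aborting settles the fetch with the given reason.
controller.abort(new Error("cancelled"));
await pending.catch(err => console.error(err.message)); // "cancelled"
```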
4  packages/bun-types/index.d.ts  vendored
@@ -26,6 +26,6 @@

/// <reference path="./bun.ns.d.ts" />

-// @ts-ignore Must disable this so it doesn't conflict with the DOM onmessage type, but still
+// Must disable this so it doesn't conflict with the DOM onmessage type, but still
// allows us to declare our own globals that Node's types can "see" and not conflict with
-declare var onmessage: never;
+declare var onmessage: Bun.__internal.UseLibDomIfAvailable<"onmessage", never>;
8  packages/bun-types/redis.d.ts  vendored
@@ -270,6 +270,14 @@ declare module "bun" {
   */
  hmset(key: RedisClient.KeyLike, fieldValues: string[]): Promise<string>;

+ /**
+  * Get the value of a hash field
+  * @param key The hash key
+  * @param field The field to get
+  * @returns Promise that resolves with the field value or null if the field doesn't exist
+  */
+ hget(key: RedisClient.KeyLike, field: RedisClient.KeyLike): Promise<string | null>;
+
  /**
   * Get the values of all the given hash fields
   * @param key The hash key
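A quick sketch of the newly typed method against the default client (assumes a reachable Redis; key and fields are hypothetical):

```ts
import { redis } from "bun";

await redis.hmset("user:1", ["name", "Alice", "age", "30"]);

const name = await redis.hget("user:1", "name"); // "Alice"
const missing = await redis.hget("user:1", "email"); // null
```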
4  packages/bun-types/shell.d.ts  vendored
@@ -58,7 +58,7 @@ declare module "bun" {
   * // "bun"
   * ```
   */
- function env(newEnv?: Record<string, string | undefined>): $;
+ function env(newEnv?: Record<string, string | undefined> | NodeJS.Dict<string> | undefined): $;

  /**
   *
@@ -106,7 +106,7 @@ declare module "bun" {
   * expect(stdout.toString()).toBe("LOL!");
   * ```
   */
- env(newEnv: Record<string, string> | undefined): this;
+ env(newEnv: Record<string, string | undefined> | NodeJS.Dict<string> | undefined): this;

  /**
   * By default, the shell will write to the current process's stdout and stderr, as well as buffering that output.
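The widened signatures appear intended to let dictionary-typed objects such as `process.env` (whose values may be `undefined`) pass without a cast; a minimal sketch:

```ts
import { $ } from "bun";

// process.env is a NodeJS.Dict-style object; the widened overload accepts it directly.
await $`echo $GREETING`.env({ ...process.env, GREETING: "hello" });
```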
45  packages/bun-types/sql.d.ts  vendored
@@ -41,22 +41,22 @@ declare module "bun" {

  class PostgresError extends SQLError {
    public readonly code: string;
-   public readonly errno: string | undefined;
-   public readonly detail: string | undefined;
-   public readonly hint: string | undefined;
-   public readonly severity: string | undefined;
-   public readonly position: string | undefined;
-   public readonly internalPosition: string | undefined;
-   public readonly internalQuery: string | undefined;
-   public readonly where: string | undefined;
-   public readonly schema: string | undefined;
-   public readonly table: string | undefined;
-   public readonly column: string | undefined;
-   public readonly dataType: string | undefined;
-   public readonly constraint: string | undefined;
-   public readonly file: string | undefined;
-   public readonly line: string | undefined;
-   public readonly routine: string | undefined;
+   public readonly errno?: string | undefined;
+   public readonly detail?: string | undefined;
+   public readonly hint?: string | undefined;
+   public readonly severity?: string | undefined;
+   public readonly position?: string | undefined;
+   public readonly internalPosition?: string | undefined;
+   public readonly internalQuery?: string | undefined;
+   public readonly where?: string | undefined;
+   public readonly schema?: string | undefined;
+   public readonly table?: string | undefined;
+   public readonly column?: string | undefined;
+   public readonly dataType?: string | undefined;
+   public readonly constraint?: string | undefined;
+   public readonly file?: string | undefined;
+   public readonly line?: string | undefined;
+   public readonly routine?: string | undefined;

    constructor(
      message: string,
@@ -84,8 +84,8 @@ declare module "bun" {

  class MySQLError extends SQLError {
    public readonly code: string;
-   public readonly errno: number | undefined;
-   public readonly sqlState: string | undefined;
+   public readonly errno?: number | undefined;
+   public readonly sqlState?: string | undefined;
    constructor(message: string, options: { code: string; errno: number | undefined; sqlState: string | undefined });
  }

@@ -143,13 +143,13 @@ declare module "bun" {

  /**
   * Database server hostname
+  * @deprecated Prefer {@link hostname}
   * @default "localhost"
   */
  host?: string | undefined;

  /**
-  * Database server hostname (alias for host)
-  * @deprecated Prefer {@link host}
+  * Database server hostname
   * @default "localhost"
   */
  hostname?: string | undefined;
@@ -264,13 +264,14 @@ declare module "bun" {
   * Whether to use TLS/SSL for the connection
   * @default false
   */
- tls?: TLSOptions | boolean | undefined;
+ tls?: Bun.BunFile | TLSOptions | boolean | undefined;

  /**
   * Whether to use TLS/SSL for the connection (alias for tls)
+  * @deprecated Prefer {@link tls}
   * @default false
   */
- ssl?: TLSOptions | boolean | undefined;
+ ssl?: Bun.BunFile | TLSOptions | boolean | undefined;

  /**
   * Unix domain socket path for connection
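Because these fields are now optional, callers should guard before using them; a sketch (import names taken from these declarations; the table name is hypothetical):

```ts
import { sql, PostgresError } from "bun";

try {
  await sql`SELECT * FROM missing_table`;
} catch (err) {
  if (err instanceof PostgresError) {
    // detail, hint, position, etc. are `?` now, so handle their absence.
    console.error(err.code, err.detail ?? "(no detail)");
  }
}
```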
126  packages/bun-types/test.d.ts  vendored
@@ -14,11 +14,6 @@
 * ```
 */
declare module "bun:test" {
-  /**
-   * -- Mocks --
-   *
-   * @category Testing
-   */
  export type Mock<T extends (...args: any[]) => any> = JestMock.Mock<T>;

  export const mock: {
@@ -588,7 +583,9 @@ declare module "bun:test" {
   * @param customFailMessage an optional custom message to display if the test fails.
   * */

-  <T = unknown>(actual?: T, customFailMessage?: string): Matchers<T>;
+  (actual?: never, customFailMessage?: string): Matchers<undefined>;
+  <T = unknown>(actual: T, customFailMessage?: string): Matchers<T>;
+  <T = unknown>(actual?: T, customFailMessage?: string): Matchers<T | undefined>;

  /**
   * Access to negated asymmetric matchers.
@@ -906,6 +903,7 @@ declare module "bun:test" {
   * @param message the message to display if the test fails (optional)
   */
  pass: (message?: string) => void;

  /**
   * Assertion which fails.
   *
@@ -917,6 +915,7 @@ declare module "bun:test" {
   * expect().not.fail("hi");
   */
  fail: (message?: string) => void;

  /**
   * Asserts that a value equals what is expected.
   *
@@ -930,9 +929,15 @@ declare module "bun:test" {
   * expect([123]).toBe([123]); // fail, use toEqual()
   * expect(3 + 0.14).toBe(3.14); // fail, use toBeCloseTo()
   *
   * // TypeScript errors:
   * expect("hello").toBe(3.14); // typescript error + fail
   * expect("hello").toBe<number>(3.14); // no typescript error, but still fails
   *
   * @param expected the expected value
   */
-  toBe(expected: T): void;
+  toBe<X = T>(expected: NoInfer<X>): void;

  /**
   * Asserts that a number is odd.
   *
@@ -942,6 +947,7 @@ declare module "bun:test" {
   * expect(2).not.toBeOdd();
   */
  toBeOdd(): void;

  /**
   * Asserts that a number is even.
   *
@@ -951,6 +957,7 @@ declare module "bun:test" {
   * expect(1).not.toBeEven();
   */
  toBeEven(): void;

  /**
   * Asserts that value is close to the expected by floating point precision.
   *
@@ -969,6 +976,7 @@ declare module "bun:test" {
   * @param numDigits the number of digits to check after the decimal point. Default is `2`
   */
  toBeCloseTo(expected: number, numDigits?: number): void;

  /**
   * Asserts that a value is deeply equal to what is expected.
   *
@@ -981,6 +989,8 @@ declare module "bun:test" {
   * @param expected the expected value
   */
-  toEqual(expected: T): void;
+  toEqual<X = T>(expected: NoInfer<X>): void;

  /**
   * Asserts that a value is deeply and strictly equal to
   * what is expected.
@@ -1005,6 +1015,8 @@ declare module "bun:test" {
   * @param expected the expected value
   */
-  toStrictEqual(expected: T): void;
+  toStrictEqual<X = T>(expected: NoInfer<X>): void;

  /**
   * Asserts that the value is deep equal to an element in the expected array.
   *
@@ -1017,7 +1029,9 @@ declare module "bun:test" {
   *
   * @param expected the expected value
   */
-  toBeOneOf(expected: Array<unknown> | Iterable<unknown>): void;
+  toBeOneOf(expected: Iterable<T>): void;
+  toBeOneOf<X = T>(expected: NoInfer<Iterable<X>>): void;

  /**
   * Asserts that a value contains what is expected.
   *
@@ -1031,7 +1045,9 @@ declare module "bun:test" {
   *
   * @param expected the expected value
   */
-  toContain(expected: unknown): void;
+  toContain(expected: T extends Iterable<infer U> ? U : T): void;
+  toContain<X = T>(expected: NoInfer<X extends Iterable<infer U> ? U : X>): void;

  /**
   * Asserts that an `object` contains a key.
   *
@@ -1045,7 +1061,9 @@ declare module "bun:test" {
   *
   * @param expected the expected value
   */
-  toContainKey(expected: unknown): void;
+  toContainKey(expected: keyof T): void;
+  toContainKey<X = T>(expected: NoInfer<keyof X>): void;

  /**
   * Asserts that an `object` contains all the provided keys.
   *
@@ -1060,7 +1078,9 @@ declare module "bun:test" {
   *
   * @param expected the expected value
   */
-  toContainAllKeys(expected: unknown): void;
+  toContainAllKeys(expected: Array<keyof T>): void;
+  toContainAllKeys<X = T>(expected: NoInfer<Array<keyof X>>): void;

  /**
-  * Asserts that an `object` contains at least one of the provided keys.
+  * Asserts that an `object` contains all the provided keys.
@@ -1075,12 +1095,16 @@ declare module "bun:test" {
   *
   * @param expected the expected value
   */
-  toContainAnyKeys(expected: unknown): void;
+  toContainAnyKeys(expected: Array<keyof T>): void;
+  toContainAnyKeys<X = T>(expected: NoInfer<Array<keyof X>>): void;

  /**
   * Asserts that an `object` contain the provided value.
   *
-  * The value must be an object
+  * This method is deep and will look through child properties to find the
+  * expected value.
   *
+  * The input value must be an object.
+  *
   * @example
   * const shallow = { hello: "world" };
@@ -1104,11 +1128,16 @@ declare module "bun:test" {
   *
   * @param expected the expected value
   */
+  // Contributor note: In theory we could type this better but it would be a
+  // slow union to compute...
  toContainValue(expected: unknown): void;

  /**
   * Asserts that an `object` contain the provided value.
   *
   * This is the same as {@link toContainValue}, but accepts an array of
   * values instead.
   *
   * The value must be an object
   *
   * @example
@@ -1118,7 +1147,7 @@ declare module "bun:test" {
   * expect(o).not.toContainValues(['qux', 'foo']);
   * @param expected the expected value
   */
-  toContainValues(expected: unknown): void;
+  toContainValues(expected: Array<unknown>): void;

  /**
   * Asserts that an `object` contain all the provided values.
@@ -1132,7 +1161,7 @@ declare module "bun:test" {
   * expect(o).not.toContainAllValues(['bar', 'foo']);
   * @param expected the expected value
   */
-  toContainAllValues(expected: unknown): void;
+  toContainAllValues(expected: Array<unknown>): void;

  /**
   * Asserts that an `object` contain any provided value.
@@ -1147,7 +1176,7 @@ declare module "bun:test" {
   * expect(o).not.toContainAnyValues(['qux']);
   * @param expected the expected value
   */
-  toContainAnyValues(expected: unknown): void;
+  toContainAnyValues(expected: Array<unknown>): void;

  /**
   * Asserts that an `object` contains all the provided keys.
@@ -1159,7 +1188,9 @@ declare module "bun:test" {
   *
   * @param expected the expected value
   */
-  toContainKeys(expected: unknown): void;
+  toContainKeys(expected: Array<keyof T>): void;
+  toContainKeys<X = T>(expected: NoInfer<Array<keyof X>>): void;

  /**
   * Asserts that a value contains and equals what is expected.
   *
@@ -1172,7 +1203,9 @@ declare module "bun:test" {
   *
   * @param expected the expected value
   */
-  toContainEqual(expected: unknown): void;
+  toContainEqual(expected: T extends Iterable<infer U> ? U : T): void;
+  toContainEqual<X = T>(expected: NoInfer<X extends Iterable<infer U> ? U : X>): void;

  /**
   * Asserts that a value has a `.length` property
   * that is equal to the expected length.
@@ -1184,6 +1217,7 @@ declare module "bun:test" {
   * @param length the expected length
   */
  toHaveLength(length: number): void;

  /**
   * Asserts that a value has a property with the
   * expected name, and value if provided.
@@ -1198,6 +1232,7 @@ declare module "bun:test" {
   * @param value the expected property value, if provided
   */
  toHaveProperty(keyPath: string | number | Array<string | number>, value?: unknown): void;

  /**
   * Asserts that a value is "truthy".
   *
@@ -1210,6 +1245,7 @@ declare module "bun:test" {
   * expect({}).toBeTruthy();
   */
  toBeTruthy(): void;

  /**
   * Asserts that a value is "falsy".
   *
@@ -1222,6 +1258,7 @@ declare module "bun:test" {
   * expect({}).toBeTruthy();
   */
  toBeFalsy(): void;

  /**
   * Asserts that a value is defined. (e.g. is not `undefined`)
   *
@@ -1230,6 +1267,7 @@ declare module "bun:test" {
   * expect(undefined).toBeDefined(); // fail
   */
  toBeDefined(): void;

  /**
   * Asserts that the expected value is an instance of value
   *
@@ -1238,6 +1276,7 @@ declare module "bun:test" {
   * expect(null).toBeInstanceOf(Array); // fail
   */
  toBeInstanceOf(value: unknown): void;

  /**
   * Asserts that a value is `undefined`.
   *
@@ -1246,6 +1285,7 @@ declare module "bun:test" {
   * expect(null).toBeUndefined(); // fail
   */
  toBeUndefined(): void;

  /**
   * Asserts that a value is `null`.
   *
@@ -1254,6 +1294,7 @@ declare module "bun:test" {
   * expect(undefined).toBeNull(); // fail
   */
  toBeNull(): void;

  /**
   * Asserts that a value is `NaN`.
   *
@@ -1265,6 +1306,7 @@ declare module "bun:test" {
   * expect("notanumber").toBeNaN(); // fail
   */
  toBeNaN(): void;

  /**
   * Asserts that a value is a `number` and is greater than the expected value.
   *
@@ -1276,6 +1318,7 @@ declare module "bun:test" {
   * @param expected the expected number
   */
  toBeGreaterThan(expected: number | bigint): void;

  /**
   * Asserts that a value is a `number` and is greater than or equal to the expected value.
   *
@@ -1287,6 +1330,7 @@ declare module "bun:test" {
   * @param expected the expected number
   */
  toBeGreaterThanOrEqual(expected: number | bigint): void;

  /**
   * Asserts that a value is a `number` and is less than the expected value.
   *
@@ -1298,6 +1342,7 @@ declare module "bun:test" {
   * @param expected the expected number
   */
  toBeLessThan(expected: number | bigint): void;

  /**
   * Asserts that a value is a `number` and is less than or equal to the expected value.
   *
@@ -1309,6 +1354,7 @@ declare module "bun:test" {
   * @param expected the expected number
   */
  toBeLessThanOrEqual(expected: number | bigint): void;

  /**
   * Asserts that a function throws an error.
   *
@@ -1329,6 +1375,7 @@ declare module "bun:test" {
   * @param expected the expected error, error message, or error pattern
   */
  toThrow(expected?: unknown): void;

  /**
   * Asserts that a function throws an error.
   *
@@ -1350,6 +1397,7 @@ declare module "bun:test" {
   * @alias toThrow
   */
  toThrowError(expected?: unknown): void;

  /**
   * Asserts that a value matches a regular expression or includes a substring.
   *
@@ -1360,6 +1408,7 @@ declare module "bun:test" {
   * @param expected the expected substring or pattern.
   */
  toMatch(expected: string | RegExp): void;

  /**
   * Asserts that a value matches the most recent snapshot.
   *
@@ -1368,6 +1417,7 @@ declare module "bun:test" {
   * @param hint Hint used to identify the snapshot in the snapshot file.
   */
  toMatchSnapshot(hint?: string): void;

  /**
   * Asserts that a value matches the most recent snapshot.
   *
@@ -1380,6 +1430,7 @@ declare module "bun:test" {
   * @param hint Hint used to identify the snapshot in the snapshot file.
   */
  toMatchSnapshot(propertyMatchers?: object, hint?: string): void;

  /**
   * Asserts that a value matches the most recent inline snapshot.
   *
@@ -1390,6 +1441,7 @@ declare module "bun:test" {
   * @param value The latest automatically-updated snapshot value.
   */
  toMatchInlineSnapshot(value?: string): void;

  /**
   * Asserts that a value matches the most recent inline snapshot.
   *
@@ -1405,6 +1457,7 @@ declare module "bun:test" {
   * @param value The latest automatically-updated snapshot value.
   */
  toMatchInlineSnapshot(propertyMatchers?: object, value?: string): void;

  /**
   * Asserts that a function throws an error matching the most recent snapshot.
   *
@@ -1418,6 +1471,7 @@ declare module "bun:test" {
   * @param value The latest automatically-updated snapshot value.
   */
  toThrowErrorMatchingSnapshot(hint?: string): void;

  /**
   * Asserts that a function throws an error matching the most recent snapshot.
   *
@@ -1431,6 +1485,7 @@ declare module "bun:test" {
   * @param value The latest automatically-updated snapshot value.
   */
  toThrowErrorMatchingInlineSnapshot(value?: string): void;

  /**
   * Asserts that an object matches a subset of properties.
   *
@@ -1441,6 +1496,7 @@ declare module "bun:test" {
   * @param subset Subset of properties to match with.
   */
  toMatchObject(subset: object): void;

  /**
   * Asserts that a value is empty.
   *
@@ -1451,6 +1507,7 @@ declare module "bun:test" {
   * expect(new Set()).toBeEmpty();
   */
  toBeEmpty(): void;

  /**
   * Asserts that a value is an empty `object`.
   *
@@ -1459,6 +1516,7 @@ declare module "bun:test" {
   * expect({ a: 'hello' }).not.toBeEmptyObject();
   */
  toBeEmptyObject(): void;

  /**
   * Asserts that a value is `null` or `undefined`.
   *
@@ -1467,6 +1525,7 @@ declare module "bun:test" {
   * expect(undefined).toBeNil();
   */
  toBeNil(): void;

  /**
   * Asserts that a value is a `array`.
   *
@@ -1477,6 +1536,7 @@ declare module "bun:test" {
   * expect({}).not.toBeArray();
   */
  toBeArray(): void;

  /**
   * Asserts that a value is a `array` of a certain length.
   *
@@ -1488,6 +1548,7 @@ declare module "bun:test" {
   * expect({}).not.toBeArrayOfSize(0);
   */
  toBeArrayOfSize(size: number): void;

  /**
   * Asserts that a value is a `boolean`.
   *
@@ -1498,6 +1559,7 @@ declare module "bun:test" {
   * expect(0).not.toBeBoolean();
   */
  toBeBoolean(): void;

  /**
   * Asserts that a value is `true`.
   *
@@ -1507,6 +1569,7 @@ declare module "bun:test" {
   * expect(1).not.toBeTrue();
   */
  toBeTrue(): void;

  /**
   * Asserts that a value matches a specific type.
   *
@@ -1517,6 +1580,7 @@ declare module "bun:test" {
   * expect([]).not.toBeTypeOf("boolean");
   */
  toBeTypeOf(type: "bigint" | "boolean" | "function" | "number" | "object" | "string" | "symbol" | "undefined"): void;

  /**
   * Asserts that a value is `false`.
   *
@@ -1526,6 +1590,7 @@ declare module "bun:test" {
   * expect(0).not.toBeFalse();
   */
  toBeFalse(): void;

  /**
   * Asserts that a value is a `number`.
   *
@@ -1536,6 +1601,7 @@ declare module "bun:test" {
   * expect(BigInt(1)).not.toBeNumber();
   */
  toBeNumber(): void;

  /**
   * Asserts that a value is a `number`, and is an integer.
   *
@@ -1545,6 +1611,7 @@ declare module "bun:test" {
   * expect(NaN).not.toBeInteger();
   */
  toBeInteger(): void;

  /**
   * Asserts that a value is an `object`.
   *
@@ -1554,6 +1621,7 @@ declare module "bun:test" {
   * expect(NaN).not.toBeObject();
   */
  toBeObject(): void;

  /**
   * Asserts that a value is a `number`, and is not `NaN` or `Infinity`.
   *
@@ -1564,6 +1632,7 @@ declare module "bun:test" {
   * expect(Infinity).not.toBeFinite();
   */
  toBeFinite(): void;

  /**
   * Asserts that a value is a positive `number`.
   *
@@ -1573,6 +1642,7 @@ declare module "bun:test" {
   * expect(NaN).not.toBePositive();
   */
  toBePositive(): void;

  /**
   * Asserts that a value is a negative `number`.
   *
@@ -1582,6 +1652,7 @@ declare module "bun:test" {
   * expect(NaN).not.toBeNegative();
   */
  toBeNegative(): void;

  /**
   * Asserts that a value is a number between a start and end value.
   *
@@ -1589,6 +1660,7 @@ declare module "bun:test" {
   * @param end the end number (exclusive)
   */
  toBeWithin(start: number, end: number): void;

  /**
   * Asserts that a value is equal to the expected string, ignoring any whitespace.
   *
@@ -1599,6 +1671,7 @@ declare module "bun:test" {
   * @param expected the expected string
   */
  toEqualIgnoringWhitespace(expected: string): void;

  /**
   * Asserts that a value is a `symbol`.
   *
@@ -1607,6 +1680,7 @@ declare module "bun:test" {
   * expect("foo").not.toBeSymbol();
   */
  toBeSymbol(): void;

  /**
   * Asserts that a value is a `function`.
   *
@@ -1614,6 +1688,7 @@ declare module "bun:test" {
   * expect(() => {}).toBeFunction();
   */
  toBeFunction(): void;

  /**
   * Asserts that a value is a `Date` object.
   *
@@ -1625,6 +1700,7 @@ declare module "bun:test" {
   * expect("2020-03-01").not.toBeDate();
   */
  toBeDate(): void;

  /**
   * Asserts that a value is a valid `Date` object.
   *
@@ -1634,6 +1710,7 @@ declare module "bun:test" {
   * expect("2020-03-01").not.toBeValidDate();
   */
  toBeValidDate(): void;

  /**
   * Asserts that a value is a `string`.
   *
@@ -1643,6 +1720,7 @@ declare module "bun:test" {
   * expect(123).not.toBeString();
   */
  toBeString(): void;

  /**
   * Asserts that a value includes a `string`.
   *
@@ -1651,12 +1729,14 @@ declare module "bun:test" {
   * @param expected the expected substring
   */
  toInclude(expected: string): void;

  /**
   * Asserts that a value includes a `string` {times} times.
   * @param expected the expected substring
   * @param times the number of times the substring should occur
   */
  toIncludeRepeated(expected: string, times: number): void;

  /**
   * Checks whether a value satisfies a custom condition.
   * @param {Function} predicate - The custom condition to be satisfied. It should be a function that takes a value as an argument (in this case the value from expect) and returns a boolean.
@@ -1668,18 +1748,21 @@ declare module "bun:test" {
   * @link https://jest-extended.jestcommunity.dev/docs/matchers/toSatisfy
   */
  toSatisfy(predicate: (value: T) => boolean): void;

  /**
   * Asserts that a value starts with a `string`.
   *
   * @param expected the string to start with
   */
  toStartWith(expected: string): void;

  /**
   * Asserts that a value ends with a `string`.
   *
   * @param expected the string to end with
   */
  toEndWith(expected: string): void;

  /**
   * Ensures that a mock function has returned successfully at least once.
   *
@@ -1720,42 +1803,51 @@ declare module "bun:test" {
   * Ensures that a mock function is called.
   */
  toHaveBeenCalled(): void;

  /**
   * Ensures that a mock function is called an exact number of times.
   * @alias toHaveBeenCalled
   */
  toBeCalled(): void;

  /**
   * Ensures that a mock function is called an exact number of times.
   */
  toHaveBeenCalledTimes(expected: number): void;

  /**
   * Ensure that a mock function is called with specific arguments.
   * @alias toHaveBeenCalledTimes
   */
  toBeCalledTimes(expected: number): void;

  /**
   * Ensure that a mock function is called with specific arguments.
   */
  toHaveBeenCalledWith(...expected: unknown[]): void;

  /**
   * Ensure that a mock function is called with specific arguments.
   * @alias toHaveBeenCalledWith
   */
  toBeCalledWith(...expected: unknown[]): void;

  /**
   * Ensure that a mock function is called with specific arguments for the last call.
   */
  toHaveBeenLastCalledWith(...expected: unknown[]): void;

  /**
   * Ensure that a mock function is called with specific arguments for the nth call.
   * @alias toHaveBeenCalledWith
   */
  lastCalledWith(...expected: unknown[]): void;

  /**
   * Ensure that a mock function is called with specific arguments for the nth call.
   */
  toHaveBeenNthCalledWith(n: number, ...expected: unknown[]): void;

  /**
   * Ensure that a mock function is called with specific arguments for the nth call.
   * @alias toHaveBeenCalledWith
@@ -25,6 +25,23 @@
#include <stdio.h>
#include <stdlib.h>

+#if BUN_DEBUG
+// Debug network traffic logging
+static FILE *debug_recv_file = NULL;
+static FILE *debug_send_file = NULL;
+static int debug_logging_initialized = 0;
+
+static void init_debug_logging() {
+  if (debug_logging_initialized) return;
+  debug_logging_initialized = 1;
+
+  const char *recv_path = getenv("BUN_RECV");
+  const char *send_path = getenv("BUN_SEND");
+  if (recv_path) if (!debug_recv_file) debug_recv_file = fopen(recv_path, "w");
+  if (send_path) if (!debug_send_file) debug_send_file = fopen(send_path, "w");
+}
+#endif

#ifndef _WIN32
// Necessary for the stdint include
#ifndef _GNU_SOURCE
@@ -721,6 +738,17 @@ ssize_t bsd_recv(LIBUS_SOCKET_DESCRIPTOR fd, void *buf, int length, int flags) {
      continue;
    }

+#if BUN_DEBUG
+    // Debug logging for received data
+    if (ret > 0) {
+      init_debug_logging();
+      if (debug_recv_file) {
+        fwrite(buf, 1, ret, debug_recv_file);
+        fflush(debug_recv_file);
+      }
+    }
+#endif

    return ret;
  }
}
@@ -788,6 +816,17 @@ ssize_t bsd_send(LIBUS_SOCKET_DESCRIPTOR fd, const char *buf, int length) {
      continue;
    }

+#if BUN_DEBUG
+    // Debug logging for sent data
+    if (rc > 0) {
+      init_debug_logging();
+      if (debug_send_file) {
+        fwrite(buf, 1, rc, debug_send_file);
+        fflush(debug_send_file);
+      }
+    }
+#endif

    return rc;
  }
}
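A sketch of how one might exercise these hooks (env var names taken from the patch; the binary and script names are hypothetical, and the hooks compile in only under BUN_DEBUG):

```ts
// Tee raw socket traffic from a debug-build child process into files for inspection.
const proc = Bun.spawn(["bun-debug", "run", "client.ts"], {
  env: { ...process.env, BUN_RECV: "/tmp/recv.bin", BUN_SEND: "/tmp/send.bin" },
  stdout: "inherit",
});
await proc.exited;
```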
@@ -22,7 +22,16 @@
#ifndef WIN32
#include <sys/ioctl.h>
#endif

+#if __has_include("wtf/Platform.h")
+#include "wtf/Platform.h"
+#elif !defined(ASSERT_ENABLED)
+#if defined(BUN_DEBUG) || defined(__has_feature) && __has_feature(address_sanitizer) || defined(__SANITIZE_ADDRESS__)
+#define ASSERT_ENABLED 1
+#else
+#define ASSERT_ENABLED 0
+#endif
+#endif

#if ASSERT_ENABLED
extern const size_t Bun__lock__size;
@@ -117,7 +117,7 @@ async function countReactions(issueNumbers: number[], verbose = false): Promise<
  }

  // Small delay to avoid rate limiting
- await Bun.sleep(50);
+ await Bun.sleep(1);
}

return totalReactions;
72  scripts/handle-crash-patterns.ts  Normal file
@@ -0,0 +1,72 @@
#!/usr/bin/env bun

const body = process.env.GITHUB_ISSUE_BODY || "";
const title = process.env.GITHUB_ISSUE_TITLE || "";
const issueNumber = process.env.GITHUB_ISSUE_NUMBER;

if (!issueNumber) {
  throw new Error("GITHUB_ISSUE_NUMBER must be set");
}

interface CloseAction {
  reason: "not_planned" | "completed";
  comment: string;
}

let closeAction: CloseAction | null = null;

// Check for workers_terminated
if (body.includes("workers_terminated")) {
  closeAction = {
    reason: "not_planned",
    comment: `Duplicate of #15964

We are tracking worker stability issues in https://github.com/oven-sh/bun/issues/15964. For now, I recommend against terminating workers when possible.`,
  };
}

// Check for better-sqlite3 with RunCommand or AutoCommand
else if (body.includes("better-sqlite3") && (body.includes("[RunCommand]") || body.includes("[AutoCommand]"))) {
  closeAction = {
    reason: "not_planned",
    comment: `Duplicate of #4290.

better-sqlite3 is not supported yet in Bun due to missing V8 C++ APIs. For now, you can try [bun:sqlite](https://bun.com/docs/api/sqlite) for an almost drop-in replacement.`,
  };
}

// Check for CPU architecture issues (Segmentation Fault/Illegal Instruction with no_avx)
else if (
  (body.includes("Segmentation Fault") ||
    body.includes("Illegal Instruction") ||
    body.includes("IllegalInstruction")) &&
  body.includes("no_avx")
) {
  let comment = `Bun requires a CPU with the micro-architecture [\`nehalem\`](https://en.wikipedia.org/wiki/Nehalem_(microarchitecture)) or later (released in 2008). If you're using a CPU emulator like qemu, then try enabling x86-64-v2.`;

  // Check if it's macOS
  const platformMatch = body.match(/Platform:\s*([^\n]+)/i) || body.match(/on\s+(macos|darwin)/i);
  const isMacOS =
    platformMatch &&
    (platformMatch[1]?.toLowerCase().includes("darwin") || platformMatch[1]?.toLowerCase().includes("macos"));

  if (isMacOS) {
    comment += `\n\nIf you're on an Apple silicon device, you're running Bun via the Rosetta CPU emulator and your best option is to run Bun natively instead.`;
  }

  closeAction = {
    reason: "not_planned",
    comment,
  };
}

if (closeAction) {
  // Output the action to take
  console.write(
    JSON.stringify({
      close: true,
      reason: closeAction.reason,
      comment: closeAction.comment,
    }),
  );
} else {
  console.write(JSON.stringify({ close: false }));
}
@@ -6,6 +6,9 @@ if (!body) {

const latest = (await Bun.file(join(import.meta.dir, "..", "LATEST")).text()).trim();

+// Check if this is a standalone executable
+const isStandalone = body.includes("standalone_executable");

const lines = body.split("\n").reverse();

for (let line of lines) {
@@ -39,6 +42,11 @@ for (let line of lines) {
  await Bun.write("is-outdated.txt", "true");
  await Bun.write("outdated.txt", version);

+ // Write flag for standalone executables
+ if (isStandalone) {
+   await Bun.write("is-standalone.txt", "true");
+ }

  const isVeryOutdated =
    major !== latestMajor || minor !== latestMinor || (latestPatch > patch && latestPatch - patch > 3);
@@ -298,7 +298,7 @@ function getTestExpectations() {
  return expectations;
}

-const skipArray = (() => {
+const skipsForExceptionValidation = (() => {
  const path = join(cwd, "test/no-validate-exceptions.txt");
  if (!existsSync(path)) {
    return [];
@@ -309,13 +309,32 @@ const skipArray = (() => {
    .filter(line => !line.startsWith("#") && line.length > 0);
})();

+const skipsForLeaksan = (() => {
+  const path = join(cwd, "test/no-validate-leaksan.txt");
+  if (!existsSync(path)) {
+    return [];
+  }
+  return readFileSync(path, "utf-8")
+    .split("\n")
+    .filter(line => !line.startsWith("#") && line.length > 0);
+})();

/**
 * Returns whether we should validate exception checks running the given test
 * @param {string} test
 * @returns {boolean}
 */
const shouldValidateExceptions = test => {
-  return !(skipArray.includes(test) || skipArray.includes("test/" + test));
+  return !(skipsForExceptionValidation.includes(test) || skipsForExceptionValidation.includes("test/" + test));
};

+/**
+ * Returns whether we should validate leak sanitizer checks when running the given test
+ * @param {string} test
+ * @returns {boolean}
+ */
+const shouldValidateLeakSan = test => {
+  return !(skipsForLeaksan.includes(test) || skipsForLeaksan.includes("test/" + test));
+};

/**
@@ -400,7 +419,9 @@ async function runTests() {

  const okResults = [];
  const flakyResults = [];
+ const flakyResultsTitles = [];
  const failedResults = [];
+ const failedResultsTitles = [];
  const maxAttempts = 1 + (parseInt(options["retries"]) || 0);

  const parallelism = options["parallel"] ? availableParallelism() : 1;
@@ -436,6 +457,7 @@ async function runTests() {
    if (ok) {
      if (failure) {
        flakyResults.push(failure);
+       flakyResultsTitles.push(title);
      } else {
        okResults.push(result);
      }
@@ -455,6 +477,7 @@ async function runTests() {
      if (attempt >= maxAttempts || isAlwaysFailure(error)) {
        flaky = false;
        failedResults.push(failure);
+       failedResultsTitles.push(title);
        break;
      }
    }
@@ -567,6 +590,12 @@ async function runTests() {
      env.BUN_JSC_validateExceptionChecks = "1";
      env.BUN_JSC_dumpSimulatedThrows = "1";
    }
+   if ((basename(execPath).includes("asan") || !isCI) && shouldValidateLeakSan(testPath)) {
+     env.BUN_DESTRUCT_VM_ON_EXIT = "1";
+     env.ASAN_OPTIONS = "allow_user_segv_handler=1:disable_coredump=0:detect_leaks=1";
+     // prettier-ignore
+     env.LSAN_OPTIONS = `malloc_context_size=100:print_suppressions=0:suppressions=${process.cwd()}/test/leaksan.supp`;
+   }
    return runTest(title, async () => {
      const { ok, error, stdout, crashes } = await spawnBun(execPath, {
        cwd: cwd,
@@ -809,14 +838,14 @@ async function runTests() {

  if (failedResults.length) {
    console.log(`${getAnsi("red")}Failing Tests:${getAnsi("reset")}`);
-   for (const { testPath } of failedResults) {
+   for (const testPath of failedResultsTitles) {
      console.log(`${getAnsi("red")}- ${testPath}${getAnsi("reset")}`);
    }
  }

  if (flakyResults.length) {
    console.log(`${getAnsi("yellow")}Flaky Tests:${getAnsi("reset")}`);
-   for (const { testPath } of flakyResults) {
+   for (const testPath of flakyResultsTitles) {
      console.log(`${getAnsi("yellow")}- ${testPath}${getAnsi("reset")}`);
    }
  }
@@ -1094,7 +1123,7 @@ async function spawnBun(execPath, { args, cwd, timeout, env, stdout, stderr }) {
    : { BUN_ENABLE_CRASH_REPORTING: "0" }),
  };

- if (basename(execPath).includes("asan")) {
+ if (basename(execPath).includes("asan") && bunEnv.ASAN_OPTIONS === undefined) {
    bunEnv.ASAN_OPTIONS = "allow_user_segv_handler=1:disable_coredump=0";
  }

@@ -1250,17 +1279,17 @@ async function spawnBun(execPath, { args, cwd, timeout, env, stdout, stderr }) {
 *
 * @param {string} execPath
 * @param {string} testPath
- * @param {object} [options]
- * @param {string} [options.cwd]
- * @param {string[]} [options.args]
+ * @param {object} [opts]
+ * @param {string} [opts.cwd]
+ * @param {string[]} [opts.args]
 * @returns {Promise<TestResult>}
 */
-async function spawnBunTest(execPath, testPath, options = { cwd }) {
+async function spawnBunTest(execPath, testPath, opts = { cwd }) {
  const timeout = getTestTimeout(testPath);
  const perTestTimeout = Math.ceil(timeout / 2);
- const absPath = join(options["cwd"], testPath);
+ const absPath = join(opts["cwd"], testPath);
  const isReallyTest = isTestStrict(testPath) || absPath.includes("vendor");
- const args = options["args"] ?? [];
+ const args = opts["args"] ?? [];

  const testArgs = ["test", ...args, `--timeout=${perTestTimeout}`];

@@ -1291,10 +1320,16 @@ async function spawnBunTest(execPath, testPath, options = { cwd }) {
    env.BUN_JSC_validateExceptionChecks = "1";
    env.BUN_JSC_dumpSimulatedThrows = "1";
  }
+ if ((basename(execPath).includes("asan") || !isCI) && shouldValidateLeakSan(relative(cwd, absPath))) {
+   env.BUN_DESTRUCT_VM_ON_EXIT = "1";
+   env.ASAN_OPTIONS = "allow_user_segv_handler=1:disable_coredump=0:detect_leaks=1";
+   // prettier-ignore
+   env.LSAN_OPTIONS = `malloc_context_size=100:print_suppressions=0:suppressions=${process.cwd()}/test/leaksan.supp`;
+ }

  const { ok, error, stdout, crashes } = await spawnBun(execPath, {
    args: isReallyTest ? testArgs : [...args, absPath],
-   cwd: options["cwd"],
+   cwd: opts["cwd"],
    timeout: isReallyTest ? timeout : 30_000,
    env,
    stdout: options.stdout,
@@ -1528,7 +1563,11 @@ function isNodeTest(path) {
    return false;
  }
  const unixPath = path.replaceAll(sep, "/");
- return unixPath.includes("js/node/test/parallel/") || unixPath.includes("js/node/test/sequential/");
+ return (
+   unixPath.includes("js/node/test/parallel/") ||
+   unixPath.includes("js/node/test/sequential/") ||
+   unixPath.includes("js/bun/test/parallel/")
+ );
}

/**
@@ -2217,7 +2256,7 @@ function getExitCode(outcome) {
    return 1;
  }

- // A flaky segfault, sigtrap, or sigill must never be ignored.
+ // A flaky segfault, sigtrap, or sigkill must never be ignored.
  // If it happens in CI, it will happen to our users.
  // Flaky AddressSanitizer errors cannot be ignored since they still represent real bugs.
  function isAlwaysFailure(error) {
@@ -2226,6 +2265,7 @@ function isAlwaysFailure(error) {
    error.includes("segmentation fault") ||
    error.includes("illegal instruction") ||
    error.includes("sigtrap") ||
+   error.includes("sigkill") ||
    error.includes("error: addresssanitizer") ||
    error.includes("internal assertion failure") ||
    error.includes("core dumped") ||
@@ -2808,6 +2808,7 @@ export function endGroup() {
  } else {
    console.groupEnd();
  }
+ console.log();
}

export function printEnvironment() {
@@ -1,35 +0,0 @@
|
||||
#!/bin/bash
|
||||
set -exo pipefail
|
||||
|
||||
WEBKIT_VERSION=$(grep 'set(WEBKIT_TAG' "CMakeLists.txt" | awk '{print $2}' | cut -f 1 -d ')')
|
||||
MIMALLOC_VERSION=$(git rev-parse HEAD:./src/deps/mimalloc)
|
||||
LIBARCHIVE_VERSION=$(git rev-parse HEAD:./src/deps/libarchive)
|
||||
PICOHTTPPARSER_VERSION=$(git rev-parse HEAD:./src/deps/picohttpparser)
|
||||
BORINGSSL_VERSION=$(git rev-parse HEAD:./src/deps/boringssl)
|
||||
ZLIB_VERSION=$(git rev-parse HEAD:./src/deps/zlib)
|
||||
LOLHTML=$(git rev-parse HEAD:./src/deps/lol-html)
|
||||
TINYCC=$(git rev-parse HEAD:./src/deps/tinycc)
|
||||
C_ARES=$(git rev-parse HEAD:./src/deps/c-ares)
|
||||
ZSTD=$(git rev-parse HEAD:./src/deps/zstd)
|
||||
LSHPACK=$(git rev-parse HEAD:./src/deps/ls-hpack)
|
||||
LIBDEFLATE=$(git rev-parse HEAD:./src/deps/libdeflate)
|
||||
|
||||
rm -rf src/generated_versions_list.zig
|
||||
echo "// AUTO-GENERATED FILE. Created via .scripts/write-versions.sh" >src/generated_versions_list.zig
|
||||
echo "" >>src/generated_versions_list.zig
|
||||
echo "pub const boringssl = \"$BORINGSSL_VERSION\";" >>src/generated_versions_list.zig
|
||||
echo "pub const libarchive = \"$LIBARCHIVE_VERSION\";" >>src/generated_versions_list.zig
|
||||
echo "pub const mimalloc = \"$MIMALLOC_VERSION\";" >>src/generated_versions_list.zig
|
||||
echo "pub const picohttpparser = \"$PICOHTTPPARSER_VERSION\";" >>src/generated_versions_list.zig
|
||||
echo "pub const webkit = \"$WEBKIT_VERSION\";" >>src/generated_versions_list.zig
|
||||
echo "pub const zig = @import(\"std\").fmt.comptimePrint(\"{}\", .{@import(\"builtin\").zig_version});" >>src/generated_versions_list.zig
|
||||
echo "pub const zlib = \"$ZLIB_VERSION\";" >>src/generated_versions_list.zig
|
||||
echo "pub const tinycc = \"$TINYCC\";" >>src/generated_versions_list.zig
|
||||
echo "pub const lolhtml = \"$LOLHTML\";" >>src/generated_versions_list.zig
|
||||
echo "pub const c_ares = \"$C_ARES\";" >>src/generated_versions_list.zig
|
||||
echo "pub const libdeflate = \"$LIBDEFLATE\";" >>src/generated_versions_list.zig
|
||||
echo "pub const zstd = \"$ZSTD\";" >>src/generated_versions_list.zig
|
||||
echo "pub const lshpack = \"$LSHPACK\";" >>src/generated_versions_list.zig
|
||||
echo "" >>src/generated_versions_list.zig
|
||||
|
||||
zig fmt src/generated_versions_list.zig
@@ -159,7 +159,7 @@ pub inline fn mimalloc_cleanup(force: bool) void {
Mimalloc.mi_collect(force);
}
}
pub const versions = @import("./generated_versions_list.zig");
// Versions are now handled by CMake-generated header (bun_dependency_versions.h)

// Enabling huge pages slows down bun by 8x or so
// Keeping this code for:

@@ -18,7 +18,7 @@ pub fn deinit(this: *HTMLScanner) void {
for (this.import_records.slice()) |*record| {
this.allocator.free(record.path.text);
}
this.import_records.deinitWithAllocator(this.allocator);
this.import_records.deinit(this.allocator);
}

fn createImportRecord(this: *HTMLScanner, input_path: []const u8, kind: ImportKind) !void {
@@ -41,10 +41,10 @@ fn createImportRecord(this: *HTMLScanner, input_path: []const u8, kind: ImportKi
const record = ImportRecord{
.path = fs.Path.init(try this.allocator.dupeZ(u8, path_to_use)),
.kind = kind,
.range = .none,
.range = logger.Range.None,
};

try this.import_records.push(this.allocator, record);
try this.import_records.append(this.allocator, record);
}

const debug = bun.Output.scoped(.HTMLScanner, .hidden);
@@ -56,7 +56,7 @@ pub fn onWriteHTML(_: *HTMLScanner, bytes: []const u8) void {
pub fn onHTMLParseError(this: *HTMLScanner, message: []const u8) void {
this.log.addError(
this.source,
.none,
logger.Loc.Empty,
message,
) catch |err| bun.handleOom(err);
}

@@ -229,6 +229,11 @@ pub fn BSSList(comptime ValueType: type, comptime _count: anytype) type {
this.data[index] = item;
return &this.data[index];
}

pub fn deinit(this: *OverflowBlock) void {
if (this.prev) |p| p.deinit();
bun.default_allocator.destroy(this);
}
};

const Self = @This();
@@ -264,6 +269,12 @@ pub fn BSSList(comptime ValueType: type, comptime _count: anytype) type {
return instance;
}

pub fn deinit(self: *Self) void {
self.head.deinit();
bun.default_allocator.destroy(instance);
loaded = false;
}

pub fn isOverflowing() bool {
return instance.used >= @as(u16, count);
}
@@ -350,6 +361,12 @@ pub fn BSSStringList(comptime _count: usize, comptime _item_length: usize) type
return instance;
}

pub fn deinit(self: *const Self) void {
_ = self;
bun.default_allocator.destroy(instance);
loaded = false;
}

pub inline fn isOverflowing() bool {
return instance.slice_buf_used >= @as(u16, count);
}
@@ -530,6 +547,12 @@ pub fn BSSMap(comptime ValueType: type, comptime count: anytype, comptime store_
return instance;
}

pub fn deinit(self: *Self) void {
self.index.deinit(self.allocator);
bun.default_allocator.destroy(instance);
loaded = false;
}

pub fn isOverflowing() bool {
return instance.backing_buf_used >= @as(u16, count);
}
@@ -653,6 +676,10 @@ pub fn BSSMap(comptime ValueType: type, comptime count: anytype, comptime store_
// }

}

pub fn values(self: *Self) []ValueType {
return (&self.backing_buf)[0..self.backing_buf_used];
}
};
if (!store_keys) {
return BSSMapType;
@@ -684,6 +711,12 @@ pub fn BSSMap(comptime ValueType: type, comptime count: anytype, comptime store_
return instance;
}

pub fn deinit(self: *Self) void {
self.map.deinit();
bun.default_allocator.destroy(instance);
instance_loaded = false;
}

pub fn isOverflowing() bool {
return instance.map.backing_buf_used >= count;
}

@@ -78,6 +78,15 @@ pub const Borrowed = struct {
else
null;
}

pub fn downcast(std_alloc: std.mem.Allocator) Borrowed {
bun.assertf(
isInstance(std_alloc),
"not a MimallocArena (vtable is {*})",
.{std_alloc.vtable},
);
return .fromOpaque(std_alloc.ptr);
}
};

const BorrowedHeap = if (safety_checks) *DebugHeap else *mimalloc.Heap;
@@ -115,6 +124,7 @@ pub fn borrow(self: Self) Borrowed {
/// It uses pthread_getspecific to do that.
/// We can save those extra calls if we just do it once in here
pub fn getThreadLocalDefault() std.mem.Allocator {
if (bun.Environment.enable_asan) return bun.default_allocator;
return Borrowed.getDefault().allocator();
}

@@ -61,7 +61,7 @@ pub const AssignTarget = enum(u2) {
};

pub const LocRef = struct {
loc: logger.Loc = .none,
loc: logger.Loc = logger.Loc.Empty,

// TODO: remove this optional and make Ref a function getter
// That will make this struct 128 bits instead of 192 bits and we can remove some heap allocations
@@ -121,7 +121,7 @@ pub const ClauseItem = struct {
/// For exports: `export { foo as bar }` - "bar" is the alias
/// For re-exports: `export { foo as bar } from 'path'` - "bar" is the alias
alias: string,
alias_loc: logger.Loc = .none,
alias_loc: logger.Loc = logger.Loc.Empty,
/// Reference to the actual symbol being imported/exported.
/// For imports: `import { foo as bar }` - ref to the symbol representing "foo" from the source module
/// For exports: `export { foo as bar }` - ref to the local symbol "foo"

@@ -22,9 +22,9 @@ exports_kind: ExportsKind = ExportsKind.none,

// This is a list of ES6 features. They are ranges instead of booleans so
// that they can be used in log messages. Check to see if "Len > 0".
import_keyword: logger.Range = .none, // Does not include TypeScript-specific syntax or "import()"
export_keyword: logger.Range = .none, // Does not include TypeScript-specific syntax
top_level_await_keyword: logger.Range = .none,
import_keyword: logger.Range = logger.Range.None, // Does not include TypeScript-specific syntax or "import()"
export_keyword: logger.Range = logger.Range.None, // Does not include TypeScript-specific syntax
top_level_await_keyword: logger.Range = logger.Range.None,

/// These are stored at the AST level instead of on individual AST nodes so
/// they can be manipulated efficiently without a full AST traversal
@@ -83,14 +83,14 @@ pub const TsEnumsMap = std.ArrayHashMapUnmanaged(Ref, bun.StringHashMapUnmanaged

pub fn fromParts(parts: []Part) Ast {
return Ast{
.parts = Part.List.init(parts),
.parts = Part.List.fromOwnedSlice(parts),
.runtime_imports = .{},
};
}

pub fn initTest(parts: []Part) Ast {
pub fn initTest(parts: []const Part) Ast {
return Ast{
.parts = Part.List.init(parts),
.parts = Part.List.fromBorrowedSliceDangerous(parts),
.runtime_imports = .{},
};
}
@@ -107,9 +107,9 @@ pub fn toJSON(self: *const Ast, _: std.mem.Allocator, stream: anytype) !void {
/// Do not call this if it wasn't globally allocated!
pub fn deinit(this: *Ast) void {
// TODO: assert mimalloc-owned memory
if (this.parts.len > 0) this.parts.deinitWithAllocator(bun.default_allocator);
if (this.symbols.len > 0) this.symbols.deinitWithAllocator(bun.default_allocator);
if (this.import_records.len > 0) this.import_records.deinitWithAllocator(bun.default_allocator);
this.parts.deinit(bun.default_allocator);
this.symbols.deinit(bun.default_allocator);
this.import_records.deinit(bun.default_allocator);
}

pub const Class = G.Class;

@@ -56,7 +56,14 @@ pub fn toExpr(binding: *const Binding, wrapper: anytype) Expr {
};
}

return Expr.init(E.Array, E.Array{ .items = ExprNodeList.init(exprs), .is_single_line = b.is_single_line }, loc);
return Expr.init(
E.Array,
E.Array{
.items = ExprNodeList.fromOwnedSlice(exprs),
.is_single_line = b.is_single_line,
},
loc,
);
},
.b_object => |b| {
const properties = wrapper
@@ -77,7 +84,7 @@ pub fn toExpr(binding: *const Binding, wrapper: anytype) Expr {
return Expr.init(
E.Object,
E.Object{
.properties = G.Property.List.init(properties),
.properties = G.Property.List.fromOwnedSlice(properties),
.is_single_line = b.is_single_line,
},
loc,

@@ -111,7 +111,7 @@ pub fn toAST(this: *const BundledAst) Ast {
.uses_exports_ref = this.flags.uses_exports_ref,
.uses_module_ref = this.flags.uses_module_ref,
// .uses_require_ref = ast.uses_require_ref,
.export_keyword = .{ .len = if (this.flags.uses_export_keyword) 1 else 0, .loc = .none },
.export_keyword = .{ .len = if (this.flags.uses_export_keyword) 1 else 0, .loc = .{} },
.force_cjs_to_esm = this.flags.force_cjs_to_esm,
.has_lazy_export = this.flags.has_lazy_export,
.commonjs_module_exports_assigned_deoptimized = this.flags.commonjs_module_exports_assigned_deoptimized,

@@ -121,7 +121,7 @@ pub fn convertStmt(ctx: *ConvertESMExportsForHmr, p: anytype, stmt: Stmt) !void
const temp_id = p.generateTempRef("default_export");
try ctx.last_part.declared_symbols.append(p.allocator, .{ .ref = temp_id, .is_top_level = true });
try ctx.last_part.symbol_uses.putNoClobber(p.allocator, temp_id, .{ .count_estimate = 1 });
try p.current_scope.generated.push(p.allocator, temp_id);
try p.current_scope.generated.append(p.allocator, temp_id);

try ctx.export_props.append(p.allocator, .{
.key = Expr.init(E.String, .{ .data = "default" }, stmt.loc),
@@ -395,7 +395,7 @@ fn visitRefToExport(
const arg1 = p.generateTempRef(symbol.original_name);
try ctx.last_part.declared_symbols.append(p.allocator, .{ .ref = arg1, .is_top_level = true });
try ctx.last_part.symbol_uses.putNoClobber(p.allocator, arg1, .{ .count_estimate = 1 });
try p.current_scope.generated.push(p.allocator, arg1);
try p.current_scope.generated.append(p.allocator, arg1);

// 'get abc() { return abc }'
try ctx.export_props.append(p.allocator, .{
@@ -438,20 +438,20 @@ pub fn finalize(ctx: *ConvertESMExportsForHmr, p: anytype, all_parts: []js_ast.P

if (ctx.export_props.items.len > 0) {
const obj = Expr.init(E.Object, .{
.properties = G.Property.List.fromList(ctx.export_props),
}, .none);
.properties = G.Property.List.moveFromList(&ctx.export_props),
}, logger.Loc.Empty);

// `hmr.exports = ...`
try ctx.stmts.append(p.allocator, Stmt.alloc(S.SExpr, .{
.value = Expr.assign(
Expr.init(E.Dot, .{
.target = Expr.initIdentifier(p.hmr_api_ref, .none),
.target = Expr.initIdentifier(p.hmr_api_ref, logger.Loc.Empty),
.name = "exports",
.name_loc = .none,
}, .none),
.name_loc = logger.Loc.Empty,
}, logger.Loc.Empty),
obj,
),
}, .none));
}, logger.Loc.Empty));

// mark a dependency on module_ref so it is renamed
try ctx.last_part.symbol_uses.put(p.allocator, p.module_ref, .{ .count_estimate = 1 });
@@ -462,19 +462,22 @@ pub fn finalize(ctx: *ConvertESMExportsForHmr, p: anytype, all_parts: []js_ast.P
try ctx.stmts.append(p.allocator, Stmt.alloc(S.SExpr, .{
.value = Expr.init(E.Call, .{
.target = Expr.init(E.Dot, .{
.target = Expr.initIdentifier(p.hmr_api_ref, .none),
.target = Expr.initIdentifier(p.hmr_api_ref, .Empty),
.name = "reactRefreshAccept",
.name_loc = .none,
}, .none),
.args = .init(&.{}),
}, .none),
}, .none));
.name_loc = .Empty,
}, .Empty),
.args = .empty,
}, .Empty),
}, .Empty));
}

// Merge all part metadata into the first part.
for (all_parts[0 .. all_parts.len - 1]) |*part| {
try ctx.last_part.declared_symbols.appendList(p.allocator, part.declared_symbols);
try ctx.last_part.import_record_indices.append(p.allocator, part.import_record_indices.slice());
try ctx.last_part.import_record_indices.appendSlice(
p.allocator,
part.import_record_indices.slice(),
);
for (part.symbol_uses.keys(), part.symbol_uses.values()) |k, v| {
const gop = try ctx.last_part.symbol_uses.getOrPut(p.allocator, k);
if (!gop.found_existing) {
@@ -487,13 +490,16 @@ pub fn finalize(ctx: *ConvertESMExportsForHmr, p: anytype, all_parts: []js_ast.P
part.declared_symbols.entries.len = 0;
part.tag = .dead_due_to_inlining;
part.dependencies.clearRetainingCapacity();
try part.dependencies.push(p.allocator, .{
try part.dependencies.append(p.allocator, .{
.part_index = @intCast(all_parts.len - 1),
.source_index = p.source.index,
});
}

try ctx.last_part.import_record_indices.append(p.allocator, p.import_records_for_current_part.items);
try ctx.last_part.import_record_indices.appendSlice(
p.allocator,
p.import_records_for_current_part.items,
);
try ctx.last_part.declared_symbols.appendList(p.allocator, p.declared_symbols);

ctx.last_part.stmts = ctx.stmts.items;
src/ast/E.zig (105 changed lines)
@@ -15,10 +15,10 @@ pub const Array = struct {
is_single_line: bool = false,
is_parenthesized: bool = false,
was_originally_macro: bool = false,
close_bracket_loc: logger.Loc = .none,
close_bracket_loc: logger.Loc = logger.Loc.Empty,

pub fn push(this: *Array, allocator: std.mem.Allocator, item: Expr) !void {
try this.items.push(allocator, item);
try this.items.append(allocator, item);
}

pub inline fn slice(this: Array) []Expr {
@@ -30,12 +30,13 @@ pub const Array = struct {
allocator: std.mem.Allocator,
estimated_count: usize,
) !ExprNodeList {
var out = try allocator.alloc(
Expr,
var out: bun.BabyList(Expr) = try .initCapacity(
allocator,
// This over-allocates a little but it's fine
estimated_count + @as(usize, this.items.len),
);
var remain = out;
out.expandToCapacity();
var remain = out.slice();
for (this.items.slice()) |item| {
switch (item.data) {
.e_spread => |val| {
@@ -63,7 +64,8 @@ pub const Array = struct {
remain = remain[1..];
}

return ExprNodeList.init(out[0 .. out.len - remain.len]);
out.shrinkRetainingCapacity(out.len - remain.len);
return out;
}

pub fn toJS(this: @This(), allocator: std.mem.Allocator, globalObject: *jsc.JSGlobalObject) ToJSError!jsc.JSValue {
@@ -98,6 +100,43 @@ pub const Array = struct {
pub const Unary = struct {
op: Op.Code,
value: ExprNodeIndex,
flags: Unary.Flags = .{},

pub const Flags = packed struct(u8) {
/// The expression "typeof (0, x)" must not become "typeof x" if "x"
/// is unbound because that could suppress a ReferenceError from "x".
///
/// Also if we know a typeof operator was originally an identifier, then
/// we know that this typeof operator always has no side effects (even if
/// we consider the identifier by itself to have a side effect).
///
/// Note that there *is* actually a case where "typeof x" can throw an error:
/// when "x" is being referenced inside of its TDZ (temporal dead zone). TDZ
/// checks are not yet handled correctly by Bun, so this possibility is
/// currently ignored.
was_originally_typeof_identifier: bool = false,

/// Similarly the expression "delete (0, x)" must not become "delete x"
/// because that syntax is invalid in strict mode. We also need to make sure
/// we don't accidentally change the return value:
///
/// Returns false:
/// "var a; delete (a)"
/// "var a = Object.freeze({b: 1}); delete (a.b)"
/// "var a = Object.freeze({b: 1}); delete (a?.b)"
/// "var a = Object.freeze({b: 1}); delete (a['b'])"
/// "var a = Object.freeze({b: 1}); delete (a?.['b'])"
///
/// Returns true:
/// "var a; delete (0, a)"
/// "var a = Object.freeze({b: 1}); delete (true && a.b)"
/// "var a = Object.freeze({b: 1}); delete (false || a?.b)"
/// "var a = Object.freeze({b: 1}); delete (null ?? a?.['b'])"
///
/// "var a = Object.freeze({b: 1}); delete (true ? a['b'] : a['b'])"
was_originally_delete_of_identifier_or_property_access: bool = false,
_: u6 = 0,
};
};
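Note (illustrative sketch, not part of the diff): in plain JavaScript, the bare-identifier vs comma-expression distinction these two flags track looks like this:

    var a;
    delete a;       // false in sloppy mode — a bare identifier names a binding, which cannot be deleted
    delete (0, a);  // true — the comma expression yields only the value, so there is nothing to delete
    typeof x;       // "undefined" even when x was never declared — no ReferenceError
    typeof (0, x);  // throws ReferenceError when x is unbound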

pub const Binary = struct {
@@ -161,7 +200,7 @@ pub const Call = struct {
args: ExprNodeList = ExprNodeList{},
optional_chain: ?OptionalChain = null,
is_direct_eval: bool = false,
close_paren_loc: logger.Loc = .none,
close_paren_loc: logger.Loc = logger.Loc.Empty,

// True if there is a comment containing "@__PURE__" or "#__PURE__" preceding
// this call expression. This is an annotation used for tree shaking, and
@@ -233,7 +272,7 @@ pub const Arrow = struct {
pub const noop_return_undefined: Arrow = .{
.args = &.{},
.body = .{
.loc = .none,
.loc = .Empty,
.stmts = &.{},
},
};
@@ -365,7 +404,7 @@ pub const JSXElement = struct {

flags: Flags.JSXElement.Bitset = Flags.JSXElement.Bitset{},

close_tag_loc: logger.Loc = .none,
close_tag_loc: logger.Loc = logger.Loc.Empty,

pub const SpecialProp = enum {
__self, // old react transform used this as a prop
@@ -493,7 +532,7 @@ pub const Object = struct {
is_parenthesized: bool = false,
was_originally_macro: bool = false,

close_brace_loc: logger.Loc = .none,
close_brace_loc: logger.Loc = logger.Loc.Empty,

// used in TOML parser to merge properties
pub const Rope = struct {
@@ -536,7 +575,7 @@ pub const Object = struct {
if (asProperty(self, key)) |query| {
self.properties.ptr[query.i].value = expr;
} else {
try self.properties.push(allocator, .{
try self.properties.append(allocator, .{
.key = Expr.init(E.String, E.String.init(key), expr.loc),
.value = expr,
});
@@ -544,14 +583,14 @@ pub const Object = struct {
}

pub fn putString(self: *Object, allocator: std.mem.Allocator, key: string, value: string) !void {
return try put(self, allocator, key, Expr.init(E.String, E.String.init(value), .none));
return try put(self, allocator, key, Expr.init(E.String, E.String.init(value), logger.Loc.Empty));
}

pub const SetError = error{ OutOfMemory, Clobber };

pub fn set(self: *const Object, key: Expr, allocator: std.mem.Allocator, value: Expr) SetError!void {
if (self.hasProperty(key.data.e_string.data)) return error.Clobber;
try self.properties.push(allocator, .{
try self.properties.append(allocator, .{
.key = key,
.value = value,
});
@@ -605,7 +644,7 @@ pub const Object = struct {
value_ = obj;
}

try self.properties.push(allocator, .{
try self.properties.append(allocator, .{
.key = rope.head,
.value = value_,
});
@@ -646,7 +685,7 @@ pub const Object = struct {
if (rope.next) |next| {
var obj = Expr.init(E.Object, E.Object{ .properties = .{} }, rope.head.loc);
const out = try obj.data.e_object.getOrPutObject(next, allocator);
try self.properties.push(allocator, .{
try self.properties.append(allocator, .{
.key = rope.head,
.value = obj,
});
@@ -654,7 +693,7 @@ pub const Object = struct {
}

const out = Expr.init(E.Object, E.Object{}, rope.head.loc);
try self.properties.push(allocator, .{
try self.properties.append(allocator, .{
.key = rope.head,
.value = out,
});
@@ -695,7 +734,7 @@ pub const Object = struct {
if (rope.next) |next| {
var obj = Expr.init(E.Object, E.Object{ .properties = .{} }, rope.head.loc);
const out = try obj.data.e_object.getOrPutArray(next, allocator);
try self.properties.push(allocator, .{
try self.properties.append(allocator, .{
.key = rope.head,
.value = obj,
});
@@ -703,7 +742,7 @@ pub const Object = struct {
}

const out = Expr.init(E.Array, E.Array{}, rope.head.loc);
try self.properties.push(allocator, .{
try self.properties.append(allocator, .{
.key = rope.head,
.value = out,
});
@@ -940,6 +979,30 @@ pub const String = struct {
return bun.handleOom(this.string(allocator));
}

fn stringCompareForJavaScript(comptime T: type, a: []const T, b: []const T) std.math.Order {
const a_slice = a[0..@min(a.len, b.len)];
const b_slice = b[0..@min(a.len, b.len)];
for (a_slice, b_slice) |a_char, b_char| {
const delta: i32 = @as(i32, a_char) - @as(i32, b_char);
if (delta != 0) {
return if (delta < 0) .lt else .gt;
}
}
return std.math.order(a.len, b.len);
}

/// Compares two strings lexicographically for JavaScript semantics.
/// Both strings must share the same encoding (UTF-8 vs UTF-16).
pub inline fn order(this: *const String, other: *const String) std.math.Order {
bun.debugAssert(this.isUTF8() == other.isUTF8());

if (this.isUTF8()) {
return stringCompareForJavaScript(u8, this.data, other.data);
} else {
return stringCompareForJavaScript(u16, this.slice16(), other.slice16());
}
}
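Note (illustrative sketch, not part of the diff): the helper above mirrors how JavaScript's relational operators order strings — element by element on code unit value, with a string that is a prefix of the other sorting first:

    "abc" < "abd";  // true — the first differing code unit decides
    "ab" < "abc";   // true — the shorter prefix orders first
    "Z" < "a";      // true — ordering is by code unit value (90 < 97), not locale-aware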

pub var empty = String{};
pub var @"true" = String{ .data = "true" };
pub var @"false" = String{ .data = "false" };
@@ -1246,7 +1309,7 @@ pub const Template = struct {
if (part.value.data == .e_string and part.tail.cooked.isUTF8() and part.value.data.e_string.isUTF8()) {
if (parts.items.len == 0) {
if (part.value.data.e_string.len() > 0) {
head.data.e_string.push(Expr.init(E.String, part.value.data.e_string.*, .none).data.e_string);
head.data.e_string.push(Expr.init(E.String, part.value.data.e_string.*, logger.Loc.Empty).data.e_string);
}

if (part.tail.cooked.len() > 0) {
@@ -1260,7 +1323,7 @@ pub const Template = struct {

if (prev_part.tail.cooked.isUTF8()) {
if (part.value.data.e_string.len() > 0) {
prev_part.tail.cooked.push(Expr.init(E.String, part.value.data.e_string.*, .none).data.e_string);
prev_part.tail.cooked.push(Expr.init(E.String, part.value.data.e_string.*, logger.Loc.Empty).data.e_string);
}

if (part.tail.cooked.len() > 0) {
@@ -1361,7 +1424,7 @@ pub const RequireString = struct {
pub const RequireResolveString = struct {
import_record_index: u32,

// close_paren_loc: logger.Loc = .none,
// close_paren_loc: logger.Loc = logger.Loc.Empty,
};

pub const InlinedEnum = struct {
src/ast/Expr.zig (174 changed lines)
@@ -1,7 +1,7 @@
loc: logger.Loc,
data: Data,

pub const empty = Expr{ .data = .{ .e_missing = E.Missing{} }, .loc = .none };
pub const empty = Expr{ .data = .{ .e_missing = E.Missing{} }, .loc = logger.Loc.Empty };

pub fn isAnonymousNamed(expr: Expr) bool {
return switch (expr.data) {
@@ -273,13 +273,10 @@ pub fn set(expr: *Expr, allocator: std.mem.Allocator, name: string, value: Expr)
}
}

var new_props = expr.data.e_object.properties.listManaged(allocator);
try new_props.append(.{
.key = Expr.init(E.String, .{ .data = name }, .none),
try expr.data.e_object.properties.append(allocator, .{
.key = Expr.init(E.String, .{ .data = name }, logger.Loc.Empty),
.value = value,
});

expr.data.e_object.properties = BabyList(G.Property).fromList(new_props);
}

/// Don't use this if you care about performance.
@@ -293,18 +290,15 @@ pub fn setString(expr: *Expr, allocator: std.mem.Allocator, name: string, value:
const key = prop.key orelse continue;
if (std.meta.activeTag(key.data) != .e_string) continue;
if (key.data.e_string.eql(string, name)) {
prop.value = Expr.init(E.String, .{ .data = value }, .none);
prop.value = Expr.init(E.String, .{ .data = value }, logger.Loc.Empty);
return;
}
}

var new_props = expr.data.e_object.properties.listManaged(allocator);
try new_props.append(.{
.key = Expr.init(E.String, .{ .data = name }, .none),
.value = Expr.init(E.String, .{ .data = value }, .none),
try expr.data.e_object.properties.append(allocator, .{
.key = Expr.init(E.String, .{ .data = name }, logger.Loc.Empty),
.value = Expr.init(E.String, .{ .data = value }, logger.Loc.Empty),
});

expr.data.e_object.properties = BabyList(G.Property).fromList(new_props);
}

pub fn getObject(expr: *const Expr, name: string) ?Expr {
@@ -647,6 +641,29 @@ pub fn jsonStringify(self: *const @This(), writer: anytype) !void {
return try writer.write(Serializable{ .type = std.meta.activeTag(self.data), .object = "expr", .value = self.data, .loc = self.loc });
}

pub fn extractNumericValuesInSafeRange(left: Expr.Data, right: Expr.Data) ?[2]f64 {
const l_value = left.extractNumericValue() orelse return null;
const r_value = right.extractNumericValue() orelse return null;

// Check for NaN and return null if either value is NaN
if (std.math.isNan(l_value) or std.math.isNan(r_value)) {
return null;
}

if (std.math.isInf(l_value) or std.math.isInf(r_value)) {
return .{ l_value, r_value };
}

if (l_value > bun.jsc.MAX_SAFE_INTEGER or r_value > bun.jsc.MAX_SAFE_INTEGER) {
return null;
}
if (l_value < bun.jsc.MIN_SAFE_INTEGER or r_value < bun.jsc.MIN_SAFE_INTEGER) {
return null;
}

return .{ l_value, r_value };
}
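Note (illustrative sketch, not part of the diff): the safe-integer guard above presumably exists because IEEE-754 doubles stop representing integers exactly past 2**53, so folding outside that range risks baking in a value the reader of the source did not write:

    Number.MAX_SAFE_INTEGER;                // 9007199254740991, i.e. 2**53 - 1
    9007199254740992 === 9007199254740993;  // true — both literals collapse to the same double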

pub fn extractNumericValues(left: Expr.Data, right: Expr.Data) ?[2]f64 {
return .{
left.extractNumericValue() orelse return null,
@@ -654,6 +671,20 @@ pub fn extractNumericValues(left: Expr.Data, right: Expr.Data) ?[2]f64 {
};
}

pub fn extractStringValues(left: Expr.Data, right: Expr.Data, allocator: std.mem.Allocator) ?[2]*E.String {
const l_string = left.extractStringValue() orelse return null;
const r_string = right.extractStringValue() orelse return null;
l_string.resolveRopeIfNeeded(allocator);
r_string.resolveRopeIfNeeded(allocator);

if (l_string.isUTF8() != r_string.isUTF8()) return null;

return .{
l_string,
r_string,
};
}

pub var icount: usize = 0;

// We don't need to dynamically allocate booleans
@@ -1407,11 +1438,17 @@ pub fn init(comptime Type: type, st: Type, loc: logger.Loc) Expr {
}
}

pub fn isPrimitiveLiteral(this: Expr) bool {
/// If this returns true, then calling this expression captures the target of
/// the property access as "this" when calling the function in the property.
pub inline fn isPropertyAccess(this: *const Expr) bool {
return this.hasValueForThisInCall();
}

pub inline fn isPrimitiveLiteral(this: *const Expr) bool {
return @as(Tag, this.data).isPrimitiveLiteral();
}

pub fn isRef(this: Expr, ref: Ref) bool {
pub inline fn isRef(this: *const Expr, ref: Ref) bool {
return switch (this.data) {
.e_import_identifier => |import_identifier| import_identifier.ref.eql(ref),
.e_identifier => |ident| ident.ref.eql(ref),
@@ -1873,36 +1910,19 @@ pub const Tag = enum {
}
};

pub fn isBoolean(a: Expr) bool {
switch (a.data) {
.e_boolean => {
return true;
pub fn isBoolean(a: *const Expr) bool {
return switch (a.data) {
.e_boolean => true,
.e_if => |ex| ex.yes.isBoolean() and ex.no.isBoolean(),
.e_unary => |ex| ex.op == .un_not or ex.op == .un_delete,
.e_binary => |ex| switch (ex.op) {
.bin_strict_eq, .bin_strict_ne, .bin_loose_eq, .bin_loose_ne, .bin_lt, .bin_gt, .bin_le, .bin_ge, .bin_instanceof, .bin_in => true,
.bin_logical_or => ex.left.isBoolean() and ex.right.isBoolean(),
.bin_logical_and => ex.left.isBoolean() and ex.right.isBoolean(),
else => false,
},

.e_if => |ex| {
return isBoolean(ex.yes) and isBoolean(ex.no);
},
.e_unary => |ex| {
return ex.op == .un_not or ex.op == .un_delete;
},
.e_binary => |ex| {
switch (ex.op) {
.bin_strict_eq, .bin_strict_ne, .bin_loose_eq, .bin_loose_ne, .bin_lt, .bin_gt, .bin_le, .bin_ge, .bin_instanceof, .bin_in => {
return true;
},
.bin_logical_or => {
return isBoolean(ex.left) and isBoolean(ex.right);
},
.bin_logical_and => {
return isBoolean(ex.left) and isBoolean(ex.right);
},
else => {},
}
},
else => {},
}

return false;
else => false,
};
}

pub fn assign(a: Expr, b: Expr) Expr {
@@ -1912,7 +1932,7 @@ pub fn assign(a: Expr, b: Expr) Expr {
.right = b,
}, a.loc);
}
pub inline fn at(expr: Expr, comptime Type: type, t: Type, _: std.mem.Allocator) Expr {
pub inline fn at(expr: *const Expr, comptime Type: type, t: Type, _: std.mem.Allocator) Expr {
return init(Type, t, expr.loc);
}

@@ -1920,21 +1940,19 @@ pub inline fn at(expr: Expr, comptime Type: type, t: Type, _: std.mem.Allocator)
// will potentially be simplified to avoid generating unnecessary extra "!"
// operators. For example, calling this with "!!x" will return "!x" instead
// of returning "!!!x".
pub fn not(expr: Expr, allocator: std.mem.Allocator) Expr {
return maybeSimplifyNot(
expr,
allocator,
) orelse Expr.init(
E.Unary,
E.Unary{
.op = .un_not,
.value = expr,
},
expr.loc,
);
pub fn not(expr: *const Expr, allocator: std.mem.Allocator) Expr {
return expr.maybeSimplifyNot(allocator) orelse
Expr.init(
E.Unary,
E.Unary{
.op = .un_not,
.value = expr.*,
},
expr.loc,
);
}

pub fn hasValueForThisInCall(expr: Expr) bool {
pub inline fn hasValueForThisInCall(expr: *const Expr) bool {
return switch (expr.data) {
.e_dot, .e_index => true,
else => false,
@@ -1946,7 +1964,7 @@ pub fn hasValueForThisInCall(expr: Expr) bool {
/// whole operator (i.e. the "!x") if it can be simplified, or false if not.
/// It's separate from "Not()" above to avoid allocation on failure in case
/// that is undesired.
pub fn maybeSimplifyNot(expr: Expr, allocator: std.mem.Allocator) ?Expr {
pub fn maybeSimplifyNot(expr: *const Expr, allocator: std.mem.Allocator) ?Expr {
switch (expr.data) {
.e_null, .e_undefined => {
return expr.at(E.Boolean, E.Boolean{ .value = true }, allocator);
@@ -1968,7 +1986,7 @@ pub fn maybeSimplifyNot(expr: Expr, allocator: std.mem.Allocator) ?Expr {
},
// "!!!a" => "!a"
.e_unary => |un| {
if (un.op == Op.Code.un_not and knownPrimitive(un.value) == .boolean) {
if (un.op == Op.Code.un_not and un.value.knownPrimitive() == .boolean) {
return un.value;
}
},
@@ -1981,33 +1999,33 @@ pub fn maybeSimplifyNot(expr: Expr, allocator: std.mem.Allocator) ?Expr {
Op.Code.bin_loose_eq => {
// "!(a == b)" => "a != b"
ex.op = .bin_loose_ne;
return expr;
return expr.*;
},
Op.Code.bin_loose_ne => {
// "!(a != b)" => "a == b"
ex.op = .bin_loose_eq;
return expr;
return expr.*;
},
Op.Code.bin_strict_eq => {
// "!(a === b)" => "a !== b"
ex.op = .bin_strict_ne;
return expr;
return expr.*;
},
Op.Code.bin_strict_ne => {
// "!(a !== b)" => "a === b"
ex.op = .bin_strict_eq;
return expr;
return expr.*;
},
Op.Code.bin_comma => {
// "!(a, b)" => "a, !b"
ex.right = ex.right.not(allocator);
return expr;
return expr.*;
},
else => {},
}
},
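Note (illustrative sketch, not part of the diff): inverting a `!` is only safe for the equality operators handled above; relational operators are excluded because NaN makes `!(a < b)` differ from `a >= b`:

    const a = NaN, b = 1;
    !(a == b);  // true — identical to (a != b) for every input, so the rewrite is sound
    !(a < b);   // true, but (a >= b) is false — so that rewrite would change behavior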
.e_inlined_enum => |inlined| {
return maybeSimplifyNot(inlined.value, allocator);
return inlined.value.maybeSimplifyNot(allocator);
},

else => {},
@@ -2016,11 +2034,11 @@ pub fn maybeSimplifyNot(expr: Expr, allocator: std.mem.Allocator) ?Expr {
return null;
}

pub fn toStringExprWithoutSideEffects(expr: Expr, allocator: std.mem.Allocator) ?Expr {
pub fn toStringExprWithoutSideEffects(expr: *const Expr, allocator: std.mem.Allocator) ?Expr {
const unwrapped = expr.unwrapInlined();
const slice = switch (unwrapped.data) {
.e_null => "null",
.e_string => return expr,
.e_string => return expr.*,
.e_undefined => "undefined",
.e_boolean => |data| if (data.value) "true" else "false",
.e_big_int => |bigint| bigint.value,
@@ -2054,7 +2072,7 @@ pub fn isOptionalChain(self: *const @This()) bool {
};
}

pub inline fn knownPrimitive(self: @This()) PrimitiveType {
pub inline fn knownPrimitive(self: *const @This()) PrimitiveType {
return self.data.knownPrimitive();
}

@@ -2294,6 +2312,7 @@ pub const Data = union(Tag) {
const item = bun.create(allocator, E.Unary, .{
.op = el.op,
.value = try el.value.deepClone(allocator),
.flags = el.flags,
});
return .{ .e_unary = item };
},
@@ -2506,6 +2525,7 @@ pub const Data = union(Tag) {
}
},
.e_unary => |e| {
writeAnyToHasher(hasher, @as(u8, @bitCast(e.flags)));
writeAnyToHasher(hasher, .{e.op});
e.value.data.writeToHasher(hasher, symbol_table);
},
@@ -2537,7 +2557,7 @@ pub const Data = union(Tag) {
inline .e_spread, .e_await => |e| {
e.value.data.writeToHasher(hasher, symbol_table);
},
inline .e_yield => |e| {
.e_yield => |e| {
writeAnyToHasher(hasher, .{ e.is_star, e.value });
if (e.value) |value|
value.data.writeToHasher(hasher, symbol_table);
@@ -2860,6 +2880,17 @@ pub const Data = union(Tag) {
};
}

pub fn extractStringValue(data: Expr.Data) ?*E.String {
return switch (data) {
.e_string => data.e_string,
.e_inlined_enum => |inlined| switch (inlined.value.data) {
.e_string => |str| str,
else => null,
},
else => null,
};
}

pub const Equality = struct {
equal: bool = false,
ok: bool = false,
@@ -3088,7 +3119,7 @@ pub const Data = union(Tag) {

// brk: {
// // var node = try allocator.create(Macro.JSNode);
// // node.* = Macro.JSNode.initExpr(Expr{ .data = this, .loc = .none });
// // node.* = Macro.JSNode.initExpr(Expr{ .data = this, .loc = logger.Loc.Empty });
// // break :brk jsc.JSValue.c(Macro.JSNode.Class.make(globalObject, node));
// },

@@ -3208,7 +3239,6 @@ const JSPrinter = @import("../js_printer.zig");
const std = @import("std");

const bun = @import("bun");
const BabyList = bun.BabyList;
const Environment = bun.Environment;
const JSONParser = bun.json;
const MutableString = bun.MutableString;

@@ -24,12 +24,12 @@ pub const ExportStarAlias = struct {
};

pub const Class = struct {
class_keyword: logger.Range = .none,
class_keyword: logger.Range = logger.Range.None,
ts_decorators: ExprNodeList = ExprNodeList{},
class_name: ?LocRef = null,
extends: ?ExprNodeIndex = null,
body_loc: logger.Loc = .none,
close_brace_loc: logger.Loc = .none,
body_loc: logger.Loc = logger.Loc.Empty,
close_brace_loc: logger.Loc = logger.Loc.Empty,
properties: []Property = &([_]Property{}),
has_decorators: bool = false,

@@ -157,11 +157,11 @@ pub const FnBody = struct {

pub const Fn = struct {
name: ?LocRef = null,
open_parens_loc: logger.Loc = .none,
open_parens_loc: logger.Loc = logger.Loc.Empty,
args: []Arg = &.{},
// This was originally nullable, but doing so I believe caused a miscompilation
// Specifically, the body was always null.
body: FnBody = .{ .loc = .none, .stmts = &.{} },
body: FnBody = .{ .loc = logger.Loc.Empty, .stmts = &.{} },
arguments_ref: ?Ref = null,

flags: Flags.Function.Set = Flags.Function.None,

@@ -8,18 +8,161 @@ pub const KnownGlobal = enum {
Response,
TextEncoder,
TextDecoder,
Error,
TypeError,
SyntaxError,
RangeError,
ReferenceError,
EvalError,
URIError,
AggregateError,
Array,
Object,
Function,
RegExp,

pub const map = bun.ComptimeEnumMap(KnownGlobal);

pub noinline fn maybeMarkConstructorAsPure(noalias e: *E.New, symbols: []const Symbol) void {
const id = if (e.target.data == .e_identifier) e.target.data.e_identifier.ref else return;
inline fn callFromNew(e: *E.New, loc: logger.Loc) js_ast.Expr {
const call = E.Call{
.target = e.target,
.args = e.args,
.close_paren_loc = e.close_parens_loc,
.can_be_unwrapped_if_unused = e.can_be_unwrapped_if_unused,
};
return js_ast.Expr.init(E.Call, call, loc);
}

pub noinline fn minifyGlobalConstructor(allocator: std.mem.Allocator, noalias e: *E.New, symbols: []const Symbol, loc: logger.Loc, minify_whitespace: bool) ?js_ast.Expr {
const id = if (e.target.data == .e_identifier) e.target.data.e_identifier.ref else return null;
const symbol = &symbols[id.innerIndex()];
if (symbol.kind != .unbound)
return;
return null;

const constructor = map.get(symbol.original_name) orelse return;
const constructor = map.get(symbol.original_name) orelse return null;

switch (constructor) {
return switch (constructor) {
// Error constructors can be called without 'new' with identical behavior
.Error, .TypeError, .SyntaxError, .RangeError, .ReferenceError, .EvalError, .URIError, .AggregateError => {
// Convert `new Error(...)` to `Error(...)` to save bytes
return callFromNew(e, loc);
},

.Object => {
const n = e.args.len;

if (n == 0) {
// new Object() -> {}
return js_ast.Expr.init(E.Object, E.Object{}, loc);
}

if (n == 1) {
const arg = e.args.ptr[0];
switch (arg.data) {
.e_object, .e_array => {
// new Object({a: 1}) -> {a: 1}
// new Object([1, 2]) -> [1, 2]
return arg;
},
.e_null, .e_undefined => {
// new Object(null) -> {}
// new Object(undefined) -> {}
return js_ast.Expr.init(E.Object, E.Object{}, loc);
},
else => {},
}
}

// For other cases, just remove 'new'
return callFromNew(e, loc);
},

.Array => {
const n = e.args.len;

return switch (n) {
0 => {
// new Array() -> []
return js_ast.Expr.init(E.Array, E.Array{}, loc);
},
1 => {
// For single argument, only convert to literal if we're SURE it's not a number
const arg = e.args.ptr[0];

// Check if it's an object or array literal first
switch (arg.data) {
.e_object, .e_array => {
// new Array({}) -> [{}], new Array([1]) -> [[1]]
// These are definitely not numbers, safe to convert
return js_ast.Expr.init(E.Array, .{ .items = e.args }, loc);
},
else => {},
}

// For other types, check via knownPrimitive
const primitive = arg.knownPrimitive();
// Only convert if we know for certain it's not a number
// unknown could be a number at runtime, so we must preserve Array() call
switch (primitive) {
.null, .undefined, .boolean, .string, .bigint => {
// These are definitely not numbers, safe to convert
return js_ast.Expr.init(E.Array, .{ .items = e.args }, loc);
},
.number => {
if (arg.data != .e_number) {
return callFromNew(e, loc);
}
const val = arg.data.e_number.value;
if (
// only want this with whitespace minification
minify_whitespace and
(val == 0 or
val == 1 or
val == 2 or
val == 3 or
val == 4 or
val == 5 or
val == 6 or
val == 7 or
val == 8 or
val == 9 or
val == 10))
{
const arg_loc = arg.loc;
var list = e.args.moveToListManaged(allocator);
list.clearRetainingCapacity();
bun.handleOom(list.appendNTimes(js_ast.Expr{ .data = js_parser.Prefill.Data.EMissing, .loc = arg_loc }, @intFromFloat(val)));
return js_ast.Expr.init(E.Array, .{ .items = .moveFromList(&list) }, loc);
}
return callFromNew(e, loc);
},
.unknown, .mixed => {
// Could be a number, preserve Array() call
return callFromNew(e, loc);
},
}
},
// > 1
else => {
// new Array(1, 2, 3) -> [1, 2, 3]
// But NOT new Array(3) which creates an array with 3 empty slots
return js_ast.Expr.init(E.Array, .{ .items = e.args }, loc);
},
};
},
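Note (illustrative sketch, not part of the diff): the single-argument case above is delicate because `new Array(n)` with a number allocates holes rather than wrapping the value:

    new Array(3);        // [ <3 empty items> ] — length 3, NOT [3]
    new Array("3");      // ["3"]
    new Array(1, 2, 3);  // [1, 2, 3]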

.Function => {
// Just remove 'new' for Function
return callFromNew(e, loc);
},
.RegExp => {
// Don't optimize RegExp - the semantics are too complex:
// - new RegExp(re) creates a copy, but RegExp(re) returns the same instance
// - This affects object identity and lastIndex behavior
// - The difference only applies when flags are undefined
// Keep the original new RegExp() call to preserve correct semantics
return null;
},
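Note (illustrative sketch, not part of the diff): the identity difference that makes RegExp unsafe to rewrite:

    const re = /ab/g;
    RegExp(re) === re;      // true — called without `new` and without a flags argument, the same object comes back
    new RegExp(re) === re;  // false — a fresh copy, with its own lastIndex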
.WeakSet, .WeakMap => {
const n = e.args.len;

@@ -27,7 +170,7 @@ pub const KnownGlobal = enum {
// "new WeakSet()" is pure
e.can_be_unwrapped_if_unused = .if_unused;

return;
return null;
}

if (n == 1) {
@@ -50,6 +193,7 @@ pub const KnownGlobal = enum {
},
}
}
return null;
},
.Date => {
const n = e.args.len;
@@ -58,7 +202,7 @@ pub const KnownGlobal = enum {
// "new Date()" is pure
e.can_be_unwrapped_if_unused = .if_unused;

return;
return null;
}

if (n == 1) {
@@ -78,6 +222,7 @@ pub const KnownGlobal = enum {
},
}
}
return null;
},

.Set => {
@@ -86,7 +231,7 @@ pub const KnownGlobal = enum {
if (n == 0) {
// "new Set()" is pure
e.can_be_unwrapped_if_unused = .if_unused;
return;
return null;
}

if (n == 1) {
@@ -102,6 +247,7 @@ pub const KnownGlobal = enum {
},
}
}
return null;
},

.Headers => {
@@ -111,8 +257,9 @@ pub const KnownGlobal = enum {
// "new Headers()" is pure
e.can_be_unwrapped_if_unused = .if_unused;

return;
return null;
}
return null;
},

.Response => {
@@ -122,7 +269,7 @@ pub const KnownGlobal = enum {
// "new Response()" is pure
e.can_be_unwrapped_if_unused = .if_unused;

return;
return null;
}

if (n == 1) {
@@ -142,6 +289,7 @@ pub const KnownGlobal = enum {
},
}
}
return null;
},
.TextDecoder, .TextEncoder => {
const n = e.args.len;
@@ -151,11 +299,12 @@ pub const KnownGlobal = enum {
// "new TextDecoder()" is pure
e.can_be_unwrapped_if_unused = .if_unused;

return;
return null;
}

// We _could_ validate the encoding argument
// But let's not bother
return null;
},

.Map => {
@@ -164,7 +313,7 @@ pub const KnownGlobal = enum {
if (n == 0) {
// "new Map()" is pure
e.can_be_unwrapped_if_unused = .if_unused;
return;
return null;
}

if (n == 1) {
@@ -193,18 +342,20 @@ pub const KnownGlobal = enum {
},
}
}
return null;
},
}
};
}
};

const string = []const u8;

const std = @import("std");

const bun = @import("bun");
const js_parser = bun.js_parser;
const logger = bun.logger;

const js_ast = bun.ast;
const E = js_ast.E;
const Symbol = js_ast.Symbol;

const std = @import("std");
const Map = std.AutoHashMapUnmanaged;

@@ -386,7 +386,7 @@ pub const Runner = struct {
const result = Expr.init(
E.Array,
E.Array{
.items = ExprNodeList.init(&[_]Expr{}),
.items = ExprNodeList.empty,
.was_originally_macro = true,
},
this.caller.loc,
@@ -398,7 +398,7 @@ pub const Runner = struct {
var out = Expr.init(
E.Array,
E.Array{
.items = ExprNodeList.init(array[0..0]),
.items = ExprNodeList.empty,
.was_originally_macro = true,
},
this.caller.loc,
@@ -413,7 +413,7 @@ pub const Runner = struct {
continue;
i += 1;
}
out.data.e_array.items = ExprNodeList.init(array);
out.data.e_array.items = ExprNodeList.fromOwnedSlice(array);
_entry.value_ptr.* = out;
return out;
},
@@ -438,27 +438,37 @@ pub const Runner = struct {
.include_value = true,
}).init(this.global, obj);
defer object_iter.deinit();
var properties = this.allocator.alloc(G.Property, object_iter.len) catch unreachable;
errdefer this.allocator.free(properties);
var out = Expr.init(

const out = _entry.value_ptr;
out.* = Expr.init(
E.Object,
E.Object{
.properties = BabyList(G.Property).init(properties),
.properties = bun.handleOom(
G.Property.List.initCapacity(this.allocator, object_iter.len),
),
.was_originally_macro = true,
},
this.caller.loc,
);
_entry.value_ptr.* = out;
const properties = &out.data.e_object.properties;
errdefer properties.clearAndFree(this.allocator);

while (try object_iter.next()) |prop| {
properties[object_iter.i] = G.Property{
.key = Expr.init(E.String, E.String.init(prop.toOwnedSlice(this.allocator) catch unreachable), this.caller.loc),
bun.assertf(
object_iter.i == properties.len,
"`properties` unexpectedly modified (length {d}, expected {d})",
.{ properties.len, object_iter.i },
);
properties.appendAssumeCapacity(G.Property{
.key = Expr.init(
E.String,
E.String.init(prop.toOwnedSlice(this.allocator) catch unreachable),
this.caller.loc,
),
.value = try this.run(object_iter.value),
};
});
}
out.data.e_object.properties = BabyList(G.Property).init(properties[0..object_iter.i]);
_entry.value_ptr.* = out;
return out;
return out.*;
},

.JSON => {
@@ -644,7 +654,6 @@ const Resolver = @import("../resolver/resolver.zig").Resolver;
const isPackagePath = @import("../resolver/resolver.zig").isPackagePath;

const bun = @import("bun");
const BabyList = bun.BabyList;
const Environment = bun.Environment;
const Output = bun.Output;
const Transpiler = bun.Transpiler;
src/ast/P.zig (376 changed lines; diff suppressed because it is too large)
@@ -155,14 +155,14 @@ pub const Parser = struct {
if (self.options.jsx.parse and p.needs_jsx_import) {
_ = p.addImportRecord(
.require,
.from(0),
logger.Loc{ .start = 0 },
p.options.jsx.importSource(),
);
// Ensure we have both classic and automatic
// This is to handle cases where they use fragments in the automatic runtime
_ = p.addImportRecord(
.require,
.from(0),
logger.Loc{ .start = 0 },
p.options.jsx.classic_import_source,
);
}
@@ -188,7 +188,7 @@ pub const Parser = struct {
// in the `symbols` array.
bun.assert(p.symbols.items.len == 0);
var symbols_ = symbols;
p.symbols = symbols_.listManaged(p.allocator);
p.symbols = symbols_.moveToListManaged(p.allocator);

try p.prepareForVisitPass();

@@ -454,7 +454,7 @@ pub const Parser = struct {
var debugger_stmts = try p.allocator.alloc(Stmt, 1);
debugger_stmts[0] = Stmt{
.data = .{ .s_debugger = .{} },
.loc = .none,
.loc = logger.Loc.Empty,
};
before.append(
js_ast.Part{
@@ -550,10 +550,7 @@ pub const Parser = struct {
var sliced = try ListManaged(Stmt).initCapacity(p.allocator, 1);
sliced.items.len = 1;
var _local = local.*;
var list = try ListManaged(G.Decl).initCapacity(p.allocator, 1);
list.items.len = 1;
list.items[0] = decl;
_local.decls.update(list);
_local.decls = try .initOne(p.allocator, decl);
sliced.items[0] = p.s(_local, stmt.loc);
try p.appendPart(&parts, sliced.items);
}
@@ -660,24 +657,24 @@ pub const Parser = struct {
var decls = p.allocator.alloc(G.Decl, count) catch unreachable;
if (uses_dirname) {
decls[0] = .{
.binding = p.b(B.Identifier{ .ref = p.dirname_ref }, .none),
.binding = p.b(B.Identifier{ .ref = p.dirname_ref }, logger.Loc.Empty),
.value = p.newExpr(
E.String{
.data = p.source.path.name.dir,
},
.none,
logger.Loc.Empty,
),
};
declared_symbols.appendAssumeCapacity(.{ .ref = p.dirname_ref, .is_top_level = true });
}
if (uses_filename) {
decls[@as(usize, @intFromBool(uses_dirname))] = .{
.binding = p.b(B.Identifier{ .ref = p.filename_ref }, .none),
.binding = p.b(B.Identifier{ .ref = p.filename_ref }, logger.Loc.Empty),
.value = p.newExpr(
E.String{
.data = p.source.path.text,
},
.none,
logger.Loc.Empty,
),
};
declared_symbols.appendAssumeCapacity(.{ .ref = p.filename_ref, .is_top_level = true });
@@ -686,8 +683,8 @@ pub const Parser = struct {
var part_stmts = p.allocator.alloc(Stmt, 1) catch unreachable;
part_stmts[0] = p.s(S.Local{
.kind = .k_var,
.decls = Decl.List.init(decls),
}, .none);
.decls = Decl.List.fromOwnedSlice(decls),
}, logger.Loc.Empty);
before.append(js_ast.Part{
.stmts = part_stmts,
.declared_symbols = declared_symbols,
@@ -713,7 +710,7 @@ pub const Parser = struct {
var import_part_stmts = remaining_stmts[0..1];
remaining_stmts = remaining_stmts[1..];

bun.handleOom(p.module_scope.generated.push(p.allocator, deferred_import.namespace.ref.?));
bun.handleOom(p.module_scope.generated.append(p.allocator, deferred_import.namespace.ref.?));

import_part_stmts[0] = Stmt.alloc(
S.Import,
@@ -835,7 +832,7 @@ pub const Parser = struct {
part.symbol_uses = .{};
return js_ast.Result{
.ast = js_ast.Ast{
.import_records = ImportRecord.List.init(p.import_records.items),
.import_records = ImportRecord.List.moveFromList(&p.import_records),
.redirect_import_record_index = id,
.named_imports = p.named_imports,
.named_exports = p.named_exports,
@@ -905,7 +902,10 @@ pub const Parser = struct {
break :brk new_stmts.items;
};

part.import_record_indices.push(p.allocator, right.data.e_require_string.import_record_index) catch unreachable;
part.import_record_indices.append(
p.allocator,
right.data.e_require_string.import_record_index,
) catch |err| bun.handleOom(err);
p.symbols.items[p.module_ref.innerIndex()].use_count_estimate = 0;
p.symbols.items[namespace_ref.innerIndex()].use_count_estimate -|= 1;
_ = part.symbol_uses.swapRemove(namespace_ref);
@@ -1134,14 +1134,14 @@ pub const Parser = struct {
if (uses_dirname) {
// var __dirname = import.meta
decls[0] = .{
.binding = p.b(B.Identifier{ .ref = p.dirname_ref }, .none),
.binding = p.b(B.Identifier{ .ref = p.dirname_ref }, logger.Loc.Empty),
.value = p.newExpr(
E.Dot{
.name = "dir",
.name_loc = .none,
.target = p.newExpr(E.ImportMeta{}, .none),
.name_loc = logger.Loc.Empty,
.target = p.newExpr(E.ImportMeta{}, logger.Loc.Empty),
},
.none,
logger.Loc.Empty,
),
};
declared_symbols.appendAssumeCapacity(.{ .ref = p.dirname_ref, .is_top_level = true });
@@ -1149,14 +1149,14 @@ pub const Parser = struct {
if (uses_filename) {
// var __filename = import.meta.path
decls[@as(usize, @intFromBool(uses_dirname))] = .{
.binding = p.b(B.Identifier{ .ref = p.filename_ref }, .none),
.binding = p.b(B.Identifier{ .ref = p.filename_ref }, logger.Loc.Empty),
.value = p.newExpr(
E.Dot{
.name = "path",
.name_loc = .none,
.target = p.newExpr(E.ImportMeta{}, .none),
.name_loc = logger.Loc.Empty,
.target = p.newExpr(E.ImportMeta{}, logger.Loc.Empty),
},
.none,
logger.Loc.Empty,
),
};
declared_symbols.appendAssumeCapacity(.{ .ref = p.filename_ref, .is_top_level = true });
@@ -1165,8 +1165,8 @@ pub const Parser = struct {
var part_stmts = p.allocator.alloc(Stmt, 1) catch unreachable;
part_stmts[0] = p.s(S.Local{
.kind = .k_var,
.decls = Decl.List.init(decls),
}, .none);
.decls = Decl.List.fromOwnedSlice(decls),
}, logger.Loc.Empty);
before.append(js_ast.Part{
.stmts = part_stmts,
.declared_symbols = declared_symbols,
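Sketch of the JavaScript these hunks generate for ESM, per the "// var __dirname = import.meta" and "// var __filename = import.meta.path" comments above (import.meta.dir and import.meta.path are Bun's import.meta extensions):

    var __dirname = import.meta.dir;   // directory of the current module
    var __filename = import.meta.path; // absolute path of the current module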
@@ -1208,7 +1208,7 @@ pub const Parser = struct {
if (items_count == 0)
break :outer;

const import_record_id = p.addImportRecord(.stmt, .none, "bun:test");
const import_record_id = p.addImportRecord(.stmt, logger.Loc.Empty, "bun:test");
var import_record: *ImportRecord = &p.import_records.items[import_record_id];
import_record.tag = .bun_test;

@@ -1219,9 +1219,9 @@ pub const Parser = struct {
inline for (comptime std.meta.fieldNames(Jest)) |symbol_name| {
if (p.symbols.items[@field(jest, symbol_name).innerIndex()].use_count_estimate > 0) {
clauses[clause_i] = js_ast.ClauseItem{
.name = .{ .ref = @field(jest, symbol_name), .loc = .none },
.name = .{ .ref = @field(jest, symbol_name), .loc = logger.Loc.Empty },
.alias = symbol_name,
.alias_loc = .none,
.alias_loc = logger.Loc.Empty,
.original_name = "",
};
declared_symbols.appendAssumeCapacity(.{ .ref = @field(jest, symbol_name), .is_top_level = true });
@@ -1231,11 +1231,11 @@ pub const Parser = struct {

const import_stmt = p.s(
S.Import{
.namespace_ref = p.declareSymbol(.unbound, .none, "bun_test_import_namespace_for_internal_use_only") catch unreachable,
.namespace_ref = p.declareSymbol(.unbound, logger.Loc.Empty, "bun_test_import_namespace_for_internal_use_only") catch unreachable,
.items = clauses,
.import_record_index = import_record_id,
},
.none,
logger.Loc.Empty,
);

var part_stmts = try p.allocator.alloc(Stmt, 1);
@@ -1245,7 +1245,7 @@ pub const Parser = struct {
before.append(js_ast.Part{
.stmts = part_stmts,
.declared_symbols = declared_symbols,
.import_record_indices = bun.BabyList(u32).init(import_record_indices),
.import_record_indices = bun.BabyList(u32).fromOwnedSlice(import_record_indices),
.tag = .bun_test,
}) catch unreachable;

@@ -1,6 +1,6 @@
pub const Block = struct {
stmts: StmtNodeList,
close_brace_loc: logger.Loc = .none,
close_brace_loc: logger.Loc = logger.Loc.Empty,
};

pub const SExpr = struct {
@@ -123,7 +123,7 @@ pub const While = struct {
pub const With = struct {
value: ExprNodeIndex,
body: StmtNodeIndex,
body_loc: logger.Loc = .none,
body_loc: logger.Loc = logger.Loc.Empty,
};

pub const Try = struct {

@@ -74,7 +74,7 @@ pub const Member = struct {
loc: logger.Loc,

pub fn eql(a: Member, b: Member) bool {
return @call(bun.callmod_inline, Ref.eql, .{ a.ref, b.ref }) and a.loc == b.loc;
return @call(bun.callmod_inline, Ref.eql, .{ a.ref, b.ref }) and a.loc.start == b.loc.start;
}
};

@@ -153,7 +153,7 @@ pub const SideEffects = enum(u1) {
// "typeof x" must not be transformed into if "x" since doing so could
// cause an exception to be thrown. Instead we can just remove it since
// "typeof x" is special-cased in the standard to never throw.
if (std.meta.activeTag(un.value.data) == .e_identifier) {
if (un.value.data == .e_identifier and un.flags.was_originally_typeof_identifier) {
return null;
}

@@ -199,6 +199,10 @@ pub const SideEffects = enum(u1) {
// "toString" and/or "valueOf" to be called.
.bin_loose_eq,
.bin_loose_ne,
.bin_lt,
.bin_gt,
.bin_le,
.bin_ge,
=> {
if (isPrimitiveWithSideEffects(bin.left.data) and isPrimitiveWithSideEffects(bin.right.data)) {
return Expr.joinWithComma(
@@ -207,13 +211,23 @@ pub const SideEffects = enum(u1) {
p.allocator,
);
}
// If one side is a number, the number can be printed as
// `0` since the result being unused doesnt matter, we
// only care to invoke the coercion.
if (bin.left.data == .e_number) {
bin.left.data = .{ .e_number = .{ .value = 0.0 } };
} else if (bin.right.data == .e_number) {
bin.right.data = .{ .e_number = .{ .value = 0.0 } };

switch (bin.op) {
.bin_loose_eq,
.bin_loose_ne,
=> {
// If one side is a number and the other side is a known primitive with side effects,
// the number can be printed as `0` since the result being unused doesn't matter,
// we only care to invoke the coercion.
// We only do this optimization if the other side is a known primitive with side effects
// to avoid corrupting shared nodes when the other side is an undefined identifier
if (bin.left.data == .e_number) {
bin.left.data = .{ .e_number = .{ .value = 0.0 } };
} else if (bin.right.data == .e_number) {
bin.right.data = .{ .e_number = .{ .value = 0.0 } };
}
},
else => {},
}
},
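The rewritten branch above now only zeroes the numeric operand of an unused loose comparison when the other side is a known primitive with side effects. A hedged JavaScript sketch of the idea (sideEffect is a hypothetical function):

    // a statement whose result is unused:
    sideEffect() == 1234.5678;
    // can be printed as the following, since only the coercion is observable:
    sideEffect() == 0;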
@@ -259,7 +273,8 @@ pub const SideEffects = enum(u1) {
}

properties_slice = properties_slice[0..end];
expr.data.e_object.properties = G.Property.List.init(properties_slice);
expr.data.e_object.properties =
G.Property.List.fromBorrowedSliceDangerous(properties_slice);
return expr;
}
}
@@ -297,16 +312,14 @@ pub const SideEffects = enum(u1) {
for (items) |item| {
if (item.data == .e_spread) {
var end: usize = 0;
for (items) |item__| {
const item_ = item__;
for (items) |item_| {
if (item_.data != .e_missing) {
items[end] = item_;
end += 1;
}

expr.data.e_array.items = ExprNodeList.init(items[0..end]);
return expr;
}
expr.data.e_array.items.shrinkRetainingCapacity(end);
return expr;
}
}

@@ -443,7 +456,7 @@ pub const SideEffects = enum(u1) {
findIdentifiers(decl.binding, &decls);
}

local.decls.update(decls);
local.decls = .moveFromList(&decls);
return true;
},

@@ -875,7 +888,6 @@ const js_ast = bun.ast;
const Binding = js_ast.Binding;
const E = js_ast.E;
const Expr = js_ast.Expr;
const ExprNodeList = js_ast.ExprNodeList;
const Stmt = js_ast.Stmt;

const G = js_ast.G;

@@ -37,7 +37,7 @@ pub fn isMissingExpr(self: Stmt) bool {
}

pub fn empty() Stmt {
return Stmt{ .data = .{ .s_empty = None }, .loc = .none };
return Stmt{ .data = .{ .s_empty = None }, .loc = logger.Loc{} };
}

pub fn toEmpty(this: Stmt) Stmt {

@@ -412,7 +412,7 @@ pub const Map = struct {
}

pub fn initWithOneList(list: List) Map {
const baby_list = BabyList(List).init((&list)[0..1]);
const baby_list = BabyList(List).fromBorrowedSliceDangerous((&list)[0..1]);
return initList(baby_list);
}

@@ -22,7 +22,7 @@ pub fn AstMaybe(
return .{ .ok = false };
}

var value: Expr = Expr{ .loc = .none, .data = Expr.Data{ .e_missing = E.Missing{} } };
var value: Expr = Expr{ .loc = logger.Loc.Empty, .data = Expr.Data{ .e_missing = E.Missing{} } };

for (decls) |decl| {
const binding = Binding.toExpr(
@@ -68,7 +68,7 @@ pub fn AstMaybe(
.loc = name_loc,
.ref = p.newSymbol(.import, name) catch unreachable,
};
p.module_scope.generated.push(p.allocator, new_item.ref.?) catch unreachable;
bun.handleOom(p.module_scope.generated.append(p.allocator, new_item.ref.?));

import_items.put(name, new_item) catch unreachable;
p.is_import_item.put(p.allocator, new_item.ref.?, {}) catch unreachable;
@@ -214,7 +214,7 @@ pub fn AstMaybe(
.other,
std.fmt.allocPrint(p.allocator, "${any}", .{bun.fmt.fmtIdentifier(key)}) catch unreachable,
) catch unreachable;
p.module_scope.generated.push(p.allocator, new_ref) catch unreachable;
bun.handleOom(p.module_scope.generated.append(p.allocator, new_ref));
named_export_entry.value_ptr.* = .{
.loc_ref = LocRef{
.loc = name_loc,
@@ -320,7 +320,7 @@ pub fn AstMaybe(
.other,
std.fmt.allocPrint(p.allocator, "${any}", .{bun.fmt.fmtIdentifier(name)}) catch unreachable,
) catch unreachable;
p.module_scope.generated.push(p.allocator, new_ref) catch unreachable;
bun.handleOom(p.module_scope.generated.append(p.allocator, new_ref));
named_export_entry.value_ptr.* = .{
.loc_ref = LocRef{
.loc = name_loc,
@@ -493,7 +493,7 @@ pub fn AstMaybe(
.other,
std.fmt.allocPrint(p.allocator, "${any}", .{bun.fmt.fmtIdentifier(name)}) catch unreachable,
) catch unreachable;
p.module_scope.generated.push(p.allocator, new_ref) catch unreachable;
bun.handleOom(p.module_scope.generated.append(p.allocator, new_ref));
named_export_entry.value_ptr.* = .{
.loc_ref = LocRef{
.loc = name_loc,
@@ -650,15 +650,18 @@ pub fn AstMaybe(
E.Unary{
.op = .un_typeof,
.value = expr,
.flags = .{
.was_originally_typeof_identifier = expr.data == .e_identifier,
},
},
.none,
logger.Loc.Empty,
),
.right = p.newExpr(
E.String{ .data = "undefined" },
.none,
logger.Loc.Empty,
),
},
.none,
logger.Loc.Empty,
);
}

@@ -668,19 +671,19 @@ pub fn AstMaybe(
.test_ = try p.checkIfDefinedHelper(identifier_expr),
.yes = p.newExpr(
E.Identifier{
.ref = (p.findSymbol(.none, "Object") catch unreachable).ref,
.ref = (p.findSymbol(logger.Loc.Empty, "Object") catch unreachable).ref,
},
.none,
logger.Loc.Empty,
),
.no = identifier_expr,
},
.none,
logger.Loc.Empty,
);
}

pub fn maybeCommaSpreadError(p: *P, _comma_after_spread: ?logger.Loc) void {
const comma_after_spread = _comma_after_spread orelse return;
if (comma_after_spread == .none) return;
if (comma_after_spread.start == -1) return;

p.log.addRangeError(p.source, logger.Range{ .loc = comma_after_spread, .len = 1 }, "Unexpected \",\" after rest pattern") catch unreachable;
}

@@ -200,7 +200,7 @@ pub fn Parse(
.class_name = name,
.extends = extends,
.close_brace_loc = close_brace_loc,
.ts_decorators = ExprNodeList.init(class_opts.ts_decorators),
.ts_decorators = ExprNodeList.fromOwnedSlice(class_opts.ts_decorators),
.class_keyword = class_keyword,
.body_loc = body_loc,
.properties = properties.items,
@@ -283,14 +283,14 @@ pub fn Parse(
}
const close_paren_loc = p.lexer.loc();
try p.lexer.expect(.t_close_paren);
return ExprListLoc{ .list = ExprNodeList.fromList(args), .loc = close_paren_loc };
return ExprListLoc{ .list = ExprNodeList.moveFromList(&args), .loc = close_paren_loc };
}

pub fn parseJSXPropValueIdentifier(noalias p: *P, previous_string_with_backslash_loc: *logger.Loc) !Expr {
// Use NextInsideJSXElement() not Next() so we can parse a JSX-style string literal
try p.lexer.nextInsideJSXElement();
if (p.lexer.token == .t_string_literal) {
previous_string_with_backslash_loc.* = p.lexer.loc().max(p.lexer.previous_backslash_quote_in_jsx.loc);
previous_string_with_backslash_loc.start = @max(p.lexer.loc().start, p.lexer.previous_backslash_quote_in_jsx.loc.start);
const expr = p.newExpr(try p.lexer.toEString(), previous_string_with_backslash_loc.*);

try p.lexer.nextInsideJSXElement();
@@ -362,7 +362,7 @@ pub fn Parse(
}

// There may be a "=" after the type (but not after an "as" cast)
if (is_typescript_enabled and p.lexer.token == .t_equals and p.forbid_suffix_after_as_loc != p.lexer.loc()) {
if (is_typescript_enabled and p.lexer.token == .t_equals and !p.forbid_suffix_after_as_loc.eql(p.lexer.loc())) {
try p.lexer.next();
item.* = Expr.assign(item.*, try p.parseExpr(.comma));
}
@@ -474,7 +474,10 @@ pub fn Parse(
if (opts.is_async) {
p.logExprErrors(&errors);
const async_expr = p.newExpr(E.Identifier{ .ref = try p.storeNameInRef("async") }, loc);
return p.newExpr(E.Call{ .target = async_expr, .args = ExprNodeList.init(items) }, loc);
return p.newExpr(E.Call{
.target = async_expr,
.args = ExprNodeList.fromOwnedSlice(items),
}, loc);
}

// Is this a chain of expressions and comma operators?
@@ -621,16 +624,17 @@ pub fn Parse(
try p.forbidLexicalDecl(token_range.loc);
}

const decls = try p.parseAndDeclareDecls(.other, opts);
var decls_list = try p.parseAndDeclareDecls(.other, opts);
const decls: G.Decl.List = .moveFromList(&decls_list);
return ExprOrLetStmt{
.stmt_or_expr = js_ast.StmtOrExpr{
.stmt = p.s(S.Local{
.kind = .k_let,
.decls = G.Decl.List.fromList(decls),
.decls = decls,
.is_export = opts.is_export,
}, token_range.loc),
},
.decls = decls.items,
.decls = decls.slice(),
};
}
},
@@ -650,19 +654,20 @@ pub fn Parse(
}
// p.markSyntaxFeature(.using, token_range.loc);
opts.is_using_statement = true;
const decls = try p.parseAndDeclareDecls(.constant, opts);
var decls_list = try p.parseAndDeclareDecls(.constant, opts);
const decls: G.Decl.List = .moveFromList(&decls_list);
if (!opts.is_for_loop_init) {
try p.requireInitializers(.k_using, decls.items);
try p.requireInitializers(.k_using, decls.slice());
}
return ExprOrLetStmt{
.stmt_or_expr = js_ast.StmtOrExpr{
.stmt = p.s(S.Local{
.kind = .k_using,
.decls = G.Decl.List.fromList(decls),
.decls = decls,
.is_export = false,
}, token_range.loc),
},
.decls = decls.items,
.decls = decls.slice(),
};
}
} else if (p.fn_or_arrow_data_parse.allow_await == .allow_expr and strings.eqlComptime(raw, "await")) {
@@ -689,19 +694,20 @@ pub fn Parse(
}
// p.markSyntaxFeature(.using, using_range.loc);
opts.is_using_statement = true;
const decls = try p.parseAndDeclareDecls(.constant, opts);
var decls_list = try p.parseAndDeclareDecls(.constant, opts);
const decls: G.Decl.List = .moveFromList(&decls_list);
if (!opts.is_for_loop_init) {
try p.requireInitializers(.k_await_using, decls.items);
try p.requireInitializers(.k_await_using, decls.slice());
}
return ExprOrLetStmt{
.stmt_or_expr = js_ast.StmtOrExpr{
.stmt = p.s(S.Local{
.kind = .k_await_using,
.decls = G.Decl.List.fromList(decls),
.decls = decls,
.is_export = false,
}, token_range.loc),
},
.decls = decls.items,
.decls = decls.slice(),
};
}
break :value Expr{
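These hunks thread `using` and `await using` declarations through requireInitializers, so a declaration without an initializer is rejected outside a for-loop head. A short TypeScript sketch (acquire is a hypothetical helper returning a disposable):

    using res = acquire();   // ok: res[Symbol.dispose]() runs when the scope exits
    // using res2;           // error: a `using` declaration requires an initializer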
@@ -886,7 +892,7 @@ pub fn Parse(
}

pub fn parsePropertyBinding(p: *P) anyerror!B.Property {
var key: js_ast.Expr = Expr{ .loc = .none, .data = Prefill.Data.EMissing };
var key: js_ast.Expr = Expr{ .loc = logger.Loc.Empty, .data = Prefill.Data.EMissing };
var is_computed = false;

switch (p.lexer.token) {
@@ -1213,7 +1219,7 @@ pub fn Parse(
switch (stmt.data) {
.s_return => |ret| {
if (ret.value == null and !p.latest_return_had_semicolon) {
returnWithoutSemicolonStart = stmt.loc.get();
returnWithoutSemicolonStart = stmt.loc.start;
needsCheck = false;
}
},
@@ -1225,7 +1231,7 @@ pub fn Parse(
.s_expr => {
try p.log.addWarning(
p.source,
.from(returnWithoutSemicolonStart + 6),
logger.Loc{ .start = returnWithoutSemicolonStart + 6 },
"The following expression is not returned because of an automatically-inserted semicolon",
);
},
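The relocated warning above targets the classic automatic-semicolon-insertion hazard after `return`. Sketch:

    function f() {
      return      // ASI terminates the statement here; f() returns undefined
        1 + 2;    // parsed as a separate, unreturned expression statement
    }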
@@ -72,7 +72,7 @@ pub fn ParseFn(

var func = try p.parseFn(name, FnOrArrowDataParse{
.needs_async_loc = loc,
.async_range = asyncRange orelse .none,
.async_range = asyncRange orelse logger.Range.None,
.has_async_range = asyncRange != null,
.allow_await = if (is_async) AwaitOrYield.allow_expr else AwaitOrYield.allow_ident,
.allow_yield = if (is_generator) AwaitOrYield.allow_expr else AwaitOrYield.allow_ident,
@@ -170,7 +170,7 @@ pub fn ParseFn(
AwaitOrYield.allow_ident;

// Don't suggest inserting "async" before anything if "await" is found
p.fn_or_arrow_data_parse.needs_async_loc = .none;
p.fn_or_arrow_data_parse.needs_async_loc = logger.Loc.Empty;

// If "super()" is allowed in the body, it's allowed in the arguments
p.fn_or_arrow_data_parse.allow_super_call = opts.allow_super_call;
@@ -281,7 +281,7 @@ pub fn ParseFn(
}

args.append(p.allocator, G.Arg{
.ts_decorators = ExprNodeList.init(ts_decorators),
.ts_decorators = ExprNodeList.fromOwnedSlice(ts_decorators),
.binding = arg,
.default = default_value,

@@ -240,7 +240,7 @@ pub fn ParseImportExport(
var items = ListManaged(js_ast.ClauseItem).initCapacity(p.allocator, 1) catch unreachable;
try p.lexer.expect(.t_open_brace);
var is_single_line = !p.lexer.has_newline_before;
var first_non_identifier_loc: logger.Loc = .from(0);
var first_non_identifier_loc = logger.Loc{ .start = 0 };
var had_type_only_exports = false;

while (p.lexer.token != .t_close_brace) {
@@ -263,7 +263,7 @@ pub fn ParseImportExport(
// // This is a syntax error
// export { default }
//
if (p.lexer.token != .t_identifier and first_non_identifier_loc.get() == 0) {
if (p.lexer.token != .t_identifier and first_non_identifier_loc.start == 0) {
first_non_identifier_loc = p.lexer.loc();
}
try p.lexer.next();
@@ -321,7 +321,7 @@ pub fn ParseImportExport(
// // This is a syntax error
// export { default }
//
if (p.lexer.token != .t_identifier and first_non_identifier_loc.get() == 0) {
if (p.lexer.token != .t_identifier and first_non_identifier_loc.start == 0) {
first_non_identifier_loc = p.lexer.loc();
}

@@ -397,7 +397,7 @@ pub fn ParseImportExport(

// Throw an error here if we found a keyword earlier and this isn't an
// "export from" statement after all
if (first_non_identifier_loc.get() != 0 and !p.lexer.isContextualKeyword("from")) {
if (first_non_identifier_loc.start != 0 and !p.lexer.isContextualKeyword("from")) {
const r = js_lexer.rangeOfIdentifier(p.source, first_non_identifier_loc);
try p.lexer.addRangeError(r, "Expected identifier but found \"{s}\"", .{p.source.textForRange(r)}, true);
return error.SyntaxError;

@@ -21,7 +21,7 @@ pub fn ParseJSXElement(
_ = try p.skipTypeScriptTypeArguments(true);
}

var previous_string_with_backslash_loc: logger.Loc = .none;
var previous_string_with_backslash_loc = logger.Loc{};
var properties = G.Property.List{};
var key_prop_i: i32 = -1;
var flags = Flags.JSXElement.Bitset{};
@@ -32,7 +32,7 @@ pub fn ParseJSXElement(
if (@as(JSXTag.TagType, tag.data) == .tag) {
start_tag = tag.data.tag;

var spread_loc: logger.Loc = .none;
var spread_loc: logger.Loc = logger.Loc.Empty;
var props = ListManaged(G.Property).init(p.allocator);
var first_spread_prop_i: i32 = -1;
var i: i32 = 0;
@@ -65,7 +65,7 @@ pub fn ParseJSXElement(

// Implicitly true value
// <button selected>
value = p.newExpr(E.Boolean{ .value = true }, key_range.loc.add(key_range.len));
value = p.newExpr(E.Boolean{ .value = true }, logger.Loc{ .start = key_range.loc.start + key_range.len });
} else {
value = try p.parseJSXPropValueIdentifier(&previous_string_with_backslash_loc);
}
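Per the "// Implicitly true value" comment in the hunk above, a bare JSX attribute parses as boolean true; the change only computes the attribute's source location differently. Sketch:

    <button selected />   // equivalent to <button selected={true} />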
@@ -148,7 +148,7 @@ pub fn ParseJSXElement(

const is_key_after_spread = key_prop_i > -1 and first_spread_prop_i > -1 and key_prop_i > first_spread_prop_i;
flags.setPresent(.is_key_after_spread, is_key_after_spread);
properties = G.Property.List.fromList(props);
properties = G.Property.List.moveFromList(&props);
if (is_key_after_spread and p.options.jsx.runtime == .automatic and !p.has_classic_runtime_warned) {
try p.log.addWarning(p.source, spread_loc, "\"key\" prop after a {...spread} is deprecated in JSX. Falling back to classic runtime.");
p.has_classic_runtime_warned = true;
@@ -168,7 +168,7 @@ pub fn ParseJSXElement(
// There is no "=" after the JSX attribute "text", so we expect a ">"
//
// This code special-cases this error to provide a less obscure error message.
if (p.lexer.token == .t_syntax_error and strings.eqlComptime(p.lexer.raw(), "\\") and previous_string_with_backslash_loc.get() > 0) {
if (p.lexer.token == .t_syntax_error and strings.eqlComptime(p.lexer.raw(), "\\") and previous_string_with_backslash_loc.start > 0) {
const r = p.lexer.range();
// Not dealing with this right now.
try p.log.addRangeError(p.source, r, "Invalid JSX escape - use XML entity codes quotes or pass a JavaScript string instead");
@@ -268,7 +268,7 @@ pub fn ParseJSXElement(

return p.newExpr(E.JSXElement{
.tag = end_tag.data.asExpr(),
.children = ExprNodeList.fromList(children),
.children = ExprNodeList.moveFromList(&children),
.properties = properties,
.key_prop_index = key_prop_i,
.flags = flags,

@@ -262,7 +262,16 @@ pub fn ParsePrefix(
return error.SyntaxError;
}

return p.newExpr(E.Unary{ .op = .un_typeof, .value = value }, loc);
return p.newExpr(
E.Unary{
.op = .un_typeof,
.value = value,
.flags = .{
.was_originally_typeof_identifier = value.data == .e_identifier,
},
},
loc,
);
}
fn t_delete(noalias p: *P) anyerror!Expr {
const loc = p.lexer.loc();
@@ -281,7 +290,14 @@ pub fn ParsePrefix(
}
}

return p.newExpr(E.Unary{ .op = .un_delete, .value = value }, loc);
return p.newExpr(E.Unary{
.op = .un_delete,
.value = value,
.flags = .{
.was_originally_delete_of_identifier_or_property_access = value.data == .e_identifier or
value.isPropertyAccess(),
},
}, loc);
}
fn t_plus(noalias p: *P) anyerror!Expr {
const loc = p.lexer.loc();
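The two hunks above tag `typeof` and `delete` expressions with whether their operand was originally a plain identifier or property access. The distinction matters in JavaScript because `typeof x` on an undeclared `x` is defined to return "undefined" without throwing, while an indirect read is not. Sketch:

    typeof x;        // never throws, even if x is undeclared
    typeof (0, x);   // evaluates x itself and can throw a ReferenceError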
@@ -339,7 +355,7 @@ pub fn ParsePrefix(
}
fn t_function(noalias p: *P) anyerror!Expr {
const loc = p.lexer.loc();
return try p.parseFnExpr(loc, false, .none);
return try p.parseFnExpr(loc, false, logger.Range.None);
}
fn t_class(noalias p: *P) anyerror!Expr {
const loc = p.lexer.loc();
@@ -391,7 +407,7 @@ pub fn ParsePrefix(
try p.lexer.unexpected();
return error.SyntaxError;
}
const range = logger.Range{ .loc = loc, .len = p.lexer.range().end().get() - loc.get() };
const range = logger.Range{ .loc = loc, .len = p.lexer.range().end().start - loc.start };

try p.lexer.next();
return p.newExpr(E.NewTarget{ .range = range }, loc);
@@ -418,7 +434,7 @@ pub fn ParsePrefix(
new.data.e_new.args = call_args.list;
new.data.e_new.close_parens_loc = call_args.loc;
} else {
new.data.e_new.close_parens_loc = .none;
new.data.e_new.close_parens_loc = .Empty;
new.data.e_new.args = .{};
}

@@ -430,7 +446,7 @@ pub fn ParsePrefix(
var is_single_line = !p.lexer.has_newline_before;
var items = ListManaged(Expr).init(p.allocator);
var self_errors = DeferredErrors{};
var comma_after_spread: logger.Loc = .none;
var comma_after_spread = logger.Loc{};

// Allow "in" inside arrays
const old_allow_in = p.allow_in;
@@ -500,8 +516,8 @@ pub fn ParsePrefix(
self_errors.mergeInto(errors.?);
}
return p.newExpr(E.Array{
.items = ExprNodeList.fromList(items),
.comma_after_spread = if (comma_after_spread == .none) null else comma_after_spread,
.items = ExprNodeList.moveFromList(&items),
.comma_after_spread = comma_after_spread.toNullable(),
.is_single_line = is_single_line,
.close_bracket_loc = close_bracket_loc,
}, loc);
@@ -512,7 +528,7 @@ pub fn ParsePrefix(
var is_single_line = !p.lexer.has_newline_before;
var properties = ListManaged(G.Property).init(p.allocator);
var self_errors = DeferredErrors{};
var comma_after_spread: logger.Loc = .none;
var comma_after_spread: logger.Loc = logger.Loc{};

// Allow "in" inside object literals
const old_allow_in = p.allow_in;
@@ -584,8 +600,8 @@ pub fn ParsePrefix(
}

return p.newExpr(E.Object{
.properties = G.Property.List.fromList(properties),
.comma_after_spread = if (comma_after_spread.get() > 0)
.properties = G.Property.List.moveFromList(&properties),
.comma_after_spread = if (comma_after_spread.start > 0)
comma_after_spread
else
null,

@@ -119,7 +119,7 @@ pub fn ParseProperty(
}

return G.Property{
.ts_decorators = ExprNodeList.init(opts.ts_decorators),
.ts_decorators = try ExprNodeList.fromSlice(p.allocator, opts.ts_decorators),
.kind = kind,
.flags = Flags.Property.init(.{
.is_computed = is_computed,
@@ -137,7 +137,7 @@ pub fn ParseProperty(
var errors = errors_;
// This while loop exists to conserve stack space by reducing (but not completely eliminating) recursion.
restart: while (true) {
var key: Expr = Expr{ .loc = .none, .data = .{ .e_missing = E.Missing{} } };
var key: Expr = Expr{ .loc = logger.Loc.Empty, .data = .{ .e_missing = E.Missing{} } };
const key_range = p.lexer.range();
var is_computed = false;

@@ -333,7 +333,7 @@ pub fn ParseProperty(
) catch unreachable;

block.* = G.ClassStaticBlock{
.stmts = js_ast.BabyList(Stmt).init(stmts),
.stmts = js_ast.BabyList(Stmt).fromOwnedSlice(stmts),
.loc = loc,
};

@@ -506,7 +506,7 @@ pub fn ParseProperty(
try p.lexer.expectOrInsertSemicolon();

return G.Property{
.ts_decorators = ExprNodeList.init(opts.ts_decorators),
.ts_decorators = try ExprNodeList.fromSlice(p.allocator, opts.ts_decorators),
.kind = kind,
.flags = Flags.Property.init(.{
.is_computed = is_computed,
@@ -531,7 +531,7 @@ pub fn ParseProperty(
.is_computed = is_computed,
}),
.key = key,
.value = Expr{ .data = .e_missing, .loc = .none },
.value = Expr{ .data = .e_missing, .loc = .{} },
};

try p.parseExprOrBindings(.comma, errors, &property.value.?);

@@ -493,9 +493,13 @@ pub fn ParseStmt(
}
fn t_var(p: *P, opts: *ParseStatementOptions, loc: logger.Loc) anyerror!Stmt {
try p.lexer.next();
const decls = try p.parseAndDeclareDecls(.hoisted, opts);
var decls = try p.parseAndDeclareDecls(.hoisted, opts);
try p.lexer.expectOrInsertSemicolon();
return p.s(S.Local{ .kind = .k_var, .decls = Decl.List.fromList(decls), .is_export = opts.is_export }, loc);
return p.s(S.Local{
.kind = .k_var,
.decls = Decl.List.moveFromList(&decls),
.is_export = opts.is_export,
}, loc);
}
fn t_const(p: *P, opts: *ParseStatementOptions, loc: logger.Loc) anyerror!Stmt {
if (opts.lexical_decl != .allow_all) {
@@ -509,14 +513,18 @@ pub fn ParseStmt(
return p.parseTypescriptEnumStmt(loc, opts);
}

const decls = try p.parseAndDeclareDecls(.constant, opts);
var decls = try p.parseAndDeclareDecls(.constant, opts);
try p.lexer.expectOrInsertSemicolon();

if (!opts.is_typescript_declare) {
try p.requireInitializers(.k_const, decls.items);
}

return p.s(S.Local{ .kind = .k_const, .decls = Decl.List.fromList(decls), .is_export = opts.is_export }, loc);
return p.s(S.Local{
.kind = .k_const,
.decls = Decl.List.moveFromList(&decls),
.is_export = opts.is_export,
}, loc);
}
fn t_if(p: *P, _: *ParseStatementOptions, loc: logger.Loc) anyerror!Stmt {
var current_loc = loc;
@@ -667,7 +675,7 @@ pub fn ParseStmt(
},
}
}
try cases.append(js_ast.Case{ .value = value, .body = body.items, .loc = .none });
try cases.append(js_ast.Case{ .value = value, .body = body.items, .loc = logger.Loc.Empty });
}
try p.lexer.expect(.t_close_brace);
return p.s(S.Switch{ .test_ = test_, .body_loc = body_loc, .cases = cases.items }, loc);
@@ -795,15 +803,17 @@ pub fn ParseStmt(
is_var = true;
try p.lexer.next();
var stmtOpts = ParseStatementOptions{};
decls.update(try p.parseAndDeclareDecls(.hoisted, &stmtOpts));
init_ = p.s(S.Local{ .kind = .k_var, .decls = Decl.List.fromList(decls) }, init_loc);
var decls_list = try p.parseAndDeclareDecls(.hoisted, &stmtOpts);
decls = .moveFromList(&decls_list);
init_ = p.s(S.Local{ .kind = .k_var, .decls = decls }, init_loc);
},
// for (const )
.t_const => {
try p.lexer.next();
var stmtOpts = ParseStatementOptions{};
decls.update(try p.parseAndDeclareDecls(.constant, &stmtOpts));
init_ = p.s(S.Local{ .kind = .k_const, .decls = Decl.List.fromList(decls) }, init_loc);
var decls_list = try p.parseAndDeclareDecls(.constant, &stmtOpts);
decls = .moveFromList(&decls_list);
init_ = p.s(S.Local{ .kind = .k_const, .decls = decls }, init_loc);
},
// for (;)
.t_semicolon => {},
@@ -1039,7 +1049,7 @@ pub fn ParseStmt(
// Parse TypeScript import assignment statements
if (p.lexer.token == .t_equals or opts.is_export or (opts.is_namespace_scope and !opts.is_typescript_declare)) {
p.esm_import_keyword = previous_import_keyword; // This wasn't an ESM import statement after all;
return p.parseTypeScriptImportEqualsStmt(loc, opts, .none, default_name);
return p.parseTypeScriptImportEqualsStmt(loc, opts, logger.Loc.Empty, default_name);
}
}

@@ -1115,7 +1125,9 @@ pub fn ParseStmt(
fn t_throw(p: *P, _: *ParseStatementOptions, loc: logger.Loc) anyerror!Stmt {
try p.lexer.next();
if (p.lexer.has_newline_before) {
try p.log.addError(p.source, loc.add(5), "Unexpected newline after \"throw\"");
try p.log.addError(p.source, logger.Loc{
.start = loc.start + 5,
}, "Unexpected newline after \"throw\"");
return error.SyntaxError;
}
const expr = try p.parseExpr(.lowest);
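`throw` is a restricted production, which is what the reshaped error above enforces: a line break may not separate `throw` from its expression. Sketch:

    throw new Error("boom");   // ok
    // throw
    //   new Error("boom");    // SyntaxError: unexpected newline after "throw"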
@@ -1291,7 +1303,7 @@ pub fn ParseStmt(
for (local.decls.slice()) |decl| {
try extractDeclsForBinding(decl.binding, &_decls);
}
decls.update(_decls);
decls = .moveFromList(&_decls);
},
else => {},
}

@@ -298,7 +298,7 @@ pub fn ParseSuffix(
// "(a?) => {}"
// "(a?: b) => {}"
// "(a?, b?) => {}"
if (is_typescript_enabled and left.loc == p.latest_arrow_arg_loc and (p.lexer.token == .t_colon or
if (is_typescript_enabled and left.loc.start == p.latest_arrow_arg_loc.start and (p.lexer.token == .t_colon or
p.lexer.token == .t_close_paren or p.lexer.token == .t_comma))
{
if (errors == null) {
@@ -826,7 +826,7 @@ pub fn ParseSuffix(
var optional_chain_: ?OptionalChain = null;
const optional_chain = &optional_chain_;
while (true) {
if (p.lexer.loc() == p.after_arrow_body_loc) {
if (p.lexer.loc().start == p.after_arrow_body_loc.start) {
while (true) {
switch (p.lexer.token) {
.t_comma => {
@@ -850,7 +850,7 @@ pub fn ParseSuffix(

if (comptime is_typescript_enabled) {
// Stop now if this token is forbidden to follow a TypeScript "as" cast
if (p.forbid_suffix_after_as_loc.get() > -1 and p.lexer.loc() == p.forbid_suffix_after_as_loc) {
if (p.forbid_suffix_after_as_loc.start > -1 and p.lexer.loc().start == p.forbid_suffix_after_as_loc.start) {
break;
}
}

@@ -201,7 +201,7 @@ pub fn ParseTypescript(
// run the renamer. For external-facing things the renamer will avoid
// collisions automatically so this isn't important for correctness.
arg_ref = p.newSymbol(.hoisted, strings.cat(p.allocator, "_", name_text) catch unreachable) catch unreachable;
p.current_scope.generated.push(p.allocator, arg_ref) catch unreachable;
bun.handleOom(p.current_scope.generated.append(p.allocator, arg_ref));
} else {
arg_ref = p.newSymbol(.hoisted, name_text) catch unreachable;
}
@@ -238,7 +238,7 @@ pub fn ParseTypescript(
try p.lexer.expect(.t_string_literal);
try p.lexer.expect(.t_close_paren);
if (!opts.is_typescript_declare) {
const args = try ExprNodeList.one(p.allocator, path);
const args = try ExprNodeList.initOne(p.allocator, path);
value = p.newExpr(E.Call{ .target = target, .close_paren_loc = p.lexer.loc(), .args = args }, loc);
}
} else {
@@ -266,7 +266,12 @@ pub fn ParseTypescript(
.binding = p.b(B.Identifier{ .ref = ref }, default_name_loc),
.value = value,
};
return p.s(S.Local{ .kind = kind, .decls = Decl.List.init(decls), .is_export = opts.is_export, .was_ts_import_equals = true }, loc);
return p.s(S.Local{
.kind = kind,
.decls = Decl.List.fromOwnedSlice(decls),
.is_export = opts.is_export,
.was_ts_import_equals = true,
}, loc);
}

pub fn parseTypescriptEnumStmt(p: *P, loc: logger.Loc, opts: *ParseStatementOptions) anyerror!Stmt {
@@ -372,7 +377,7 @@ pub fn ParseTypescript(
// run the renamer. For external-facing things the renamer will avoid
// collisions automatically so this isn't important for correctness.
arg_ref = p.newSymbol(.hoisted, strings.cat(p.allocator, "_", name_text) catch unreachable) catch unreachable;
p.current_scope.generated.push(p.allocator, arg_ref) catch unreachable;
bun.handleOom(p.current_scope.generated.append(p.allocator, arg_ref));
} else {
arg_ref = p.declareSymbol(.hoisted, name_loc, name_text) catch unreachable;
}

@@ -489,7 +489,7 @@ pub fn SkipTypescript(
},
else => {
if (comptime get_metadata) {
const find_result = p.findSymbol(.none, p.lexer.identifier) catch unreachable;
const find_result = p.findSymbol(logger.Loc.Empty, p.lexer.identifier) catch unreachable;
result.* = .{ .m_identifier = find_result.ref };
}

@@ -686,12 +686,12 @@ pub fn SkipTypescript(
if (result.* == .m_identifier) {
var dot = List(Ref).initCapacity(p.allocator, 2) catch unreachable;
dot.appendAssumeCapacity(result.m_identifier);
const find_result = p.findSymbol(.none, p.lexer.identifier) catch unreachable;
const find_result = p.findSymbol(logger.Loc.Empty, p.lexer.identifier) catch unreachable;
dot.appendAssumeCapacity(find_result.ref);
result.* = .{ .m_dot = dot };
} else if (result.* == .m_dot) {
if (p.lexer.isIdentifierOrKeyword()) {
const find_result = p.findSymbol(.none, p.lexer.identifier) catch unreachable;
const find_result = p.findSymbol(logger.Loc.Empty, p.lexer.identifier) catch unreachable;
result.m_dot.append(p.allocator, find_result.ref) catch unreachable;
}
}
@@ -904,7 +904,7 @@ pub fn SkipTypescript(
var has_out = false;
var expect_identifier = true;

var invalid_modifier_range: logger.Range = .none;
var invalid_modifier_range = logger.Range.None;

// Scan over a sequence of "in" and "out" modifiers (a.k.a. optional
// variance annotations) as well as "const" modifiers

@@ -11,7 +11,7 @@ pub fn Symbols(
}

pub fn findSymbolWithRecordUsage(noalias p: *P, loc: logger.Loc, name: string, comptime record_usage: bool) !FindSymbolResult {
var declare_loc: logger.Loc = .none;
var declare_loc: logger.Loc = logger.Loc.Empty;
var is_inside_with_scope = false;
// This function can show up in profiling.
// That's part of why we do this.

@@ -567,9 +567,9 @@ pub fn Visit(
// Make it an error to use "arguments" in a static class block
p.current_scope.forbid_arguments = true;

var list = property.class_static_block.?.stmts.listManaged(p.allocator);
var list = property.class_static_block.?.stmts.moveToListManaged(p.allocator);
p.visitStmts(&list, .fn_body) catch unreachable;
property.class_static_block.?.stmts = js_ast.BabyList(Stmt).fromList(list);
property.class_static_block.?.stmts = js_ast.BabyList(Stmt).moveFromList(&list);
p.popScope();

p.fn_or_arrow_data_visit = old_fn_or_arrow_data;
@@ -912,12 +912,13 @@ pub fn Visit(
before.ensureUnusedCapacity(@as(usize, @intFromBool(let_decls.items.len > 0)) + @as(usize, @intFromBool(var_decls.items.len > 0)) + non_fn_stmts.items.len) catch unreachable;

if (let_decls.items.len > 0) {
const decls: Decl.List = .moveFromList(&let_decls);
before.appendAssumeCapacity(p.s(
S.Local{
.kind = .k_let,
.decls = Decl.List.fromList(let_decls),
.decls = decls,
},
let_decls.items[0].value.?.loc,
decls.at(0).value.?.loc,
));
}

@@ -928,12 +929,13 @@ pub fn Visit(
before.appendAssumeCapacity(new);
}
} else {
const decls: Decl.List = .moveFromList(&var_decls);
before.appendAssumeCapacity(p.s(
S.Local{
.kind = .k_var,
.decls = Decl.List.fromList(var_decls),
.decls = decls,
},
var_decls.items[0].value.?.loc,
decls.at(0).value.?.loc,
));
}
}
@@ -1166,7 +1168,10 @@ pub fn Visit(
if (prev_stmt.data == .s_local and
local.canMergeWith(prev_stmt.data.s_local))
{
prev_stmt.data.s_local.decls.append(p.allocator, local.decls.slice()) catch unreachable;
prev_stmt.data.s_local.decls.appendSlice(
p.allocator,
local.decls.slice(),
) catch |err| bun.handleOom(err);
continue;
}
}

@@ -6,6 +6,50 @@ pub fn CreateBinaryExpressionVisitor(
return struct {
const P = js_parser.NewParser_(parser_feature__typescript, parser_feature__jsx, parser_feature__scan_only);

/// Try to optimize "typeof x === 'undefined'" to "typeof x > 'u'" or similar
/// Returns the optimized expression if successful, null otherwise
fn tryOptimizeTypeofUndefined(e_: *E.Binary, p: *P, replacement_op: js_ast.Op.Code) ?Expr {
// Check if this is a typeof comparison with "undefined"
const typeof_expr, const string_expr, const flip_comparison = exprs: {
// Try left side as typeof, right side as string
if (e_.left.data == .e_unary and e_.left.data.e_unary.op == .un_typeof) {
if (e_.right.data == .e_string and
e_.right.data.e_string.eqlComptime("undefined"))
{
break :exprs .{ e_.left, e_.right, false };
}

return null;
}

// Try right side as typeof, left side as string
if (e_.right.data == .e_unary and e_.right.data.e_unary.op == .un_typeof) {
if (e_.left.data == .e_string and
e_.left.data.e_string.eqlComptime("undefined"))
{
break :exprs .{ e_.right, e_.left, true };
}

return null;
}

return null;
};

// Create new string with "u"
const u_string = p.newExpr(E.String{ .data = "u" }, string_expr.loc);

// Create the optimized comparison
const left = if (flip_comparison) u_string else typeof_expr;
const right = if (flip_comparison) typeof_expr else u_string;

return p.newExpr(E.Binary{
.left = left,
.right = right,
.op = replacement_op,
}, e_.left.loc);
}

pub const BinaryExpressionVisitor = struct {
e: *E.Binary,
loc: logger.Loc,
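tryOptimizeTypeofUndefined implements the minifier trick quoted in its doc comment above and at the call sites that follow: every possible typeof result other than "undefined" sorts before "u", so the equality can become a one-character string comparison. Sketch of the transform on JavaScript input:

    typeof x === "undefined"   // minified to: typeof x > "u"
    typeof x !== "undefined"   // minified to: typeof x < "u"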
@@ -121,6 +165,11 @@ pub fn CreateBinaryExpressionVisitor(
}

if (p.options.features.minify_syntax) {
// "typeof x == 'undefined'" => "typeof x > 'u'"
if (tryOptimizeTypeofUndefined(e_, p, .bin_gt)) |optimized| {
return optimized;
}

// "x == void 0" => "x == null"
if (e_.left.data == .e_undefined) {
e_.left.data = .{ .e_null = E.Null{} };
@@ -146,6 +195,13 @@ pub fn CreateBinaryExpressionVisitor(
return p.newExpr(E.Boolean{ .value = equality.equal }, v.loc);
}

if (p.options.features.minify_syntax) {
// "typeof x === 'undefined'" => "typeof x > 'u'"
if (tryOptimizeTypeofUndefined(e_, p, .bin_gt)) |optimized| {
return optimized;
}
}

// const after_op_loc = locAfterOp(e_.);
// TODO: warn about equality check
// TODO: warn about typeof string
@@ -161,6 +217,13 @@ pub fn CreateBinaryExpressionVisitor(

return p.newExpr(E.Boolean{ .value = !equality.equal }, v.loc);
}
if (p.options.features.minify_syntax) {
// "typeof x != 'undefined'" => "typeof x < 'u'"
if (tryOptimizeTypeofUndefined(e_, p, .bin_lt)) |optimized| {
return optimized;
}
}

// const after_op_loc = locAfterOp(e_.);
// TODO: warn about equality check
// TODO: warn about typeof string
@@ -181,6 +244,13 @@ pub fn CreateBinaryExpressionVisitor(

return p.newExpr(E.Boolean{ .value = !equality.equal }, v.loc);
}

if (p.options.features.minify_syntax) {
// "typeof x !== 'undefined'" => "typeof x < 'u'"
if (tryOptimizeTypeofUndefined(e_, p, .bin_lt)) |optimized| {
return optimized;
}
}
},
.bin_nullish_coalescing => {
const nullorUndefined = SideEffects.toNullOrUndefined(p, e_.left.data);
@@ -360,6 +430,70 @@ pub fn CreateBinaryExpressionVisitor(
}
}
},

.bin_lt => {
if (p.should_fold_typescript_constant_expressions) {
if (Expr.extractNumericValuesInSafeRange(e_.left.data, e_.right.data)) |vals| {
return p.newExpr(E.Boolean{
.value = vals[0] < vals[1],
}, v.loc);
}
if (Expr.extractStringValues(e_.left.data, e_.right.data, p.allocator)) |vals| {
return p.newExpr(E.Boolean{
.value = vals[0].order(vals[1]) == .lt,
}, v.loc);
}
}
},
.bin_gt => {
if (p.should_fold_typescript_constant_expressions) {
if (Expr.extractNumericValuesInSafeRange(e_.left.data, e_.right.data)) |vals| {
return p.newExpr(E.Boolean{
.value = vals[0] > vals[1],
}, v.loc);
}
if (Expr.extractStringValues(e_.left.data, e_.right.data, p.allocator)) |vals| {
return p.newExpr(E.Boolean{
.value = vals[0].order(vals[1]) == .gt,
}, v.loc);
}
}
},
.bin_le => {
if (p.should_fold_typescript_constant_expressions) {
if (Expr.extractNumericValuesInSafeRange(e_.left.data, e_.right.data)) |vals| {
return p.newExpr(E.Boolean{
.value = vals[0] <= vals[1],
}, v.loc);
}
if (Expr.extractStringValues(e_.left.data, e_.right.data, p.allocator)) |vals| {
return p.newExpr(E.Boolean{
.value = switch (vals[0].order(vals[1])) {
.eq, .lt => true,
.gt => false,
},
}, v.loc);
}
}
},
.bin_ge => {
if (p.should_fold_typescript_constant_expressions) {
if (Expr.extractNumericValuesInSafeRange(e_.left.data, e_.right.data)) |vals| {
return p.newExpr(E.Boolean{
.value = vals[0] >= vals[1],
}, v.loc);
}
if (Expr.extractStringValues(e_.left.data, e_.right.data, p.allocator)) |vals| {
return p.newExpr(E.Boolean{
.value = switch (vals[0].order(vals[1])) {
.eq, .gt => true,
.lt => false,
},
}, v.loc);
}
}
},

// ---------------------------------------------------------------------------------------------------
.bin_assign => {
// Optionally preserve the name
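The new .bin_lt/.bin_gt/.bin_le/.bin_ge arms above fold comparisons of constants when should_fold_typescript_constant_expressions is set, using numeric values in the safe range or string ordering. Sketch of inputs that now fold:

    1 < 2        // folds to true
    "b" <= "a"   // folds to false via string ordering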
@@ -228,26 +228,30 @@ pub fn VisitExpr(
// That would reduce the amount of allocations a little
if (runtime == .classic or is_key_after_spread) {
// Arguments to createElement()
const args = p.allocator.alloc(Expr, 2 + children_count) catch unreachable;
// There are at least two args:
// - name of the tag
// - props
var i: usize = 2;
args[0] = tag;
var args = bun.BabyList(Expr).initCapacity(
p.allocator,
2 + children_count,
) catch |err| bun.handleOom(err);
args.appendAssumeCapacity(tag);

const num_props = e_.properties.len;
if (num_props > 0) {
const props = p.allocator.alloc(G.Property, num_props) catch unreachable;
bun.copy(G.Property, props, e_.properties.slice());
args[1] = p.newExpr(E.Object{ .properties = G.Property.List.init(props) }, expr.loc);
args.appendAssumeCapacity(p.newExpr(
E.Object{ .properties = G.Property.List.fromOwnedSlice(props) },
expr.loc,
));
} else {
args[1] = p.newExpr(E.Null{}, expr.loc);
args.appendAssumeCapacity(p.newExpr(E.Null{}, expr.loc));
}

const children_elements = e_.children.slice()[0..children_count];
for (children_elements) |child| {
args[i] = p.visitExpr(child);
i += @as(usize, @intCast(@intFromBool(args[i].data != .e_missing)));
const arg = p.visitExpr(child);
if (arg.data != .e_missing) {
args.appendAssumeCapacity(arg);
}
}

const target = p.jsxStringsToMemberExpression(expr.loc, p.options.jsx.factory) catch unreachable;
@@ -255,7 +259,7 @@ pub fn VisitExpr(
// Call createElement()
return p.newExpr(E.Call{
.target = if (runtime == .classic) target else p.jsxImport(.createElement, expr.loc),
.args = ExprNodeList.init(args[0..i]),
.args = args,
// Enable tree shaking
.can_be_unwrapped_if_unused = if (!p.options.ignore_dce_annotations and !p.options.jsx.side_effects) .if_unused else .never,
.close_paren_loc = e_.close_tag_loc,
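This classic-runtime path assembles the createElement argument list (tag, props or null, then each non-missing child), now through a growable BabyList rather than index bookkeeping over a raw slice. Roughly, with the default factory, for JSX input:

    // <div id="a">{child}</div> compiles to approximately:
    React.createElement("div", { id: "a" }, child);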
@@ -265,7 +269,7 @@ pub fn VisitExpr(
else if (runtime == .automatic) {
// --- These must be done in all cases --
const allocator = p.allocator;
var props: std.ArrayListUnmanaged(G.Property) = e_.properties.list();
var props = &e_.properties;

const maybe_key_value: ?ExprNodeIndex =
if (e_.key_prop_index > -1) props.orderedRemove(@intCast(e_.key_prop_index)).value else null;
@@ -296,8 +300,8 @@ pub fn VisitExpr(
// ->
// <div {{...foo}} />
// jsx("div", {...foo})
while (props.items.len == 1 and props.items[0].kind == .spread and props.items[0].value.?.data == .e_object) {
props = props.items[0].value.?.data.e_object.properties.list();
while (props.len == 1 and props.at(0).kind == .spread and props.at(0).value.?.data == .e_object) {
props = &props.at(0).value.?.data.e_object.properties;
}

// Typescript defines static jsx as children.len > 1 or single spread
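Per the "<div {{...foo}} />" comment above, the loop repeatedly unwraps a props object consisting of a single spread of another object literal before emitting the jsx() call. Sketch:

    // <div {...{ ...foo }} /> collapses to:
    jsx("div", { ...foo });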
@@ -326,7 +330,7 @@ pub fn VisitExpr(
args[0] = tag;

args[1] = p.newExpr(E.Object{
.properties = G.Property.List.fromList(props),
.properties = props.*,
}, expr.loc);

if (maybe_key_value) |key| {
@@ -360,7 +364,7 @@ pub fn VisitExpr(

return p.newExpr(E.Call{
.target = p.jsxImportAutomatic(expr.loc, is_static_jsx),
.args = ExprNodeList.init(args),
.args = ExprNodeList.fromOwnedSlice(args),
// Enable tree shaking
.can_be_unwrapped_if_unused = if (!p.options.ignore_dce_annotations and !p.options.jsx.side_effects) .if_unused else .never,
.was_jsx_element = true,
@@ -804,6 +808,7 @@ pub fn VisitExpr(
E.Unary{
.op = e_.op,
.value = comma.right,
.flags = e_.flags,
},
comma.right.loc,
),
@@ -1278,7 +1283,7 @@ pub fn VisitExpr(
// the try/catch statement is there to handle the potential run-time
// error from the unbundled require() call failing.
if (e_.args.len == 1) {
const first = e_.args.first_();
const first = e_.args.slice()[0];
const state = TransposeState{
.is_require_immediately_assigned_to_decl = in.is_immediately_assigned_to_decl and
first.data == .e_string,
@@ -1323,7 +1328,7 @@ pub fn VisitExpr(
}

if (e_.args.len == 1) {
const first = e_.args.first_();
const first = e_.args.slice()[0];
switch (first.data) {
.e_string => {
// require.resolve(FOO) => require.resolve(FOO)
@@ -1491,7 +1496,9 @@ pub fn VisitExpr(
}

if (p.options.features.minify_syntax) {
KnownGlobal.maybeMarkConstructorAsPure(e_, p.symbols.items);
if (KnownGlobal.minifyGlobalConstructor(p.allocator, e_, p.symbols.items, expr.loc, p.options.features.minify_whitespace)) |minified| {
return minified;
}
}
return expr;
}
@@ -1564,6 +1571,18 @@ pub fn VisitExpr(

e_.func = p.visitFunc(e_.func, expr.loc);

// Remove unused function names when minifying (only when bundling is enabled)
// unless --keep-names is specified
if (p.options.features.minify_syntax and p.options.bundle and
!p.options.features.minify_keep_names and
!p.current_scope.contains_direct_eval and
e_.func.name != null and
e_.func.name.?.ref != null and
p.symbols.items[e_.func.name.?.ref.?.innerIndex()].use_count_estimate == 0)
{
e_.func.name = null;
}

var final_expr = expr;

if (react_hook_data) |*hook| try_mark_hook: {
@@ -1585,6 +1604,19 @@ pub fn VisitExpr(
}

_ = p.visitClass(expr.loc, e_, Ref.None);

// Remove unused class names when minifying (only when bundling is enabled)
// unless --keep-names is specified
if (p.options.features.minify_syntax and p.options.bundle and
!p.options.features.minify_keep_names and
!p.current_scope.contains_direct_eval and
e_.class_name != null and
e_.class_name.?.ref != null and
p.symbols.items[e_.class_name.?.ref.?.innerIndex()].use_count_estimate == 0)
{
e_.class_name = null;
}

return expr;
}
};
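The two additions above drop names from function and class expressions that are never referenced, but only under minify_syntax while bundling, and never with --keep-names or in scopes containing direct eval. Sketch:

    const f = function helper() {};   // "helper" is unreferenced, so it minifies to:
    const f = function() {};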
@@ -126,7 +126,7 @@ pub fn VisitStmt(
            const name = p.loadNameFromRef(data.namespace_ref);
 
            data.namespace_ref = try p.newSymbol(.other, name);
-            try p.current_scope.generated.push(p.allocator, data.namespace_ref);
+            try p.current_scope.generated.append(p.allocator, data.namespace_ref);
            try p.recordDeclaredSymbol(data.namespace_ref);
 
            if (p.options.features.replace_exports.count() > 0) {
@@ -137,7 +137,7 @@ pub fn VisitStmt(
 
            if (p.options.features.replace_exports.count() > 0) {
                if (p.options.features.replace_exports.getPtr(item.alias)) |entry| {
-                    _ = p.injectReplacementExport(stmts, old_ref, .none, entry);
+                    _ = p.injectReplacementExport(stmts, old_ref, logger.Loc.Empty, entry);
 
                    continue;
                }
@@ -146,7 +146,7 @@ pub fn VisitStmt(
            const _name = p.loadNameFromRef(old_ref);
 
            const ref = try p.newSymbol(.import, _name);
-            try p.current_scope.generated.push(p.allocator, ref);
+            try p.current_scope.generated.append(p.allocator, ref);
            try p.recordDeclaredSymbol(ref);
            data.items[j] = item;
            data.items[j].name.ref = ref;
@@ -163,7 +163,7 @@ pub fn VisitStmt(
            for (data.items) |*item| {
                const _name = p.loadNameFromRef(item.name.ref.?);
                const ref = try p.newSymbol(.import, _name);
-                try p.current_scope.generated.push(p.allocator, ref);
+                try p.current_scope.generated.append(p.allocator, ref);
                try p.recordDeclaredSymbol(ref);
                item.name.ref = ref;
            }
@@ -176,14 +176,14 @@ pub fn VisitStmt(
            // "export * from 'path'"
            const name = p.loadNameFromRef(data.namespace_ref);
            data.namespace_ref = try p.newSymbol(.other, name);
-            try p.current_scope.generated.push(p.allocator, data.namespace_ref);
+            try p.current_scope.generated.append(p.allocator, data.namespace_ref);
            try p.recordDeclaredSymbol(data.namespace_ref);
 
            // "export * as ns from 'path'"
            if (data.alias) |alias| {
                if (p.options.features.replace_exports.count() > 0) {
                    if (p.options.features.replace_exports.getPtr(alias.original_name)) |entry| {
-                        _ = p.injectReplacementExport(stmts, p.declareSymbol(.other, .none, alias.original_name) catch unreachable, .none, entry);
+                        _ = p.injectReplacementExport(stmts, p.declareSymbol(.other, logger.Loc.Empty, alias.original_name) catch unreachable, logger.Loc.Empty, entry);
                        return;
                    }
                }
@@ -262,7 +262,7 @@ pub fn VisitStmt(
            }) {
                // declare a temporary ref for this
                const temp_id = p.generateTempRef("default_export");
-                try p.current_scope.generated.push(p.allocator, temp_id);
+                try p.current_scope.generated.append(p.allocator, temp_id);
 
                try stmts.append(Stmt.alloc(S.Local, .{
                    .kind = .k_const,
@@ -293,7 +293,7 @@ pub fn VisitStmt(
                    .value = data.value.expr,
                };
                stmts.appendAssumeCapacity(p.s(S.Local{
-                    .decls = G.Decl.List.init(decls),
+                    .decls = G.Decl.List.fromOwnedSlice(decls),
                }, stmt.loc));
                const items = bun.handleOom(p.allocator.alloc(js_ast.ClauseItem, 1));
                items[0] = js_ast.ClauseItem{
@@ -312,7 +312,7 @@ pub fn VisitStmt(
                if (entry.* == .replace) {
                    data.value.expr = entry.replace;
                } else {
-                    _ = p.injectReplacementExport(stmts, Ref.None, .none, entry);
+                    _ = p.injectReplacementExport(stmts, Ref.None, logger.Loc.Empty, entry);
                    return;
                }
            }
@@ -358,7 +358,7 @@ pub fn VisitStmt(
                if (entry.* == .replace) {
                    data.value = .{ .expr = entry.replace };
                } else {
-                    _ = p.injectReplacementExport(stmts, Ref.None, .none, entry);
+                    _ = p.injectReplacementExport(stmts, Ref.None, logger.Loc.Empty, entry);
                    return;
                }
            }
@@ -390,7 +390,7 @@ pub fn VisitStmt(
                }
 
                const temp_id = p.generateTempRef("default_export");
-                try p.current_scope.generated.push(p.allocator, temp_id);
+                try p.current_scope.generated.append(p.allocator, temp_id);
                break :brk temp_id;
            };
 
@@ -439,7 +439,7 @@ pub fn VisitStmt(
                if (entry.* == .replace) {
                    data.value = .{ .expr = entry.replace };
                } else {
-                    _ = p.injectReplacementExport(stmts, Ref.None, .none, entry);
+                    _ = p.injectReplacementExport(stmts, Ref.None, logger.Loc.Empty, entry);
                    return;
                }
            }
@@ -559,8 +559,8 @@ pub fn VisitStmt(
            if (react_hook_data) |*hook| {
                try stmts.append(p.getReactRefreshHookSignalDecl(hook.signature_cb));
                try stmts.append(p.s(S.SExpr{
-                    .value = p.getReactRefreshHookSignalInit(hook, Expr.initIdentifier(name_ref, .none)),
-                }, .none));
+                    .value = p.getReactRefreshHookSignalInit(hook, Expr.initIdentifier(name_ref, logger.Loc.Empty)),
+                }, logger.Loc.Empty));
            }
 
            if (p.current_scope == p.module_scope) {
@@ -865,7 +865,7 @@ pub fn VisitStmt(
                    .kind = .k_var,
                    .is_export = false,
                    .was_commonjs_export = true,
-                    .decls = G.Decl.List.init(decls),
+                    .decls = G.Decl.List.fromOwnedSlice(decls),
                },
                stmt.loc,
            ),
@@ -907,13 +907,13 @@ pub fn VisitStmt(
        pub fn s_return(noalias p: *P, noalias stmts: *ListManaged(Stmt), noalias stmt: *Stmt, noalias data: *S.Return) !void {
            // Forbid top-level return inside modules with ECMAScript-style exports
            if (p.fn_or_arrow_data_visit.is_outside_fn_or_arrow) {
-                const where: logger.Range = where: {
+                const where = where: {
                    if (p.esm_export_keyword.len > 0) {
                        break :where p.esm_export_keyword;
                    } else if (p.top_level_await_keyword.len > 0) {
                        break :where p.top_level_await_keyword;
                    } else {
-                        break :where .none;
+                        break :where logger.Range.None;
                    }
                };
 
@@ -1205,7 +1205,7 @@ pub fn VisitStmt(
                    .binding = p.b(B.Identifier{ .ref = id.ref }, loc),
                    .value = p.newExpr(E.Identifier{ .ref = temp_ref }, loc),
                };
-                break :bindings G.Decl.List.init(decls);
+                break :bindings G.Decl.List.fromOwnedSlice(decls);
                },
            }, loc);

@@ -69,8 +69,8 @@ pub fn runWithBody(ctx: *ErrorReportRequest, body: []const u8, r: AnyResponse) !
        .function_name = .init(function_name),
        .source_url = .init(file_name),
        .position = if (line > 0) .{
-            .line = .fromOneBased(line + 1),
-            .column = .fromOneBased(@max(1, column)),
+            .line = .fromOneBased(line),
+            .column = if (column < 1) .invalid else .fromOneBased(column),
            .line_start_byte = 0,
        } else .{
            .line = .invalid,
@@ -78,6 +78,7 @@ pub fn runWithBody(ctx: *ErrorReportRequest, body: []const u8, r: AnyResponse) !
            .line_start_byte = 0,
        },
+        .code_type = .None,
        .is_async = false,
        .remapped = false,
    });
}
@@ -146,10 +147,10 @@ pub fn runWithBody(ctx: *ErrorReportRequest, body: []const u8, r: AnyResponse) !
 
    // Remap the frame
    const remapped = result.mappings.find(
-        frame.position.line.oneBased(),
-        frame.position.column.zeroBased(),
+        frame.position.line,
+        frame.position.column,
    );
-    if (remapped) |remapped_position| {
+    if (remapped) |*remapped_position| {
        frame.position = .{
            .line = .fromZeroBased(remapped_position.originalLine()),
            .column = .fromZeroBased(remapped_position.originalColumn()),
@@ -347,7 +348,7 @@ fn extractJsonEncodedSourceCode(contents: []const u8, target_line: u32, comptime
        .source = .initEmptyFile(""),
        .allocator = arena,
        .should_redact_logs = false,
-        .prev_error_loc = .none,
+        .prev_error_loc = .Empty,
    };
    defer log.deinit();

@@ -207,8 +207,9 @@ pub const Entry = struct {
        .original_column = 0,
    };
 
-    // +2 because the magic fairy in my dreams said it would align the source maps.
-    var lines_between: u32 = runtime.line_count + 2;
+    // The runtime.line_count counts newlines (e.g., 2941 for a 2942-line file).
+    // The runtime ends at line 2942 with })({ so modules start after that.
+    var lines_between: u32 = runtime.line_count;
 
    // Join all of the mappings together.
    for (0..map_files.len) |i| switch (map_files.get(i)) {

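Note: the replaced `+ 2` fudge factor was compensating for an off-by-one in how `line_count` was interpreted. Relating a newline count to line numbers is easy to get wrong; a tiny self-contained check of the arithmetic in the new comment (illustrative only, not Bun's code):

```ts
// A prelude with N newlines has N + 1 lines.
const runtimeText = "line 1\nline 2\nline 3"; // 3 lines...
const lineCount = runtimeText.split("\n").length - 1; // ...but 2 newlines

// Zero-based, the runtime occupies lines 0..lineCount, so concatenated
// module code begins at zero-based line lineCount + 1. A source-map join
// therefore needs an offset of exactly `lineCount` lines between files;
// the old `lineCount + 2` appears to have shifted every mapping too far.
console.log({ lineCount, firstModuleLineZeroBased: lineCount + 1 });
```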
@@ -251,8 +251,8 @@ export async function onRuntimeError(err: any, fatal = false, async = false) {
  writer.stringWithLength(browserUrl);
  writer.u32(parsed.length);
  for (const frame of parsed) {
-    writer.u32(frame.line ?? 0);
-    writer.u32(frame.col ?? 0);
+    writer.i32(frame.line ?? 0);
+    writer.i32(frame.col ?? 0);
    writer.stringWithLength(frame.fn ?? "");
    const fileName = frame.file;
    if (fileName) {

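Note: the receiving side (see the `runWithBody` hunks above) treats `line > 0` and `column < 1` as validity checks, so positions are effectively signed. Writing them as `u32` would silently wrap a negative sentinel; `i32` round-trips it. A quick `DataView` demonstration of the failure mode (a hypothetical sketch, not Bun's actual wire protocol):

```ts
const view = new DataView(new ArrayBuffer(8));

view.setInt32(0, -1, true); // signed write: "-1 = unknown" survives
view.setUint32(4, -1 >>> 0, true); // unsigned write wraps the sentinel

console.log(view.getInt32(0, true)); // -1
console.log(view.getUint32(4, true)); // 4294967295, the sentinel is lost
```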
@@ -47,7 +47,7 @@ pub const Run = struct {
        vm.preload = ctx.preloads;
        vm.argv = ctx.passthrough;
        vm.arena = &run.arena;
-        vm.allocator = arena.allocator();
+        vm.allocator = vm.arena.allocator();
 
        b.options.install = ctx.install;
        b.resolver.opts.install = ctx.install;
@@ -135,7 +135,7 @@ pub const Run = struct {
            null,
        );
        try bundle.runEnvLoader(false);
-        const mini = jsc.MiniEventLoop.initGlobal(bundle.env);
+        const mini = jsc.MiniEventLoop.initGlobal(bundle.env, null);
        mini.top_level_dir = ctx.args.absolute_working_dir orelse "";
        return bun.shell.Interpreter.initAndRunFromFile(ctx, mini, entry_path);
    }
@@ -185,7 +185,7 @@ pub const Run = struct {
        vm.preload = ctx.preloads;
        vm.argv = ctx.passthrough;
        vm.arena = &run.arena;
-        vm.allocator = arena.allocator();
+        vm.allocator = vm.arena.allocator();
 
        if (ctx.runtime_options.eval.script.len > 0) {
            const script_source = try bun.default_allocator.create(logger.Source);

@@ -1111,7 +1111,7 @@ pub fn transpileSourceCode(
            .allocator = null,
            .specifier = input_specifier,
            .source_url = input_specifier.createIfDifferent(path.text),
-            .jsvalue_for_export = parse_result.ast.parts.@"[0]"().stmts[0].data.s_expr.value.toJS(allocator, globalObject orelse jsc_vm.global) catch |e| panic("Unexpected JS error: {s}", .{@errorName(e)}),
+            .jsvalue_for_export = parse_result.ast.parts.at(0).stmts[0].data.s_expr.value.toJS(allocator, globalObject orelse jsc_vm.global) catch |e| panic("Unexpected JS error: {s}", .{@errorName(e)}),
            .tag = .exports_object,
        };
    }
@@ -1947,7 +1947,8 @@ export fn Bun__transpileVirtualModule(
) bool {
    jsc.markBinding(@src());
    const jsc_vm = globalObject.bunVM();
-    bun.assert(jsc_vm.plugin_runner != null);
+    // Plugin runner is not required for virtual modules created via build.module()
+    // bun.assert(jsc_vm.plugin_runner != null);
 
    var specifier_slice = specifier_ptr.toUTF8(jsc_vm.allocator);
    const specifier = specifier_slice.slice();

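Note: the relaxed assertion matches virtual modules registered through the plugin API's `build.module()`, which can exist without a plugin runner. For reference, this is the standard way to create such a module (ordinary Bun plugin API; the specifier and contents here are made up):

```ts
import { plugin } from "bun";

plugin({
  name: "virtual-config",
  setup(build) {
    // Registers a virtual module resolvable as "app:config".
    build.module("app:config", () => ({
      contents: "export default { mode: 'development' };",
      loader: "js",
    }));
  },
});

// elsewhere: import config from "app:config";
```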
@@ -12,7 +12,8 @@
 /// Version 13: Hoist `import.meta.require` definition, see #15738
 /// Version 14: Updated global defines table list.
 /// Version 15: Updated global defines table list.
-const expected_version = 15;
+/// Version 16: Added typeof undefined minification optimization.
+const expected_version = 16;
 
 const debug = Output.scoped(.cache, .visible);
 const MINIMUM_CACHE_SIZE = 50 * 1024;

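Note: the cache-version comment only names the new optimization; the transform itself is not in this excerpt. The usual shape of a `typeof undefined` minification (the trick popularized by esbuild, shown here as an assumption about what version 16 refers to) is shortening the string comparison:

```ts
// before
if (typeof globalThis.myFeature === "undefined") {
  // load polyfill...
}

// after: equivalent and shorter, because "undefined" is the only typeof
// result that sorts after "u" ("boolean", "number", "string", "object",
// "function", "symbol", "bigint" all compare less than "u")
if (typeof globalThis.myFeature > "u") {
  // load polyfill...
}
```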
@@ -298,13 +298,13 @@ pub fn get(this: *SavedSourceMap, path: string) ?*ParsedSourceMap {
 pub fn resolveMapping(
    this: *SavedSourceMap,
    path: []const u8,
-    line: i32,
-    column: i32,
+    line: bun.Ordinal,
+    column: bun.Ordinal,
    source_handling: SourceMap.SourceContentHandling,
 ) ?SourceMap.Mapping.Lookup {
    const parse = this.getWithContent(path, switch (source_handling) {
        .no_source_contents => .mappings_only,
-        .source_contents => .{ .all = .{ .line = line, .column = column } },
+        .source_contents => .{ .all = .{ .line = @max(line.zeroBased(), 0), .column = @max(column.zeroBased(), 0) } },
    });
    const map = parse.map orelse return null;

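Note: passing `bun.Ordinal` instead of raw `i32` pushes the zero-based/one-based bookkeeping and the invalid sentinel into the type, so clamping like `@max(..., 0)` happens once at the boundary rather than at every caller (compare the `remapStackFramePositions` and `remapZigException` hunks below, which lose their `@max` calls). The same idea as a TypeScript branded-type sketch (not Bun's implementation):

```ts
// Mixing bases becomes a type error instead of an off-by-one bug.
type OneBased = number & { readonly brand: "one-based" };
type ZeroBased = number & { readonly brand: "zero-based" };

const fromOneBased = (n: number) => n as OneBased;
const isInvalid = (n: OneBased) => n < 1; // mirrors `.invalid`
const toZeroBased = (n: OneBased) => (n - 1) as ZeroBased;

declare function findMapping(line: ZeroBased, column: ZeroBased): void;

const line = fromOneBased(42);
findMapping(toZeroBased(line), toZeroBased(fromOneBased(7)));
// findMapping(line, ...) would not compile: OneBased is not ZeroBased.
```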
@@ -838,7 +838,10 @@ extern fn Zig__GlobalObject__destructOnExit(*JSGlobalObject) void;
 
 pub fn globalExit(this: *VirtualMachine) noreturn {
    if (this.shouldDestructMainThreadOnExit()) {
+        if (this.eventLoop().forever_timer) |t| t.deinit(true);
        Zig__GlobalObject__destructOnExit(this.global);
+        this.transpiler.deinit();
+        this.gc_controller.deinit();
        this.deinit();
    }
    bun.Global.exit(this.exit_handler.exit_code);
@@ -1723,7 +1726,7 @@ pub fn resolveMaybeNeedsTrailingSlash(
    const msg = logger.Msg{
        .data = logger.rangeData(
            null,
-            .none,
+            logger.Range.None,
            printed,
        ),
    };
@@ -1804,7 +1807,7 @@ pub fn resolveMaybeNeedsTrailingSlash(
    break :brk logger.Msg{
        .data = logger.rangeData(
            null,
-            .none,
+            logger.Range.None,
            printed,
        ),
        .metadata = .{
@@ -1851,14 +1854,14 @@ pub fn processFetchLog(globalThis: *JSGlobalObject, specifier: bun.String, refer
        break :brk logger.Msg{
            .data = logger.rangeData(
                null,
-                .none,
+                logger.Range.None,
                std.fmt.allocPrint(globalThis.allocator(), "Unexpected pending import in \"{}\". To automatically install npm packages with Bun, please use an import statement instead of require() or dynamic import().\nThis error can also happen if dependencies import packages which are not referenced anywhere. Worst case, run `bun install` and opt-out of the node_modules folder until we come up with a better way to handle this error.", .{specifier}) catch unreachable,
            ),
        };
    }
 
    break :brk logger.Msg{
-        .data = logger.rangeData(null, .none, std.fmt.allocPrint(globalThis.allocator(), "{s} while building {}", .{ @errorName(err), specifier }) catch unreachable),
+        .data = logger.rangeData(null, logger.Range.None, std.fmt.allocPrint(globalThis.allocator(), "{s} while building {}", .{ @errorName(err), specifier }) catch unreachable),
    };
 };
 {
@@ -1915,7 +1918,6 @@ pub fn processFetchLog(globalThis: *JSGlobalObject, specifier: bun.String, refer
    }
 }
 
-// TODO:
 pub fn deinit(this: *VirtualMachine) void {
    this.auto_killer.deinit();
 
@@ -2030,7 +2032,7 @@ fn loadPreloads(this: *VirtualMachine) !?*JSInternalPromise {
        .failure => |e| {
            this.log.addErrorFmt(
                null,
-                .none,
+                logger.Loc.Empty,
                this.allocator,
                "{s} resolving preload {}",
                .{
@@ -2043,7 +2045,7 @@ fn loadPreloads(this: *VirtualMachine) !?*JSInternalPromise {
        .pending, .not_found => {
            this.log.addErrorFmt(
                null,
-                .none,
+                logger.Loc.Empty,
                this.allocator,
                "preload not found {}",
                .{
@@ -2606,8 +2608,8 @@ pub fn remapStackFramePositions(this: *VirtualMachine, frames: [*]jsc.ZigStackFr
 
    if (this.resolveSourceMapping(
        sourceURL.slice(),
-        @max(frame.position.line.zeroBased(), 0),
-        @max(frame.position.column.zeroBased(), 0),
+        frame.position.line,
+        frame.position.column,
        .no_source_contents,
    )) |lookup| {
        const source_map = lookup.source_map;
@@ -2745,8 +2747,8 @@ pub fn remapZigException(
    else
        this.resolveSourceMapping(
            top_source_url.slice(),
-            @max(top.position.line.zeroBased(), 0),
-            @max(top.position.column.zeroBased(), 0),
+            top.position.line,
+            top.position.column,
            .source_contents,
        );
 
@@ -2834,8 +2836,8 @@ pub fn remapZigException(
    defer source_url.deinit();
    if (this.resolveSourceMapping(
        source_url.slice(),
-        @max(frame.position.line.zeroBased(), 0),
-        @max(frame.position.column.zeroBased(), 0),
+        frame.position.line,
+        frame.position.column,
        .no_source_contents,
    )) |lookup| {
        defer if (lookup.source_map) |map| map.deref();
@@ -3440,8 +3442,8 @@ pub noinline fn printGithubAnnotation(exception: *ZigException) void {
 pub fn resolveSourceMapping(
    this: *VirtualMachine,
    path: []const u8,
-    line: i32,
-    column: i32,
+    line: Ordinal,
+    column: Ordinal,
    source_handling: SourceMap.SourceContentHandling,
 ) ?SourceMap.Mapping.Lookup {
    return this.source_mappings.resolveMapping(path, line, column, source_handling) orelse {

@@ -1814,7 +1814,7 @@ pub const JSZstd = struct {
        output = try allocator.realloc(output, compressed_size);
    }
 
-    return jsc.JSValue.createBuffer(globalThis, output, bun.default_allocator);
+    return jsc.JSValue.createBuffer(globalThis, output);
 }
 
 pub fn decompressSync(globalThis: *JSGlobalObject, callframe: *jsc.CallFrame) bun.JSError!JSValue {
@@ -1849,7 +1849,7 @@ pub const JSZstd = struct {
    // mimalloc doesn't care about the self-reported size of the slice.
    output.len = actual_size;
 
-    return jsc.JSValue.createBuffer(globalThis, output, bun.default_allocator);
+    return jsc.JSValue.createBuffer(globalThis, output);
 }
 
 // --- Async versions ---
@@ -1951,7 +1951,7 @@ pub const JSZstd = struct {
    }
 
    const output_slice = this.output;
-    const buffer_value = jsc.JSValue.createBuffer(globalThis, output_slice, bun.default_allocator);
+    const buffer_value = jsc.JSValue.createBuffer(globalThis, output_slice);
    this.output = &[_]u8{};
    promise.resolve(globalThis, buffer_value);
 }
@@ -582,7 +582,7 @@ pub fn toBuffer(
            return jsc.JSValue.createBufferWithCtx(globalThis, slice, ctx, callback);
        }
 
-        return jsc.JSValue.createBuffer(globalThis, slice, null);
+        return jsc.JSValue.createBuffer(globalThis, slice);
        },
    }
 }

@@ -409,6 +409,9 @@ pub const JSBundler = struct {
            if (try minify.getBooleanLoose(globalThis, "identifiers")) |syntax| {
                this.minify.identifiers = syntax;
            }
+            if (try minify.getBooleanLoose(globalThis, "keepNames")) |keep_names| {
+                this.minify.keep_names = keep_names;
+            }
        } else {
            return globalThis.throwInvalidArguments("Expected minify to be a boolean or an object", .{});
        }
@@ -688,6 +691,7 @@ pub const JSBundler = struct {
        whitespace: bool = false,
        identifiers: bool = false,
        syntax: bool = false,
+        keep_names: bool = false,
    };
 
    pub const Serve = struct {
@@ -774,7 +778,7 @@ pub const JSBundler = struct {
        specifier: string = "",
        importer_source_index: u32,
        import_record_index: u32 = 0,
-        range: logger.Range = .none,
+        range: logger.Range = logger.Range.None,
        original_target: Target,
 
        // pub inline fn loader(_: *const MiniImportRecord) ?options.Loader {

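Note: with `keep_names` parsed from the options object above, the JS API accepts the new flag alongside the existing minify booleans:

```ts
// Matches the parsing above: `minify` may be a boolean or an object.
const result = await Bun.build({
  entrypoints: ["./src/index.ts"],
  outdir: "./dist",
  minify: {
    whitespace: true,
    identifiers: true,
    syntax: true,
    keepNames: true, // the flag added in this diff
  },
});
console.log(result.success);
```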
@@ -591,7 +591,7 @@ fn exportReplacementValue(value: JSValue, globalThis: *JSGlobalObject, allocator
                .value = value.toBoolean(),
            },
        },
-        .loc = .none,
+        .loc = logger.Loc.Empty,
    };
 }
 
@@ -600,7 +600,7 @@ fn exportReplacementValue(value: JSValue, globalThis: *JSGlobalObject, allocator
        .data = .{
            .e_number = .{ .value = value.asNumber() },
        },
-        .loc = .none,
+        .loc = logger.Loc.Empty,
    };
 }
 
@@ -609,7 +609,7 @@ fn exportReplacementValue(value: JSValue, globalThis: *JSGlobalObject, allocator
        .data = .{
            .e_null = .{},
        },
-        .loc = .none,
+        .loc = logger.Loc.Empty,
    };
 }
 
@@ -618,7 +618,7 @@ fn exportReplacementValue(value: JSValue, globalThis: *JSGlobalObject, allocator
        .data = .{
            .e_undefined = .{},
        },
-        .loc = .none,
+        .loc = logger.Loc.Empty,
    };
 }
 
@@ -632,7 +632,7 @@ fn exportReplacementValue(value: JSValue, globalThis: *JSGlobalObject, allocator
        .data = .{
            .e_string = out,
        },
-        .loc = .none,
+        .loc = logger.Loc.Empty,
    };
 }

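Note: `exportReplacementValue` turns plain JS values (boolean, number, null, undefined, string) into AST literals for the transpiler's export-replacement feature. That feature is exposed through `Bun.Transpiler`'s `exports` option; a usage sketch (the exact option shape is an assumption; check bun-types):

```ts
const transpiler = new Bun.Transpiler({
  loader: "ts",
  exports: {
    // Each replacement value flows through exportReplacementValue:
    replace: { IS_DEV: false, VERSION: "1.2.3", BUILD: 42 },
    eliminate: ["debugOnlyExport"],
  },
});

const out = transpiler.transformSync(
  `export let IS_DEV = detect();
   export let VERSION = readPkg();
   export let BUILD = count();
   export let debugOnlyExport = 1;`,
);
console.log(out);
```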
@@ -1014,7 +1014,7 @@ fn namedExportsToJS(global: *JSGlobalObject, named_exports: *JSAst.Ast.NamedExpo
    });
    var i: usize = 0;
    while (named_exports_iter.next()) |entry| {
-        names[i] = bun.String.cloneUTF8(entry.key_ptr.*);
+        names[i] = bun.String.fromBytes(entry.key_ptr.*);
        i += 1;
    }
    return bun.String.toJSArray(global, names);

Some files were not shown because too many files have changed in this diff.