Compare commits

..

1 Commits

Author SHA1 Message Date
Claude Bot
34f8ad90ab Remove unused files from repository
- Delete unused libtcc1.a.macos-aarch64 archive (never referenced, ARM64 doesn't need x64 compiler runtime)
- Remove misctools/publish-examples.js (references non-existent examples/ directory)
- Remove misctools/headers-cleaner.js (no references in codebase or build scripts)

The libtcc1.a.macos-aarch64 file was never used because:
- FFI code only compiles libtcc1.c on x64 platforms (Environment.isX64 check)
- ARM64 has native 64-bit operations and doesn't need these compiler runtime functions
- No equivalent files exist for other platforms

The misctools scripts were last modified in 2022 and are no longer used.

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-09-08 05:39:26 +00:00
812 changed files with 24033 additions and 48939 deletions

View File

@@ -371,7 +371,7 @@ function getZigAgent(platform, options) {
* @returns {Agent}
*/
function getTestAgent(platform, options) {
const { os, arch, profile } = platform;
const { os, arch } = platform;
if (os === "darwin") {
return {
@@ -391,13 +391,6 @@ function getTestAgent(platform, options) {
}
if (arch === "aarch64") {
if (profile === "asan") {
return getEc2Agent(platform, options, {
instanceType: "c8g.2xlarge",
cpuCount: 2,
threadsPerCore: 1,
});
}
return getEc2Agent(platform, options, {
instanceType: "c8g.xlarge",
cpuCount: 2,
@@ -405,13 +398,6 @@ function getTestAgent(platform, options) {
});
}
if (profile === "asan") {
return getEc2Agent(platform, options, {
instanceType: "c7i.2xlarge",
cpuCount: 2,
threadsPerCore: 1,
});
}
return getEc2Agent(platform, options, {
instanceType: "c7i.xlarge",
cpuCount: 2,

View File

@@ -3,21 +3,7 @@ FormatStyle: webkit
Checks: >
-*,
clang-analyzer-*,
-clang-analyzer-optin.core.EnumCastOutOfRange,
-clang-analyzer-webkit.UncountedLambdaCapturesChecker,
-clang-analyzer-webkit.RefCntblBaseVirtualDtor,
-clang-analyzer-security.insecureAPI.DeprecatedOrUnsafeBufferHandling,
-clang-analyzer-deadcode.DeadStores,
bugprone-*,
-bugprone-easily-swappable-parameters,
-bugprone-narrowing-conversions,
-bugprone-switch-missing-default-case,
-bugprone-assignment-in-if-condition,
-bugprone-implicit-widening-of-multiplication-result,
performance-*,
-performance-no-int-to-ptr,
cppcoreguidelines-*,
-cppcoreguidelines-avoid-magic-numbers,
-cppcoreguidelines-narrowing-conversions,
-cppcoreguidelines-macro-to-enum,
-cppcoreguidelines-avoid-non-const-global-variables,
-clang-analyzer-optin.core.EnumCastOutOfRange
-clang-analyzer-webkit.UncountedLambdaCapturesChecker
-clang-analyzer-optin.core.EnumCastOutOfRange
-clang-analyzer-webkit.RefCntblBaseVirtualDtor

4
.github/CODEOWNERS vendored
View File

@@ -3,7 +3,3 @@
# Tests
/test/expectations.txt @Jarred-Sumner
# Types
*.d.ts @alii
/packages/bun-types/ @alii

View File

@@ -25,7 +25,7 @@ runs:
echo "version=$LATEST" >> $GITHUB_OUTPUT
echo "message=$MESSAGE" >> $GITHUB_OUTPUT
- name: Create Pull Request
uses: peter-evans/create-pull-request@v7
uses: peter-evans/create-pull-request@v4
with:
add-paths: |
CMakeLists.txt

View File

@@ -1,19 +0,0 @@
name: Auto Assign Types Issues
on:
issues:
types: [labeled]
jobs:
auto-assign:
runs-on: ubuntu-latest
if: github.event.label.name == 'types'
permissions:
issues: write
steps:
- name: Assign to alii
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GH_REPO: ${{ github.repository }}
run: |
gh issue edit ${{ github.event.issue.number }} --add-assignee alii

View File

@@ -1,15 +1,15 @@
name: clang-tidy:check
name: C++ Linter comment
permissions:
actions: read
pull-requests: write
on:
# pull_request:
workflow_dispatch:
env:
LLVM_VERSION_MAJOR: "19"
workflow_run:
workflows:
- lint-cpp
types:
- completed
jobs:
comment-lint:
@@ -17,39 +17,50 @@ jobs:
name: Comment
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: ./.github/actions/setup-bun
- name: Run clang-tidy
- name: Download Comment
uses: actions/download-artifact@v4
with:
name: format.log
github-token: ${{ github.token }}
run-id: ${{ github.event.workflow_run.id }}
- name: PR Number
uses: actions/download-artifact@v4
with:
name: pr-number.txt
github-token: ${{ github.token }}
run-id: ${{ github.event.workflow_run.id }}
- name: Did Fail
uses: actions/download-artifact@v4
with:
name: did_fail.txt
github-token: ${{ github.token }}
run-id: ${{ github.event.workflow_run.id }}
- name: Setup Environment
id: env
shell: bash
run: |
sudo apt-get remove -qq clang-18
wget -qO- https://apt.llvm.org/llvm-snapshot.gpg.key | sudo tee /etc/apt/trusted.gpg.d/apt.llvm.org.asc > /dev/null
echo "deb http://apt.llvm.org/$(lsb_release -cs)/ llvm-toolchain-$(lsb_release -cs)-${{ env.LLVM_VERSION_MAJOR }} main" | sudo tee /etc/apt/sources.list.d/llvm.list > /dev/null
sudo apt-get update -qq
sudo apt-get install -y -qq --no-install-recommends --no-install-suggests -o=Dpkg::Use-Pty=0 clang-format-${{ env.LLVM_VERSION_MAJOR }}
# Copy to outputs
echo "pr-number=$(cat pr-number.txt)" >> $GITHUB_OUTPUT
{
echo 'text_output<<EOF'
bun run clang-tidy:check
cat format.log
echo EOF
} >> "$GITHUB_OUTPUT"
echo "did_fail=$(cat did_fail.txt)" >> $GITHUB_OUTPUT
- name: Find Comment
id: comment
uses: peter-evans/find-comment@v3
with:
issue-number: ${{ github.event.number }}
issue-number: ${{ steps.env.outputs.pr-number }}
comment-author: github-actions[bot]
body-includes: <!-- generated-comment lint-cpp-workflow=${{ github.workflow }} -->
- name: Update Comment
uses: peter-evans/create-or-update-comment@v4
if: steps.env.outputs.did_fail != '0'
with:
comment-id: ${{ steps.comment.outputs.comment-id }}
issue-number: ${{ github.event.number }}
edit-mode: replace
issue-number: ${{ steps.env.outputs.pr-number }}
body: |
@${{ github.actor }}, `clang-tidy` had something to share with you about your code:
@@ -60,3 +71,15 @@ jobs:
Commit: ${{ github.event.workflow_run.head_sha || github.sha }}
<!-- generated-comment lint-cpp-workflow=${{ github.workflow }} -->
edit-mode: replace
- name: Update Previous Comment
uses: peter-evans/create-or-update-comment@v4
if: steps.env.outputs.did_fail == '0' && steps.comment.outputs.comment-id != ''
with:
comment-id: ${{ steps.comment.outputs.comment-id }}
issue-number: ${{ steps.env.outputs.pr-number }}
body: |
clang-tidy nits are fixed! Thank you.
<!-- generated-comment lint-cpp-workflow=${{ github.workflow }} -->
edit-mode: replace

View File

@@ -105,16 +105,11 @@ jobs:
env:
GITHUB_ISSUE_BODY: ${{ github.event.issue.body }}
GITHUB_ISSUE_TITLE: ${{ github.event.issue.title }}
GITHUB_ISSUE_NUMBER: ${{ github.event.issue.number }}
shell: bash
run: |
LABELS=$(bun scripts/read-issue.ts)
bun scripts/is-outdated.ts
# Check for patterns that should close the issue
CLOSE_ACTION=$(bun scripts/handle-crash-patterns.ts)
echo "close-action=$CLOSE_ACTION" >> $GITHUB_OUTPUT
if [[ -f "is-outdated.txt" ]]; then
echo "is-outdated=true" >> $GITHUB_OUTPUT
fi
@@ -123,10 +118,6 @@ jobs:
echo "outdated=$(cat outdated.txt)" >> $GITHUB_OUTPUT
fi
if [[ -f "is-standalone.txt" ]]; then
echo "is-standalone=true" >> $GITHUB_OUTPUT
fi
if [[ -f "is-very-outdated.txt" ]]; then
echo "is-very-outdated=true" >> $GITHUB_OUTPUT
LABELS="$LABELS,old-version"
@@ -136,32 +127,9 @@ jobs:
echo "latest=$(cat LATEST)" >> $GITHUB_OUTPUT
echo "labels=$LABELS" >> $GITHUB_OUTPUT
rm -rf is-outdated.txt outdated.txt latest.txt is-very-outdated.txt is-standalone.txt
- name: Close issue if pattern detected
if: github.event.label.name == 'crash' && fromJson(steps.add-labels.outputs.close-action).close == true
uses: actions/github-script@v7
with:
script: |
const closeAction = JSON.parse('${{ steps.add-labels.outputs.close-action }}');
// Comment with the reason
await github.rest.issues.createComment({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.issue.number,
body: closeAction.comment
});
// Close the issue
await github.rest.issues.update({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.issue.number,
state: 'closed',
state_reason: closeAction.reason
});
rm -rf is-outdated.txt outdated.txt latest.txt is-very-outdated.txt
- name: Generate comment text with Sentry Link
if: github.event.label.name == 'crash' && fromJson(steps.add-labels.outputs.close-action).close != true
if: github.event.label.name == 'crash'
# ignore if fail
continue-on-error: true
id: generate-comment-text
@@ -195,17 +163,8 @@ jobs:
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.issue.number }}
labels: ${{ steps.add-labels.outputs.labels }}
- name: Comment outdated (standalone executable)
if: steps.add-labels.outputs.is-outdated == 'true' && steps.add-labels.outputs.is-standalone == 'true' && github.event.label.name == 'crash' && steps.generate-comment-text.outputs.sentry-link == ''
uses: actions-cool/issues-helper@v3
with:
actions: "create-comment"
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.issue.number }}
body: |
@${{ github.event.issue.user.login }}, the latest version of Bun is v${{ steps.add-labels.outputs.latest }}, but the standalone executable is running Bun v${{ steps.add-labels.outputs.outdated }}. When the CLI using Bun's single-file executable next updates it might be fixed.
- name: Comment outdated
if: steps.add-labels.outputs.is-outdated == 'true' && steps.add-labels.outputs.is-standalone != 'true' && github.event.label.name == 'crash' && steps.generate-comment-text.outputs.sentry-link == ''
if: steps.add-labels.outputs.is-outdated == 'true' && github.event.label.name == 'crash' && steps.generate-comment-text.outputs.sentry-link == ''
uses: actions-cool/issues-helper@v3
with:
actions: "create-comment"
@@ -219,22 +178,8 @@ jobs:
```sh
bun upgrade
```
- name: Comment with Sentry Link and outdated version (standalone executable)
if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated == 'true' && steps.add-labels.outputs.is-standalone == 'true'
uses: actions-cool/issues-helper@v3
with:
actions: "create-comment"
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.issue.number }}
body: |
@${{ github.event.issue.user.login }}, thank you for reporting this crash. The latest version of Bun is v${{ steps.add-labels.outputs.latest }}, but the standalone executable is running Bun v${{ steps.add-labels.outputs.outdated }}. When the CLI using Bun's single-file executable next updates it might be fixed.
For Bun's internal tracking, this issue is [${{ steps.generate-comment-text.outputs.sentry-id }}](${{ steps.generate-comment-text.outputs.sentry-link }}).
<!-- sentry-id: ${{ steps.generate-comment-text.outputs.sentry-id }} -->
<!-- sentry-link: ${{ steps.generate-comment-text.outputs.sentry-link }} -->
- name: Comment with Sentry Link and outdated version
if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated == 'true' && steps.add-labels.outputs.is-standalone != 'true'
if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated == 'true'
uses: actions-cool/issues-helper@v3
with:
actions: "create-comment"

View File

@@ -0,0 +1,89 @@
name: Comment on updated submodule
on:
pull_request_target:
paths:
- "src/generated_versions_list.zig"
- ".github/workflows/on-submodule-update.yml"
jobs:
comment:
name: Comment
runs-on: ubuntu-latest
if: ${{ github.repository_owner == 'oven-sh' }}
permissions:
contents: read
pull-requests: write
issues: write
steps:
- name: Checkout current
uses: actions/checkout@v4
with:
sparse-checkout: |
src
- name: Hash generated versions list
id: hash
run: |
echo "hash=$(sha256sum src/generated_versions_list.zig | cut -d ' ' -f 1)" >> $GITHUB_OUTPUT
- name: Checkout base
uses: actions/checkout@v4
with:
ref: ${{ github.base_ref }}
sparse-checkout: |
src
- name: Hash base
id: base
run: |
echo "base=$(sha256sum src/generated_versions_list.zig | cut -d ' ' -f 1)" >> $GITHUB_OUTPUT
- name: Compare
id: compare
run: |
if [ "${{ steps.hash.outputs.hash }}" != "${{ steps.base.outputs.base }}" ]; then
echo "changed=true" >> $GITHUB_OUTPUT
else
echo "changed=false" >> $GITHUB_OUTPUT
fi
- name: Find Comment
id: comment
uses: peter-evans/find-comment@v3
with:
issue-number: ${{ github.event.pull_request.number }}
comment-author: github-actions[bot]
body-includes: <!-- generated-comment submodule-updated -->
- name: Write Warning Comment
uses: peter-evans/create-or-update-comment@v4
if: steps.compare.outputs.changed == 'true'
with:
comment-id: ${{ steps.comment.outputs.comment-id }}
issue-number: ${{ github.event.pull_request.number }}
edit-mode: replace
body: |
⚠️ **Warning:** @${{ github.actor }}, this PR has changes to submodule versions.
If this change was intentional, please ignore this message. If not, please undo changes to submodules and rebase your branch.
<!-- generated-comment submodule-updated -->
- name: Add labels
uses: actions-cool/issues-helper@v3
if: steps.compare.outputs.changed == 'true'
with:
actions: "add-labels"
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.pull_request.number }}
labels: "changed-submodules"
- name: Remove labels
uses: actions-cool/issues-helper@v3
if: steps.compare.outputs.changed == 'false'
with:
actions: "remove-labels"
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.pull_request.number }}
labels: "changed-submodules"
- name: Delete outdated comment
uses: actions-cool/issues-helper@v3
if: steps.compare.outputs.changed == 'false' && steps.comment.outputs.comment-id != ''
with:
actions: "delete-comment"
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.pull_request.number }}
comment-id: ${{ steps.comment.outputs.comment-id }}

View File

@@ -80,7 +80,7 @@ jobs:
- name: Create Pull Request
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
uses: peter-evans/create-pull-request@v7
uses: peter-evans/create-pull-request@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
add-paths: |

View File

@@ -55,7 +55,7 @@ jobs:
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1
fi
# Try to get commit SHA from tag object (for annotated tags)
# If it fails, assume it's a lightweight tag pointing directly to commit
LATEST_SHA=$(curl -sL "https://api.github.com/repos/HdrHistogram/HdrHistogram_c/git/tags/$LATEST_TAG_SHA" 2>/dev/null | jq -r '.object.sha // empty')
@@ -83,7 +83,7 @@ jobs:
- name: Create Pull Request
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
uses: peter-evans/create-pull-request@v7
uses: peter-evans/create-pull-request@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
add-paths: |

View File

@@ -58,7 +58,7 @@ jobs:
TAG_OBJECT_SHA=$(echo "$TAG_REF" | jq -r '.object.sha')
TAG_OBJECT_TYPE=$(echo "$TAG_REF" | jq -r '.object.type')
if [ -z "$TAG_OBJECT_SHA" ] || [ "$TAG_OBJECT_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1
@@ -99,7 +99,7 @@ jobs:
- name: Create Pull Request
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
uses: peter-evans/create-pull-request@v7
uses: peter-evans/create-pull-request@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
add-paths: |

View File

@@ -80,7 +80,7 @@ jobs:
- name: Create Pull Request
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
uses: peter-evans/create-pull-request@v7
uses: peter-evans/create-pull-request@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
add-paths: |

View File

@@ -80,7 +80,7 @@ jobs:
- name: Create Pull Request
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
uses: peter-evans/create-pull-request@v7
uses: peter-evans/create-pull-request@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
add-paths: |

View File

@@ -55,12 +55,12 @@ jobs:
TAG_REF_RESPONSE=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/refs/tags/$LATEST_TAG")
LATEST_TAG_SHA=$(echo "$TAG_REF_RESPONSE" | jq -r '.object.sha')
TAG_OBJECT_TYPE=$(echo "$TAG_REF_RESPONSE" | jq -r '.object.type')
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1
fi
if [ "$TAG_OBJECT_TYPE" = "tag" ]; then
# This is an annotated tag, we need to get the commit it points to
LATEST_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
@@ -92,7 +92,7 @@ jobs:
- name: Create Pull Request
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
uses: peter-evans/create-pull-request@v7
uses: peter-evans/create-pull-request@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
add-paths: |

View File

@@ -59,7 +59,7 @@ jobs:
LATEST_TAG_SHA=$(echo "$TAG_REF" | jq -r '.object.sha')
TAG_TYPE=$(echo "$TAG_REF" | jq -r '.object.type')
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1
@@ -97,7 +97,7 @@ jobs:
- name: Create Pull Request
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
uses: peter-evans/create-pull-request@v7
uses: peter-evans/create-pull-request@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
add-paths: |

View File

@@ -91,7 +91,7 @@ jobs:
- name: Create Pull Request
if: success() && steps.check-version.outputs.current_num < steps.check-version.outputs.latest_num
uses: peter-evans/create-pull-request@v7
uses: peter-evans/create-pull-request@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
add-paths: |

View File

@@ -1,79 +0,0 @@
name: Update vendor
on:
schedule:
- cron: "0 4 * * 0"
workflow_dispatch:
jobs:
check-update:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
strategy:
matrix:
package:
- elysia
steps:
- uses: actions/checkout@v4
- uses: oven-sh/setup-bun@v2
- name: Check version
id: check-version
run: |
set -euo pipefail
# Extract the commit hash from the line after COMMIT
current=$(bun -p '(await Bun.file("test/vendor.json").json()).filter(v=>v.package===process.argv[1])[0].tag' ${{ matrix.package }})
repository=$(bun -p '(await Bun.file("test/vendor.json").json()).filter(v=>v.package===process.argv[1])[0].repository' ${{ matrix.package }} | cut -d'/' -f4,5)
if [ -z "$current" ]; then
echo "Error: Could not find COMMIT line in test/vendor.json"
exit 1
fi
echo "current=$current" >> $GITHUB_OUTPUT
echo "repository=$repository" >> $GITHUB_OUTPUT
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/${repository}/releases/latest)
if [ -z "$LATEST_RELEASE" ]; then
echo "Error: Failed to fetch latest release from GitHub API"
exit 1
fi
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
echo "Error: Could not extract tag name from GitHub API response"
exit 1
fi
echo "latest=$LATEST_TAG" >> $GITHUB_OUTPUT
- name: Update version if needed
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
run: |
set -euo pipefail
bun -e 'await Bun.write("test/vendor.json", JSON.stringify((await Bun.file("test/vendor.json").json()).map(v=>{if(v.package===process.argv[1])v.tag=process.argv[2];return v;}), null, 2) + "\n")' ${{ matrix.package }} ${{ steps.check-version.outputs.latest }}
- name: Create Pull Request
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
uses: peter-evans/create-pull-request@v7
with:
token: ${{ secrets.GITHUB_TOKEN }}
add-paths: |
test/vendor.json
commit-message: "deps: update ${{ matrix.package }} to ${{ steps.check-version.outputs.latest }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update ${{ matrix.package }} to ${{ steps.check-version.outputs.latest }}"
delete-branch: true
branch: deps/update-${{ matrix.package }}-${{ github.run_number }}
body: |
## What does this PR do?
Updates ${{ matrix.package }} to version ${{ steps.check-version.outputs.latest }}
Compare: https://github.com/${{ steps.check-version.outputs.repository }}/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-vendor.yml)

View File

@@ -80,7 +80,7 @@ jobs:
- name: Create Pull Request
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
uses: peter-evans/create-pull-request@v7
uses: peter-evans/create-pull-request@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
add-paths: |

2
.gitignore vendored
View File

@@ -189,4 +189,4 @@ scratch*.{js,ts,tsx,cjs,mjs}
scripts/lldb-inline
# We regenerate these in all the build scripts
cmake/sources/*.txt
cmake/sources/*.txt

11
.vscode/launch.json generated vendored
View File

@@ -25,9 +25,6 @@
// "BUN_JSC_validateExceptionChecks": "1",
// "BUN_JSC_dumpSimulatedThrows": "1",
// "BUN_JSC_unexpectedExceptionStackTraceLimit": "20",
// "BUN_DESTRUCT_VM_ON_EXIT": "1",
// "ASAN_OPTIONS": "allow_user_segv_handler=1:disable_coredump=0:detect_leaks=1",
// "LSAN_OPTIONS": "malloc_context_size=100:print_suppressions=1:suppressions=${workspaceFolder}/test/leaksan.supp",
},
"console": "internalConsole",
"sourceMap": {
@@ -60,17 +57,11 @@
"name": "bun run [file]",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["${file}"],
"cwd": "${workspaceFolder}",
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "0",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
// "BUN_JSC_validateExceptionChecks": "1",
// "BUN_JSC_dumpSimulatedThrows": "1",
// "BUN_JSC_unexpectedExceptionStackTraceLimit": "20",
// "BUN_DESTRUCT_VM_ON_EXIT": "1",
// "ASAN_OPTIONS": "allow_user_segv_handler=1:disable_coredump=0:detect_leaks=1",
// "LSAN_OPTIONS": "malloc_context_size=100:print_suppressions=1:suppressions=${workspaceFolder}/test/leaksan.supp",
},
"console": "internalConsole",
"sourceMap": {

View File

@@ -4,14 +4,18 @@ This is the Bun repository - an all-in-one JavaScript runtime & toolkit designed
### Build Commands
- **Build Bun**: `bun bd`
- **Build debug version**: `bun bd`
- Creates a debug build at `./build/debug/bun-debug`
- **CRITICAL**: no need for a timeout, the build is really fast!
- **CRITICAL**: DO NOT set a build timeout. Compilation takes ~5 minutes. Be patient.
- **Run tests with your debug build**: `bun bd test <test-file>`
- **CRITICAL**: Never use `bun test` directly - it won't include your changes
- **Run any command with debug build**: `bun bd <command>`
Tip: Bun is already installed and in $PATH. The `bd` subcommand is a package.json script.
### Other Build Variants
- `bun run build:release` - Release build
Address sanitizer is enabled by default in debug builds of Bun.
## Testing

View File

@@ -31,11 +31,6 @@ include(SetupCcache)
parse_package_json(VERSION_VARIABLE DEFAULT_VERSION)
optionx(VERSION STRING "The version of Bun" DEFAULT ${DEFAULT_VERSION})
project(Bun VERSION ${VERSION})
# Bun uses C++23, which is compatible with BoringSSL's C++17 requirement
set(CMAKE_CXX_STANDARD 23)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
include(Options)
include(CompilerFlags)
@@ -48,9 +43,6 @@ include(SetupEsbuild)
include(SetupZig)
include(SetupRust)
# Generate dependency versions header
include(GenerateDependencyVersions)
# --- Targets ---
include(BuildBun)

2
LATEST
View File

@@ -1 +1 @@
1.2.22
1.2.21

View File

@@ -587,15 +587,9 @@ pub fn addBunObject(b: *Build, opts: *BunBuildOptions) *Compile {
.root_module = root,
});
configureObj(b, opts, obj);
if (enableFastBuild(b)) obj.root_module.strip = true;
return obj;
}
fn enableFastBuild(b: *Build) bool {
const val = b.graph.env_map.get("BUN_BUILD_FAST") orelse return false;
return std.mem.eql(u8, val, "1");
}
fn configureObj(b: *Build, opts: *BunBuildOptions, obj: *Compile) void {
// Flags on root module get used for the compilation
obj.root_module.omit_frame_pointer = false;
@@ -606,7 +600,7 @@ fn configureObj(b: *Build, opts: *BunBuildOptions, obj: *Compile) void {
// Object options
obj.use_llvm = !opts.no_llvm;
obj.use_lld = if (opts.os == .mac) false else !opts.no_llvm;
if (opts.enable_asan and !enableFastBuild(b)) {
if (opts.enable_asan) {
if (@hasField(Build.Module, "sanitize_address")) {
obj.root_module.sanitize_address = true;
} else {

View File

@@ -60,10 +60,10 @@ endif()
# Windows Code Signing Option
if(WIN32)
optionx(ENABLE_WINDOWS_CODESIGNING BOOL "Enable Windows code signing with DigiCert KeyLocker" DEFAULT OFF)
if(ENABLE_WINDOWS_CODESIGNING)
message(STATUS "Windows code signing: ENABLED")
# Check for required environment variables
if(NOT DEFINED ENV{SM_API_KEY})
message(WARNING "SM_API_KEY not set - code signing may fail")

View File

@@ -13,10 +13,7 @@
},
{
"output": "JavaScriptSources.txt",
"paths": [
"src/js/**/*.{js,ts}",
"src/install/PackageManager/scanner-entry.ts"
]
"paths": ["src/js/**/*.{js,ts}"]
},
{
"output": "JavaScriptCodegenSources.txt",

View File

@@ -1,17 +1,9 @@
# https://clang.llvm.org/extra/clang-tidy/
set(CLANG_TIDY_SOURCES ${BUN_C_SOURCES} ${BUN_CXX_SOURCES})
list(REMOVE_ITEM CLANG_TIDY_SOURCES ${CWD}/src/bun.js/bindings/node/http/llhttp/llhttp.c)
list(REMOVE_ITEM CLANG_TIDY_SOURCES ${CWD}/src/bun.js/bindings/node/http/llhttp/http.c)
list(REMOVE_ITEM CLANG_TIDY_SOURCES ${CWD}/src/bun.js/bindings/node/http/llhttp/api.c)
list(REMOVE_ITEM CLANG_TIDY_SOURCES ${CWD}/src/bun.js/bindings/decodeURIComponentSIMD.cpp)
list(REMOVE_ITEM CLANG_TIDY_SOURCES ${CWD}/src/bun.js/bindings/NoOpForTesting.cpp)
list(REMOVE_ITEM CLANG_TIDY_SOURCES ${CWD}/src/bun.js/bindings/ProcessBindingNatives.cpp)
list(REMOVE_ITEM CLANG_TIDY_SOURCES ${CWD}/src/bun.js/bindings/stripANSI.cpp)
list(REMOVE_ITEM CLANG_TIDY_SOURCES ${CWD}/src/bun.js/bindings/Uint8Array.cpp)
set(CLANG_TIDY_COMMAND ${CLANG_TIDY_PROGRAM}
-p ${BUILD_PATH}
-p ${BUILD_PATH}
--config-file=${CWD}/.clang-tidy
)

View File

@@ -25,7 +25,7 @@ register_command(
"Running cppcheck"
COMMAND
${CMAKE_COMMAND} -E make_directory cppcheck
&& ${CPPCHECK_COMMAND}
&& ${CPPCHECK_COMMAND}
CWD
${BUILD_PATH}
TARGETS

View File

@@ -38,7 +38,7 @@ foreach(i RANGE 10)
else()
message(STATUS "Downloading ${DOWNLOAD_URL}... (retry ${i})")
endif()
file(DOWNLOAD
${DOWNLOAD_URL}
${DOWNLOAD_TMP_FILE_${i}}

View File

@@ -10,13 +10,13 @@ function(absolute_sources OUTPUT_VAR INPUT_FILE)
else()
set(BASE_DIR ${CMAKE_CURRENT_SOURCE_DIR})
endif()
# Read the file containing relative paths
file(STRINGS "${INPUT_FILE}" RELATIVE_PATHS)
# Create a list to store absolute paths
set(RESULT_LIST "")
# Convert each relative path to absolute
foreach(REL_PATH ${RELATIVE_PATHS})
# Skip empty lines
@@ -26,10 +26,10 @@ function(absolute_sources OUTPUT_VAR INPUT_FILE)
list(APPEND RESULT_LIST ${ABS_PATH})
endif()
endforeach()
# Set the output variable in the parent scope
set(${OUTPUT_VAR} ${RESULT_LIST} PARENT_SCOPE)
# Tell CMake that the configuration depends on this file
set_property(DIRECTORY APPEND PROPERTY CMAKE_CONFIGURE_DEPENDS "${INPUT_FILE}")
endfunction()
endfunction()

View File

@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
oven-sh/boringssl
COMMIT
f1ffd9e83d4f5c28a9c70d73f9a4e6fcf310062f
7a5d984c69b0c34c4cbb56c6812eaa5b9bef485c
)
register_cmake_command(

View File

@@ -636,7 +636,6 @@ register_command(
SOURCES
${BUN_ZIG_SOURCES}
${BUN_ZIG_GENERATED_SOURCES}
${CWD}/src/install/PackageManager/scanner-entry.ts # Is there a better way to do this?
)
set_property(TARGET bun-zig PROPERTY JOB_POOL compile_pool)
@@ -760,7 +759,7 @@ if (NOT WIN32)
# Only enable in these scenarios:
# 1. NOT in CI, OR
# 2. In CI AND BUN_CPP_ONLY is enabled
if((NOT CI OR (CI AND BUN_CPP_ONLY)) AND NOT ENABLE_ANALYSIS)
if(NOT CI OR (CI AND BUN_CPP_ONLY))
target_precompile_headers(${bun} PRIVATE
"$<$<COMPILE_LANGUAGE:CXX>:${CWD}/src/bun.js/bindings/root.h>"
)
@@ -1126,9 +1125,6 @@ endif()
include_directories(${WEBKIT_INCLUDE_PATH})
# Include the generated dependency versions header
include_directories(${CMAKE_BINARY_DIR})
if(NOT WEBKIT_LOCAL AND NOT APPLE)
include_directories(${WEBKIT_INCLUDE_PATH}/wtf/unicode)
endif()
@@ -1230,32 +1226,32 @@ if(NOT BUN_CPP_ONLY)
OUTPUTS
${BUILD_PATH}/${bunStripExe}
)
# Then sign both executables on Windows
if(WIN32 AND ENABLE_WINDOWS_CODESIGNING)
set(SIGN_SCRIPT "${CMAKE_SOURCE_DIR}/.buildkite/scripts/sign-windows.ps1")
# Verify signing script exists
if(NOT EXISTS "${SIGN_SCRIPT}")
message(FATAL_ERROR "Windows signing script not found: ${SIGN_SCRIPT}")
endif()
# Use PowerShell for Windows code signing (native Windows, no path issues)
find_program(POWERSHELL_EXECUTABLE
find_program(POWERSHELL_EXECUTABLE
NAMES pwsh.exe powershell.exe
PATHS
PATHS
"C:/Program Files/PowerShell/7"
"C:/Program Files (x86)/PowerShell/7"
"C:/Windows/System32/WindowsPowerShell/v1.0"
DOC "Path to PowerShell executable"
)
if(NOT POWERSHELL_EXECUTABLE)
set(POWERSHELL_EXECUTABLE "powershell.exe")
endif()
message(STATUS "Using PowerShell executable: ${POWERSHELL_EXECUTABLE}")
# Sign both bun-profile.exe and bun.exe after stripping
register_command(
TARGET

View File

@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
HdrHistogram/HdrHistogram_c
COMMIT
be60a9987ee48d0abf0d7b6a175bad8d6c1585d1
8dcce8f68512fca460b171bccc3a5afce0048779
)
register_cmake_command(
@@ -21,4 +21,4 @@ register_cmake_command(
-DHDR_HISTOGRAM_BUILD_STATIC=ON
-DHDR_LOG_REQUIRED=DISABLED
-DHDR_HISTOGRAM_BUILD_PROGRAMS=OFF
)
)

View File

@@ -30,4 +30,4 @@ register_cmake_command(
INCLUDES
.
hwy
)
)

View File

@@ -14,7 +14,7 @@ set(MIMALLOC_CMAKE_ARGS
-DMI_BUILD_TESTS=OFF
-DMI_USE_CXX=ON
-DMI_SKIP_COLLECT_ON_EXIT=ON
# ```
# mimalloc_allow_large_os_pages=0 BUN_PORT=3004 mem bun http-hello.js
# Started development server: http://localhost:3004
@@ -51,7 +51,7 @@ if(ENABLE_ASAN)
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_DEBUG_UBSAN=ON)
elseif(APPLE OR LINUX)
if(APPLE)
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OVERRIDE=OFF)
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OVERRIDE=OFF)
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OSX_ZONE=OFF)
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OSX_INTERPOSE=OFF)
else()

View File

@@ -3,4 +3,4 @@ set(CMAKE_SYSTEM_PROCESSOR x64)
set(CMAKE_OSX_ARCHITECTURES x86_64)
set(CMAKE_C_COMPILER_WORKS ON)
set(CMAKE_CXX_COMPILER_WORKS ON)
set(CMAKE_CXX_COMPILER_WORKS ON)

View File

@@ -3,4 +3,4 @@ set(CMAKE_SYSTEM_PROCESSOR aarch64)
set(ABI musl)
set(CMAKE_C_COMPILER_WORKS ON)
set(CMAKE_CXX_COMPILER_WORKS ON)
set(CMAKE_CXX_COMPILER_WORKS ON)

View File

@@ -3,4 +3,4 @@ set(CMAKE_SYSTEM_PROCESSOR aarch64)
set(ABI gnu)
set(CMAKE_C_COMPILER_WORKS ON)
set(CMAKE_CXX_COMPILER_WORKS ON)
set(CMAKE_CXX_COMPILER_WORKS ON)

View File

@@ -4,4 +4,4 @@ set(ENABLE_BASELINE ON)
set(ABI gnu)
set(CMAKE_C_COMPILER_WORKS ON)
set(CMAKE_CXX_COMPILER_WORKS ON)
set(CMAKE_CXX_COMPILER_WORKS ON)

View File

@@ -4,4 +4,4 @@ set(ENABLE_BASELINE ON)
set(ABI musl)
set(CMAKE_C_COMPILER_WORKS ON)
set(CMAKE_CXX_COMPILER_WORKS ON)
set(CMAKE_CXX_COMPILER_WORKS ON)

View File

@@ -3,4 +3,4 @@ set(CMAKE_SYSTEM_PROCESSOR x64)
set(ENABLE_BASELINE ON)
set(CMAKE_C_COMPILER_WORKS ON)
set(CMAKE_CXX_COMPILER_WORKS ON)
set(CMAKE_CXX_COMPILER_WORKS ON)

View File

@@ -2,4 +2,4 @@ set(CMAKE_SYSTEM_NAME Windows)
set(CMAKE_SYSTEM_PROCESSOR x64)
set(CMAKE_C_COMPILER_WORKS ON)
set(CMAKE_CXX_COMPILER_WORKS ON)
set(CMAKE_CXX_COMPILER_WORKS ON)

View File

@@ -1,209 +0,0 @@
# GenerateDependencyVersions.cmake
# Generates a header file with all dependency versions
# Function to extract version from git tree object
# Resolve the git tree/commit hash recorded for a vendored dependency under
# src/deps/<dep_name>. Sets ${output_var} in the caller's scope to the hash,
# or to "unknown" when the path is untracked or git is unavailable.
function(get_git_tree_hash dep_name output_var)
  set(tree_hash "unknown")
  execute_process(
    COMMAND git rev-parse HEAD:./src/deps/${dep_name}
    WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}"
    RESULT_VARIABLE exit_code
    OUTPUT_VARIABLE resolved_hash
    OUTPUT_STRIP_TRAILING_WHITESPACE
    ERROR_QUIET
  )
  # Require both a zero exit code and non-empty output before trusting it.
  if(exit_code EQUAL 0 AND resolved_hash)
    set(tree_hash "${resolved_hash}")
  endif()
  set(${output_var} "${tree_hash}" PARENT_SCOPE)
endfunction()
# Function to extract version from header file using regex
# Pull a version string out of a C header using a regex with one capture
# group. Sets ${output_var} in the caller's scope to the captured text, or
# to "unknown" when the file is missing or the pattern does not match.
function(extract_version_from_header header_file regex_pattern output_var)
  set(extracted "unknown")
  if(EXISTS "${header_file}")
    # file(STRINGS ... REGEX) keeps only the lines matching the pattern.
    file(STRINGS "${header_file}" matched_line REGEX "${regex_pattern}")
    if(matched_line)
      # Re-run the match to populate CMAKE_MATCH_1 with the capture group.
      string(REGEX MATCH "${regex_pattern}" _ "${matched_line}")
      if(CMAKE_MATCH_1)
        set(extracted "${CMAKE_MATCH_1}")
      endif()
    endif()
  endif()
  set(${output_var} "${extracted}" PARENT_SCOPE)
endfunction()
# Main function to generate the header file.
#
# Gathers version identifiers for Bun and every vendored dependency
# (hard-coded commit hashes plus semantic versions parsed out of vendored
# headers) and writes ${CMAKE_BINARY_DIR}/bun_dependency_versions.h
# containing both #define macros and C string constants, along with a
# human-readable bun_dependency_versions_debug.txt listing.
function(generate_dependency_versions_header)
  set(DEPS_PATH "${CMAKE_SOURCE_DIR}/src/deps")
  set(VENDOR_PATH "${CMAKE_SOURCE_DIR}/vendor")

  # DEPENDENCY_VERSIONS is a flat list of alternating NAME, VALUE pairs.
  set(DEPENDENCY_VERSIONS "")

  # WebKit version (from SetupWebKit.cmake or command line)
  if(WEBKIT_VERSION)
    set(WEBKIT_VERSION_STR "${WEBKIT_VERSION}")
  else()
    set(WEBKIT_VERSION_STR "0ddf6f47af0a9782a354f61e06d7f83d097d9f84")
  endif()
  list(APPEND DEPENDENCY_VERSIONS "WEBKIT" "${WEBKIT_VERSION_STR}")

  # Track input files so CMake reconfigures when they change
  set_property(DIRECTORY APPEND PROPERTY CMAKE_CONFIGURE_DEPENDS
    "${CMAKE_SOURCE_DIR}/package.json"
    "${VENDOR_PATH}/libdeflate/libdeflate.h"
    "${VENDOR_PATH}/zlib/zlib.h"
    "${DEPS_PATH}/zstd/lib/zstd.h"
  )

  # Hardcoded dependency versions (previously from generated_versions_list.zig)
  # These are the commit hashes/tree objects for each dependency
  list(APPEND DEPENDENCY_VERSIONS "BORINGSSL" "29a2cd359458c9384694b75456026e4b57e3e567")
  list(APPEND DEPENDENCY_VERSIONS "C_ARES" "d1722e6e8acaf10eb73fa995798a9cd421d9f85e")
  list(APPEND DEPENDENCY_VERSIONS "LIBARCHIVE" "898dc8319355b7e985f68a9819f182aaed61b53a")
  list(APPEND DEPENDENCY_VERSIONS "LIBDEFLATE_HASH" "dc76454a39e7e83b68c3704b6e3784654f8d5ac5")
  list(APPEND DEPENDENCY_VERSIONS "LOLHTML" "8d4c273ded322193d017042d1f48df2766b0f88b")
  list(APPEND DEPENDENCY_VERSIONS "LSHPACK" "3d0f1fc1d6e66a642e7a98c55deb38aa986eb4b0")
  list(APPEND DEPENDENCY_VERSIONS "MIMALLOC" "4c283af60cdae205df5a872530c77e2a6a307d43")
  list(APPEND DEPENDENCY_VERSIONS "PICOHTTPPARSER" "066d2b1e9ab820703db0837a7255d92d30f0c9f5")
  list(APPEND DEPENDENCY_VERSIONS "TINYCC" "ab631362d839333660a265d3084d8ff060b96753")
  list(APPEND DEPENDENCY_VERSIONS "ZLIB_HASH" "886098f3f339617b4243b286f5ed364b9989e245")
  list(APPEND DEPENDENCY_VERSIONS "ZSTD_HASH" "794ea1b0afca0f020f4e57b6732332231fb23c70")

  # Extract semantic versions from header files where available
  extract_version_from_header(
    "${VENDOR_PATH}/libdeflate/libdeflate.h"
    "#define LIBDEFLATE_VERSION_STRING[ \t]+\"([0-9\\.]+)\""
    LIBDEFLATE_VERSION_STRING
  )
  list(APPEND DEPENDENCY_VERSIONS "LIBDEFLATE_VERSION" "${LIBDEFLATE_VERSION_STRING}")

  extract_version_from_header(
    "${VENDOR_PATH}/zlib/zlib.h"
    "#define[ \t]+ZLIB_VERSION[ \t]+\"([^\"]+)\""
    ZLIB_VERSION_STRING
  )
  list(APPEND DEPENDENCY_VERSIONS "ZLIB_VERSION" "${ZLIB_VERSION_STRING}")

  extract_version_from_header(
    "${DEPS_PATH}/zstd/lib/zstd.h"
    "#define[ \t]+ZSTD_VERSION_STRING[ \t]+\"([^\"]+)\""
    ZSTD_VERSION_STRING
  )
  list(APPEND DEPENDENCY_VERSIONS "ZSTD_VERSION" "${ZSTD_VERSION_STRING}")

  # Bun version from package.json
  if(EXISTS "${CMAKE_SOURCE_DIR}/package.json")
    file(READ "${CMAKE_SOURCE_DIR}/package.json" PACKAGE_JSON)
    # Fix: quote the expansion — unquoted, any semicolon inside package.json
    # would split the contents into multiple arguments and break the match.
    string(REGEX MATCH "\"version\"[ \t]*:[ \t]*\"([^\"]+)\"" _ "${PACKAGE_JSON}")
    if(CMAKE_MATCH_1)
      set(BUN_VERSION_STRING "${CMAKE_MATCH_1}")
    else()
      set(BUN_VERSION_STRING "unknown")
    endif()
  else()
    # NOTE(review): VERSION is expected to come from the including scope —
    # confirm it is defined when package.json is absent.
    set(BUN_VERSION_STRING "${VERSION}")
  endif()
  list(APPEND DEPENDENCY_VERSIONS "BUN_VERSION" "${BUN_VERSION_STRING}")

  # Node.js compatibility version (hardcoded as in the current implementation)
  set(NODEJS_COMPAT_VERSION "22.12.0")
  list(APPEND DEPENDENCY_VERSIONS "NODEJS_COMPAT_VERSION" "${NODEJS_COMPAT_VERSION}")

  # Get Bun's git SHA for uws/usockets versions (they use Bun's own SHA)
  execute_process(
    COMMAND git rev-parse HEAD
    WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}"
    OUTPUT_VARIABLE BUN_GIT_SHA
    OUTPUT_STRIP_TRAILING_WHITESPACE
    ERROR_QUIET
  )
  if(NOT BUN_GIT_SHA)
    set(BUN_GIT_SHA "unknown")
  endif()
  list(APPEND DEPENDENCY_VERSIONS "UWS" "${BUN_GIT_SHA}")
  list(APPEND DEPENDENCY_VERSIONS "USOCKETS" "${BUN_GIT_SHA}")

  # Zig version - hardcoded for now, can be updated as needed
  # This should match the version of Zig used to build Bun
  list(APPEND DEPENDENCY_VERSIONS "ZIG" "0.14.1")

  # Generate the header file content
  set(HEADER_CONTENT "// This file is auto-generated by CMake. Do not edit manually.\n")
  string(APPEND HEADER_CONTENT "#ifndef BUN_DEPENDENCY_VERSIONS_H\n")
  string(APPEND HEADER_CONTENT "#define BUN_DEPENDENCY_VERSIONS_H\n\n")
  string(APPEND HEADER_CONTENT "#ifdef __cplusplus\n")
  string(APPEND HEADER_CONTENT "extern \"C\" {\n")
  string(APPEND HEADER_CONTENT "#endif\n\n")
  string(APPEND HEADER_CONTENT "// Dependency versions\n")

  # Walk the NAME/VALUE pairs (stride 2) and emit one #define per known
  # version. (Removed an unused `last_idx` computation from the original.)
  list(LENGTH DEPENDENCY_VERSIONS num_versions)
  set(i 0)
  while(i LESS num_versions)
    list(GET DEPENDENCY_VERSIONS ${i} name)
    math(EXPR value_idx "${i} + 1")
    if(value_idx LESS num_versions)
      list(GET DEPENDENCY_VERSIONS ${value_idx} value)
      # Only emit #define if value is not "unknown"
      if(NOT "${value}" STREQUAL "unknown")
        string(APPEND HEADER_CONTENT "#define BUN_DEP_${name} \"${value}\"\n")
      endif()
    endif()
    math(EXPR i "${i} + 2")
  endwhile()

  string(APPEND HEADER_CONTENT "\n")
  string(APPEND HEADER_CONTENT "// C string constants for easy access\n")

  # Second pass over the same pairs: emit C string constants.
  set(i 0)
  while(i LESS num_versions)
    list(GET DEPENDENCY_VERSIONS ${i} name)
    math(EXPR value_idx "${i} + 1")
    if(value_idx LESS num_versions)
      list(GET DEPENDENCY_VERSIONS ${value_idx} value)
      # Only emit constant if value is not "unknown"
      if(NOT "${value}" STREQUAL "unknown")
        string(APPEND HEADER_CONTENT "static const char* const BUN_VERSION_${name} = \"${value}\";\n")
      endif()
    endif()
    math(EXPR i "${i} + 2")
  endwhile()

  string(APPEND HEADER_CONTENT "\n#ifdef __cplusplus\n")
  string(APPEND HEADER_CONTENT "}\n")
  string(APPEND HEADER_CONTENT "#endif\n\n")
  string(APPEND HEADER_CONTENT "#endif // BUN_DEPENDENCY_VERSIONS_H\n")

  # Write the header file
  set(OUTPUT_FILE "${CMAKE_BINARY_DIR}/bun_dependency_versions.h")
  file(WRITE "${OUTPUT_FILE}" "${HEADER_CONTENT}")
  message(STATUS "Generated dependency versions header: ${OUTPUT_FILE}")

  # Also create a more detailed version for debugging; this listing includes
  # "unknown" entries that the header intentionally omits.
  set(DEBUG_OUTPUT_FILE "${CMAKE_BINARY_DIR}/bun_dependency_versions_debug.txt")
  set(DEBUG_CONTENT "Bun Dependency Versions\n")
  string(APPEND DEBUG_CONTENT "=======================\n\n")
  set(i 0)
  while(i LESS num_versions)
    list(GET DEPENDENCY_VERSIONS ${i} name)
    math(EXPR value_idx "${i} + 1")
    if(value_idx LESS num_versions)
      list(GET DEPENDENCY_VERSIONS ${value_idx} value)
      string(APPEND DEBUG_CONTENT "${name}: ${value}\n")
    endif()
    math(EXPR i "${i} + 2")
  endwhile()
  file(WRITE "${DEBUG_OUTPUT_FILE}" "${DEBUG_CONTENT}")
endfunction()

# Call the function to generate the header
generate_dependency_versions_header()

View File

@@ -101,7 +101,7 @@ foreach(i RANGE ${BUILDKITE_JOBS_MAX_INDEX})
list(APPEND BUILDKITE_JOBS_NOT_FOUND ${BUILDKITE_JOB_NAME})
continue()
endif()
file(READ ${BUILDKITE_ARTIFACTS_PATH} BUILDKITE_ARTIFACTS)
string(JSON BUILDKITE_ARTIFACTS_LENGTH LENGTH ${BUILDKITE_ARTIFACTS})
if(NOT BUILDKITE_ARTIFACTS_LENGTH GREATER 0)

View File

@@ -17,7 +17,6 @@ set(DEFAULT_LLVM_VERSION "19.1.7")
optionx(LLVM_VERSION STRING "The version of LLVM to use" DEFAULT ${DEFAULT_LLVM_VERSION})
string(REGEX MATCH "([0-9]+)\\.([0-9]+)\\.([0-9]+)" USE_LLVM_VERSION ${LLVM_VERSION})
if(USE_LLVM_VERSION)
set(LLVM_VERSION_MAJOR ${CMAKE_MATCH_1})
set(LLVM_VERSION_MINOR ${CMAKE_MATCH_2})

View File

@@ -53,7 +53,7 @@ endif()
optionx(CMAKE_OSX_SYSROOT STRING "The macOS SDK path to target" DEFAULT ${DEFAULT_CMAKE_OSX_SYSROOT})
list(APPEND CMAKE_ARGS
list(APPEND CMAKE_ARGS
-DCMAKE_OSX_DEPLOYMENT_TARGET=${CMAKE_OSX_DEPLOYMENT_TARGET}
-DCMAKE_OSX_SYSROOT=${CMAKE_OSX_SYSROOT}
)

View File

@@ -2,7 +2,7 @@ option(WEBKIT_VERSION "The version of WebKit to use")
option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading")
if(NOT WEBKIT_VERSION)
set(WEBKIT_VERSION 495c25e24927ba03277ae225cd42811588d03ff8)
set(WEBKIT_VERSION f474428677de1fafaf13bb3b9a050fe3504dda25)
endif()
string(SUBSTRING ${WEBKIT_VERSION} 0 16 WEBKIT_VERSION_PREFIX)

View File

@@ -665,6 +665,7 @@ _bun_test_completion() {
'--timeout[Set the per-test timeout in milliseconds, default is 5000.]:timeout' \
'--update-snapshots[Update snapshot files]' \
'--rerun-each[Re-run each test file <NUMBER> times, helps catch certain bugs]:rerun' \
'--only[Only run tests that are marked with "test.only()"]' \
'--todo[Include tests that are marked with "test.todo()"]' \
'--coverage[Generate a coverage profile]' \
'--bail[Exit the test suite after <NUMBER> failures. If you do not specify a number, it defaults to 1.]:bail' \

View File

@@ -604,12 +604,13 @@ const db = new SQL({
connectionTimeout: 30, // Timeout when establishing new connections
// SSL/TLS options
tls: {
rejectUnauthorized: true,
ca: "path/to/ca.pem",
key: "path/to/key.pem",
cert: "path/to/cert.pem",
},
ssl: "prefer", // or "disable", "require", "verify-ca", "verify-full"
// tls: {
// rejectUnauthorized: true,
// ca: "path/to/ca.pem",
// key: "path/to/key.pem",
// cert: "path/to/cert.pem",
// },
// Callbacks
onconnect: client => {

View File

@@ -184,45 +184,6 @@ const { database, redis } = require("./config.yaml");
console.log(database.port); // 5432
```
### TypeScript Support
While Bun can import YAML files directly, TypeScript doesn't know the types of your YAML files by default. To add TypeScript support for your YAML imports, create a declaration file with `.d.ts` appended to the YAML filename (e.g., `config.yaml` → `config.yaml.d.ts`):
```yaml#config.yaml
features: "advanced"
server:
host: localhost
port: 3000
```
```ts#config.yaml.d.ts
const contents: {
features: string;
server: {
host: string;
port: number;
};
};
export = contents;
```
Now TypeScript will provide proper type checking and auto-completion:
```ts#app.ts
import config from "./config.yaml";
// TypeScript knows the types!
config.server.port; // number
config.server.host; // string
config.features; // string
// TypeScript will catch errors
config.server.unknown; // Error: Property 'unknown' does not exist
```
This approach works for both ES modules and CommonJS, giving you full type safety while Bun continues to handle the actual YAML parsing at runtime.
## Hot Reloading with YAML
One of the most powerful features of Bun's YAML support is hot reloading. When you run your application with `bun --hot`, changes to YAML files are automatically detected and reloaded without closing connections

View File

@@ -733,10 +733,6 @@ Whether to enable minification. Default `false`.
When targeting `bun`, identifiers will be minified by default.
{% /callout %}
{% callout %}
When `minify.syntax` is enabled, unused function and class expression names are removed unless `minify.keepNames` is set to `true` or `--keep-names` flag is used.
{% /callout %}
To enable all minification options:
{% codetabs group="a" %}
@@ -767,16 +763,12 @@ await Bun.build({
whitespace: true,
identifiers: true,
syntax: true,
keepNames: false, // default
},
})
```
```bash#CLI
$ bun build ./index.tsx --outdir ./out --minify-whitespace --minify-identifiers --minify-syntax
# To preserve function and class names during minification:
$ bun build ./index.tsx --outdir ./out --minify --keep-names
```
{% /codetabs %}
@@ -1561,7 +1553,6 @@ interface BuildConfig {
whitespace?: boolean;
syntax?: boolean;
identifiers?: boolean;
keepNames?: boolean;
};
/**
* Ignore dead code elimination/tree-shaking annotations such as @__PURE__ and package.json

View File

@@ -9,9 +9,8 @@ $ bun create next-app
✔ What is your project named? … my-app
✔ Would you like to use TypeScript with this project? … No / Yes
✔ Would you like to use ESLint with this project? … No / Yes
✔ Would you like to use Tailwind CSS? ... No / Yes
✔ Would you like to use `src/` directory with this project? … No / Yes
✔ Would you like to use App Router? (recommended) ... No / Yes
✔ Would you like to use experimental `app/` directory with this project? … No / Yes
✔ What import alias would you like configured? … @/*
Creating a new Next.js app in /path/to/my-app.
```

View File

@@ -73,30 +73,4 @@ console.log(data.hobbies); // => ["reading", "coding"]
---
## TypeScript Support
To add TypeScript support for your YAML imports, create a declaration file with `.d.ts` appended to the YAML filename (e.g., `config.yaml` → `config.yaml.d.ts`):
```ts#config.yaml.d.ts
const contents: {
database: {
host: string;
port: number;
name: string;
};
server: {
port: number;
timeout: number;
};
features: {
auth: boolean;
rateLimit: boolean;
};
};
export = contents;
```
---
See [Docs > API > YAML](https://bun.com/docs/api/yaml) for complete documentation on YAML support in Bun.

View File

@@ -407,9 +407,6 @@ export default {
page("api/cc", "C Compiler", {
description: `Build & run native C from JavaScript with Bun's native C compiler API`,
}), // "`bun:ffi`"),
page("api/secrets", "Secrets", {
description: `Store and retrieve sensitive credentials securely using the operating system's native credential storage APIs.`,
}), // "`Bun.secrets`"),
page("cli/test", "Testing", {
description: `Bun's built-in test runner is fast and uses Jest-compatible syntax.`,
}), // "`bun:test`"),

View File

@@ -521,7 +521,7 @@ When a security scanner is configured:
- Installation is cancelled if fatal issues are found
- Security warnings are displayed during installation
Learn more about [using and writing security scanners](/docs/install/security-scanner-api).
Learn more about [using and writing security scanners](/docs/install/security).
### `install.linker`

View File

@@ -149,6 +149,12 @@ describe.only("only", () => {
The following command will only execute tests #2 and #3.
```sh
$ bun test --only
```
The following command will only execute tests #1, #2 and #3.
```sh
$ bun test
```
@@ -750,76 +756,3 @@ Bun implements the following matchers. Full Jest compatibility is on the roadmap
- [`.toThrowErrorMatchingInlineSnapshot()`](https://jestjs.io/docs/expect#tothrowerrormatchinginlinesnapshotinlinesnapshot)
{% /table %}
## TypeScript Type Safety
Bun's test runner provides enhanced TypeScript support with intelligent type checking for your test assertions. The type system helps catch potential bugs at compile time while still allowing flexibility when needed.
### Strict Type Checking by Default
By default, Bun's test matchers enforce strict type checking between the actual value and expected value:
```ts
import { expect, test } from "bun:test";
test("strict typing", () => {
const str = "hello";
const num = 42;
expect(str).toBe("hello"); // ✅ OK: string to string
expect(num).toBe(42); // ✅ OK: number to number
expect(str).toBe(42); // ❌ TypeScript error: string vs number
});
```
This helps catch common mistakes where you might accidentally compare values of different types.
### Relaxed Type Checking with Type Parameters
Sometimes you need more flexibility in your tests, especially when working with:
- Dynamic data from APIs
- Polymorphic functions that can return multiple types
- Generic utility functions
- Migration of existing test suites
For these cases, you can "opt out" of strict type checking by providing an explicit type parameter to matcher methods:
```ts
import { expect, test } from "bun:test";
test("relaxed typing with type parameters", () => {
const value: unknown = getSomeValue();
// These would normally cause TypeScript errors, but type parameters allow them:
expect(value).toBe<number>(42); // No TS error, runtime check still works
expect(value).toEqual<string>("hello"); // No TS error, runtime check still works
expect(value).toStrictEqual<boolean>(true); // No TS error, runtime check still works
});
test("useful for dynamic data", () => {
const apiResponse: any = { status: "success" };
// Without type parameter: TypeScript error (any vs string)
// expect(apiResponse.status).toBe("success");
// With type parameter: No TypeScript error, runtime assertion still enforced
expect(apiResponse.status).toBe<string>("success"); // ✅ OK
});
```
### Migration from Looser Type Systems
If migrating from a test framework with looser TypeScript integration, you can use type parameters as a stepping stone:
```ts
// Old Jest test that worked but wasn't type-safe
expect(response.data).toBe(200); // No type error in some setups
// Bun equivalent with explicit typing during migration
expect(response.data).toBe<number>(200); // Explicit about expected type
// Ideal Bun test after refactoring
const statusCode: number = response.data;
expect(statusCode).toBe(200); // Type-safe without explicit parameter
```

View File

@@ -1,90 +0,0 @@
// this file is intended to be runnable both from node and bun
var { readFileSync, writeFileSync } = require("fs");
var { join } = require("path");

// Rewrites the generated headers.zig IN PLACE: keeps the `extern fn`
// declarations plus the opaque-type constants they reference, rewrites
// translate-c pointer spellings to the hand-maintained bindings.* types,
// and prepends the contents of headers-replacements.zig.
const destination = join(__dirname, "../src/bun.js/bindings/headers.zig");
const replacements = join(__dirname, "../src/bun.js/bindings/headers-replacements.zig");

console.log("Writing to", destination);

var output = "// GENERATED CODE - DO NOT MODIFY BY HAND\n\n";
var input = readFileSync(destination, "utf8");

// Bounds of the span holding the extern declarations: back up to the
// newline shortly before the first "extern fn" (the -128 gives slack to
// include that line's start), and stop at the end of the line containing
// the last "extern".
const first_extern = input.indexOf("extern fn");
const first_extern_line = input.indexOf("\n", first_extern - 128);
const last_extern_fn = input.lastIndexOf("extern");
const last_extern_fn_line = input.indexOf("\n", last_extern_fn);

// From everything OUTSIDE that span, keep only JSC_/WTF_/Web_ constant
// declarations — except JSValue and CatchScope, which are excluded here
// (presumably provided by headers-replacements.zig — TODO confirm).
const keep = (input.substring(0, first_extern_line) + input.substring(last_extern_fn_line))
  .split("\n")
  .filter(a => /const (JSC|WTF|Web)_/gi.test(a) && !a.includes("JSValue") && !a.includes("CatchScope"))
  .join("\n")
  .trim();
input = keep + input.slice(first_extern_line, last_extern_fn_line);

// Rewrite C pointer spellings to bindings.* types. NOTE: order matters —
// the broad prefix rewrites run first, then specific pointer shapes
// ([*c] vs * vs ?*) are tightened afterwards.
input = input.replaceAll("*WebCore__", "*bindings.");
input = input.replaceAll("*JSC__", "*bindings.");
input = input.replaceAll("[*c] JSC__", "[*c]bindings.");
input = input.replaceAll("[*c]JSC__", "[*c]bindings.");
input = input.replaceAll("[*c]bindings.JSGlobalObject", "*bindings.JSGlobalObject");
input = input.replaceAll("[*c]bindings.JSPromise", "?*bindings.JSPromise");
input = input.replaceAll("[*c]const bindings.JSPromise", "?*const bindings.JSPromise");
input = input.replaceAll("[*c] const JSC__", "[*c]const bindings.");
input = input.replaceAll("[*c]Inspector__ScriptArguments", "[*c]bindings.ScriptArguments");
// The second replaceAll undoes double-prefixing for names that already
// carried "bindings." before the first rewrite.
input = input
  .replaceAll("VirtualMachine", "bindings.VirtualMachine")
  .replaceAll("bindings.bindings.VirtualMachine", "bindings.VirtualMachine");
input = input.replaceAll("?*JSC__JSGlobalObject", "*bindings.JSGlobalObject");
input = input.replaceAll("?*bindings.CallFrame", "*bindings.CallFrame");
input = input.replaceAll("[*c]bindings.VM", "*bindings.VM");

// One-off substitutions that do not fit the patterns above. Applied after
// the rewrites, so keys are matched against the already-transformed text.
const hardcode = {
  "[*c][*c]JSC__Exception": "*?*JSC__Exception ",
  "[*c]?*anyopaque": "[*c]*anyopaque",
  "[*c]JSC__JSGlobalObject": "?*JSC__JSGlobalObject",
};
for (let key in hardcode) {
  const value = hardcode[key];
  input = input.replaceAll(key, value);
}

// Line prefixes of translate-c noise to drop: libc/builtin typedefs and
// constants that the replacements file already provides.
const remove = [
  "pub const __darwin",
  "pub const _",
  "pub const __builtin",
  "pub const int",
  "pub const INT",
  "pub const uint",
  "pub const UINT",
  "pub const WCHAR",
  "pub const wchar",
  "pub const intmax",
  "pub const INTMAX",
  "pub const uintmax",
  "pub const UINTMAX",
  "pub const max_align_t",
  "pub const ZigErrorCode",
  "pub const JSClassRef",
  "pub const __",
];
// Blank out (rather than splice) matching lines; empties are filtered below.
var lines = input.split("\n");
for (let prefix of remove) {
  for (let i = 0; i < lines.length; i++) {
    const line = lines[i];
    if (line.startsWith(prefix)) {
      lines[i] = "";
    }
  }
}
// Also drop every line mentioning a translate-c struct_* type.
for (let i = 0; i < lines.length; i++) {
  const line = lines[i];
  if (line.includes("struct_")) {
    lines[i] = "";
    continue;
  }
}
input = lines.filter(a => a.length > 0).join("\n");

// Final layout: generated-code banner, hand-written replacements, then the
// cleaned extern declarations.
writeFileSync(destination, output + "\n" + readFileSync(replacements, "utf8").trim() + "\n" + input.trim() + "\n");

View File

@@ -19,6 +19,3 @@ command script import -c bun_pretty_printer.py
command script delete btjs
command alias btjs p {printf("gathering btjs trace...\n");printf("%s\n", (char*)dumpBtjsTrace())}
# do not pass SIGHUP on to child process. it is often not the real error and the stop point will be nonsensical.
process handle -p false -s false -n true SIGHUP

View File

@@ -1,137 +0,0 @@
const fs = require("fs");
const path = require("path");
const { execSync } = require("child_process");
// Run a shell command synchronously, echoing it first (shell-style "$ cmd").
// Forces CI=true into the environment; caller-supplied opts.env entries win
// over both CI and the inherited process.env. Returns execSync's output.
const exec = (command, options = {}) => {
  console.log("$", command);
  const env = { CI: "true", ...process.env, ...(options.env || {}) };
  return execSync(command, { ...options, env });
};
const DRY_RUN = !!process.env.DRY_RUN;
// Number of example packages successfully published.
var count = 0;
// Every directory under examples/ containing a package.json whose name is
// scoped "@bun-examples" is a publish candidate.
const examplesFolderEntries = fs.readdirSync(path.join(process.cwd(), "examples"), { withFileTypes: true });
// Accumulates [name, { version, description }] pairs for the umbrella
// package built at the end.
const packageNames = [];
for (let folder of examplesFolderEntries) {
  if (!folder.isDirectory()) continue;
  const absolute = path.resolve(process.cwd(), "examples", folder.name);
  // Skip folders without a readable package.json.
  let packageJSONText;
  try {
    packageJSONText = fs.readFileSync(path.join(absolute, "package.json"), "utf8");
  } catch {
    continue;
  }
  let packageJSON = JSON.parse(packageJSONText);
  if (!packageJSON.name) continue;
  if (!packageJSON.name.startsWith("@bun-examples")) continue;
  // Look up the latest version already on the registry, defaulting to 0.0.1.
  // NOTE(review): `version` is assigned but not fed back into packageJSON
  // below — confirm whether it is still needed.
  var version = "0.0.1";
  try {
    const _versions = exec(`npm view ${packageJSON.name} versions --json`).toString().trim();
    if (_versions.length > 0) {
      const versionsArray = JSON.parse(_versions);
      version = versionsArray[versionsArray.length - 1];
    }
  } catch (exception) {
    // Best-effort: a not-yet-published package (or npm hiccup) just keeps
    // the default version.
    console.error(exception);
  }
  var retryCount = 5;
  // Never commit lockfiles
  try {
    fs.rmSync(path.join(absolute, "package-lock.json"));
  } catch (exception) {}
  try {
    fs.rmSync(path.join(absolute, "yarn.lock"));
  } catch (exception) {}
  try {
    fs.rmSync(path.join(absolute, "pnpm-lock.yaml"));
  } catch (exception) {}
  // Copy .gitignore to "gitignore" (presumably because npm strips dotfile
  // ignores from published tarballs — confirm).
  try {
    fs.copyFileSync(path.join(absolute, ".gitignore"), path.join(absolute, "gitignore"));
  } catch (exception) {}
  // Up to 5 publish attempts, bumping the patch version on each pass so a
  // version collision on the registry is retried with the next number.
  restart: while (retryCount-- > 0) {
    packageJSON.version = require("semver").inc(packageJSON.version, "patch");
    // Normalize the manifest for publishing: drop private/license flags and
    // prefer "module" over "main" as the entry point.
    if ("private" in packageJSON) delete packageJSON.private;
    if ("license" in packageJSON) delete packageJSON.license;
    if ("main" in packageJSON && !("module" in packageJSON)) {
      packageJSON.module = packageJSON.main;
      delete packageJSON.main;
    }
    fs.writeFileSync(path.join(absolute, "package.json"), JSON.stringify(packageJSON, null, 2));
    try {
      exec(`npm version patch --force --no-commit-hooks --no-git-tag-version`, {
        cwd: absolute,
      });
      // Re-read the manifest that npm just rewrote.
      packageJSON = JSON.parse(fs.readFileSync(path.join(absolute, "package.json"), "utf8"));
      version = packageJSON.version;
    } catch (e) {
      // E404 means the package does not exist on the registry yet, which is
      // fine for a first publish; anything else is fatal.
      if (e.code !== "E404") {
        throw e;
      }
    }
    try {
      exec(`npm publish ${DRY_RUN ? "--dry-run" : ""} --access public --registry https://registry.npmjs.org/`, {
        cwd: absolute,
      });
      packageNames.push([
        packageJSON.name,
        {
          version: packageJSON.version,
          description: packageJSON.description || "",
        },
      ]);
      count++;
      break;
    } catch (exception) {
      // Publish failed — loop again with the next patch version.
      continue restart;
    }
  }
}
// Publish an umbrella "bun-examples-all" package indexing everything that
// was just published, rebuilt from scratch each run.
if (packageNames.length > 0) {
  const packageJSON = {
    name: "bun-examples-all",
    private: false,
    version: `0.0.${Date.now()}`,
    description: "All bun-examples",
    examples: Object.fromEntries(packageNames),
  };
  const dir = path.join(process.cwd(), "examples/bun-examples-all");
  try {
    fs.rmSync(dir, {
      recursive: true,
      force: true,
    });
  } catch (exception) {}
  try {
    fs.mkdirSync(dir, {
      recursive: true,
    });
  } catch (exception) {}
  fs.writeFileSync(path.join(dir, "package.json"), JSON.stringify(packageJSON, null, 2));
  exec(`npm publish ${DRY_RUN ? "--dry-run" : ""} --access public --registry https://registry.npmjs.org/`, {
    cwd: dir,
  });
}
console.log(`Published ${count} packages`);

View File

@@ -1,7 +1,7 @@
{
"private": true,
"name": "bun",
"version": "1.2.23",
"version": "1.2.22",
"workspaces": [
"./packages/bun-types",
"./packages/@types/bun"

View File

@@ -1,14 +0,0 @@
{
"lockfileVersion": 1,
"workspaces": {
"": {
"name": "bun-error",
"dependencies": {
"preact": "^10.27.2",
},
},
},
"packages": {
"preact": ["preact@10.27.2", "", {}, "sha512-5SYSgFKSyhCbk6SrXyMpqjb5+MQBgfvEKE/OC+PujcY34sOpqtr+0AZQtPYx5IA6VxynQ7rUPCtKzyovpj9Bpg=="],
}
}

View File

@@ -1,6 +1,5 @@
import type { JSX } from "preact";
import { createContext, render } from "preact";
import { useCallback, useContext, useEffect, useRef, useState } from "preact/hooks";
import React, { createContext, useContext } from "react";
import { render, unmountComponentAtNode } from "react-dom";
import type {
FallbackMessageContainer,
JSException,
@@ -165,17 +164,17 @@ const maybeBlobFileURL = (filename: string, line?: number, column?: number): str
return srcFileURL(filename, line, column);
};
const openWithoutFlashOfNewTab: JSX.MouseEventHandler<HTMLAnchorElement> = event => {
const target = event.currentTarget as HTMLAnchorElement;
const openWithoutFlashOfNewTab: React.MouseEventHandler<HTMLAnchorElement> = event => {
const target = event.currentTarget;
const href = target.getAttribute("href");
if (!href || event.button !== 0) {
return true;
}
event.preventDefault();
event.preventDefault();
event.stopPropagation();
event.stopImmediatePropagation();
event.nativeEvent.preventDefault();
event.nativeEvent.stopPropagation();
event.nativeEvent.stopImmediatePropagation();
const headers = new Headers();
headers.set("Accept", "text/plain");
@@ -318,17 +317,17 @@ const AsyncSourceLines = ({
highlight: number;
highlightColumnStart: number;
highlightColumnEnd: number;
children?: any;
children?: React.ReactNode;
buildURL: (line?: number, column?: number) => string;
sourceLines: SourceLine[];
setSourceLines: (lines: SourceLine[]) => void;
}) => {
const [loadState, setLoadState] = useState(LoadState.pending);
const [loadState, setLoadState] = React.useState(LoadState.pending);
const controller = useRef<AbortController | null>(null);
const url = useRef<string>(buildURL(0, 0));
const controller = React.useRef<AbortController | null>(null);
const url = React.useRef<string>(buildURL(0, 0));
useEffect(() => {
React.useEffect(() => {
controller.current = new AbortController();
var cancelled = false;
fetch(url.current, {
@@ -433,7 +432,7 @@ const SourceLines = ({
highlight: number;
highlightColumnStart: number;
highlightColumnEnd: number;
children?: any;
children?: React.ReactNode;
buildURL: (line?: number, column?: number) => string;
}) => {
let start = sourceLines.length;
@@ -462,7 +461,7 @@ const SourceLines = ({
const leftPad = maxLineNumber.toString(10).length - minLineNumber.toString(10).length;
const _sourceLines = sourceLines.slice(start, end);
const lines = new Array(_sourceLines.length + (Array.isArray(children) ? children.length : children ? 1 : 0));
const lines = new Array(_sourceLines.length + React.Children.count(children));
let highlightI = 0;
for (let i = 0; i < _sourceLines.length; i++) {
@@ -514,7 +513,7 @@ const SourceLines = ({
const BuildErrorSourceLines = ({ location, filename }: { location: Location; filename: string }) => {
const { line, line_text, column } = location;
const sourceLines: SourceLine[] = [{ line, text: line_text }];
const buildURL = useCallback((line, column) => srcFileURL(filename, line, column), [filename]);
const buildURL = React.useCallback((line, column) => srcFileURL(filename, line, column), [srcFileURL, filename]);
return (
<SourceLines
sourceLines={sourceLines}
@@ -670,15 +669,15 @@ const NativeStackTrace = ({
frames: StackFrame[];
sourceLines: SourceLine[];
setSourceLines: (sourceLines: SourceLine[]) => void;
children?: any;
children?: React.ReactNode;
isClient: boolean;
}) => {
const { file = "", position } = frames[0];
const { cwd } = useContext(ErrorGroupContext);
const filename = normalizedFilename(file, cwd);
const urlBuilder = isClient ? clientURL : maybeBlobFileURL;
const ref = useRef<HTMLDivElement>(null);
const buildURL = useCallback((line, column) => urlBuilder(file, line, column), [file, urlBuilder]);
const ref = React.useRef<HTMLDivElement>(null);
const buildURL = React.useCallback((line, column) => urlBuilder(file, line, column), [file, urlBuilder]);
return (
<div ref={ref} className={`BunError-NativeStackTrace`}>
@@ -733,7 +732,7 @@ const Indent = ({ by, children }) => {
const JSException = ({ value, isClient = false }: { value: JSExceptionType; isClient: boolean }) => {
const tag = isClient ? ErrorTagType.client : ErrorTagType.server;
const [sourceLines, _setSourceLines] = useState(value?.stack?.source_lines ?? []);
const [sourceLines, _setSourceLines] = React.useState(value?.stack?.source_lines ?? []);
var message = value.message || "";
var name = value.name || "";
if (!name && !message) {
@@ -1243,7 +1242,7 @@ export function renderRuntimeError(error: Error) {
export function dismissError() {
if (reactRoot) {
render(null, reactRoot);
unmountComponentAtNode(reactRoot);
const root = document.getElementById("__bun__error-root");
if (root) root.remove();
reactRoot = null;

View File

@@ -5,9 +5,14 @@
"license": "MIT",
"private": true,
"scripts": {
"build": "bun build --production --define:process.env.NODE_ENV=\"'production'\" --minify index.tsx bun-error.css --outdir=dist --target=browser --format=esm"
"build": "esbuild --define:process.env.NODE_ENV=\"'production'\" --minify index.tsx bun-error.css --bundle --outdir=dist --platform=browser --format=esm"
},
"dependencies": {
"preact": "^10.27.2"
"esbuild": "latest",
"react": "^17.0.2",
"react-dom": "^17.0.2"
},
"devDependencies": {
"@types/react": "^17.0.39"
}
}

View File

@@ -1,11 +1,10 @@
{
"compilerOptions": {
"jsx": "react",
"lib": ["ESNext", "DOM"],
"module": "esnext",
"target": "esnext",
"moduleResolution": "node",
"allowSyntheticDefaultImports": true,
"jsx": "react-jsx",
"jsxImportSource": "preact"
"allowSyntheticDefaultImports": true
}
}

View File

@@ -1819,7 +1819,6 @@ declare module "bun" {
whitespace?: boolean;
syntax?: boolean;
identifiers?: boolean;
keepNames?: boolean;
};
/**
@@ -1899,18 +1898,6 @@ declare module "bun" {
*/
tsconfig?: string;
/**
* JSX configuration options
*/
jsx?: {
runtime?: "automatic" | "classic";
importSource?: string;
factory?: string;
fragment?: string;
sideEffects?: boolean;
development?: boolean;
};
outdir?: string;
}

View File

@@ -1556,15 +1556,6 @@ declare var URL: Bun.__internal.UseLibDomIfAvailable<
}
>;
/**
* The **`AbortController`** interface represents a controller object that allows you to abort one or more Web requests as and when desired.
*
* [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortController)
*/
interface AbortController {
readonly signal: AbortSignal;
abort(reason?: any): void;
}
declare var AbortController: Bun.__internal.UseLibDomIfAvailable<
"AbortController",
{

View File

@@ -26,6 +26,6 @@
/// <reference path="./bun.ns.d.ts" />
// Must disable this so it doesn't conflict with the DOM onmessage type, but still
// @ts-ignore Must disable this so it doesn't conflict with the DOM onmessage type, but still
// allows us to declare our own globals that Node's types can "see" and not conflict with
declare var onmessage: Bun.__internal.UseLibDomIfAvailable<"onmessage", never>;
declare var onmessage: never;

View File

@@ -270,14 +270,6 @@ declare module "bun" {
*/
hmset(key: RedisClient.KeyLike, fieldValues: string[]): Promise<string>;
/**
* Get the value of a hash field
* @param key The hash key
* @param field The field to get
* @returns Promise that resolves with the field value or null if the field doesn't exist
*/
hget(key: RedisClient.KeyLike, field: RedisClient.KeyLike): Promise<string | null>;
/**
* Get the values of all the given hash fields
* @param key The hash key

View File

@@ -58,7 +58,7 @@ declare module "bun" {
* // "bun"
* ```
*/
function env(newEnv?: Record<string, string | undefined> | NodeJS.Dict<string> | undefined): $;
function env(newEnv?: Record<string, string | undefined>): $;
/**
*
@@ -106,7 +106,7 @@ declare module "bun" {
* expect(stdout.toString()).toBe("LOL!");
* ```
*/
env(newEnv: Record<string, string | undefined> | NodeJS.Dict<string> | undefined): this;
env(newEnv: Record<string, string> | undefined): this;
/**
* By default, the shell will write to the current process's stdout and stderr, as well as buffering that output.

View File

@@ -41,22 +41,22 @@ declare module "bun" {
class PostgresError extends SQLError {
public readonly code: string;
public readonly errno?: string | undefined;
public readonly detail?: string | undefined;
public readonly hint?: string | undefined;
public readonly severity?: string | undefined;
public readonly position?: string | undefined;
public readonly internalPosition?: string | undefined;
public readonly internalQuery?: string | undefined;
public readonly where?: string | undefined;
public readonly schema?: string | undefined;
public readonly table?: string | undefined;
public readonly column?: string | undefined;
public readonly dataType?: string | undefined;
public readonly constraint?: string | undefined;
public readonly file?: string | undefined;
public readonly line?: string | undefined;
public readonly routine?: string | undefined;
public readonly errno: string | undefined;
public readonly detail: string | undefined;
public readonly hint: string | undefined;
public readonly severity: string | undefined;
public readonly position: string | undefined;
public readonly internalPosition: string | undefined;
public readonly internalQuery: string | undefined;
public readonly where: string | undefined;
public readonly schema: string | undefined;
public readonly table: string | undefined;
public readonly column: string | undefined;
public readonly dataType: string | undefined;
public readonly constraint: string | undefined;
public readonly file: string | undefined;
public readonly line: string | undefined;
public readonly routine: string | undefined;
constructor(
message: string,
@@ -84,8 +84,8 @@ declare module "bun" {
class MySQLError extends SQLError {
public readonly code: string;
public readonly errno?: number | undefined;
public readonly sqlState?: string | undefined;
public readonly errno: number | undefined;
public readonly sqlState: string | undefined;
constructor(message: string, options: { code: string; errno: number | undefined; sqlState: string | undefined });
}
@@ -143,13 +143,13 @@ declare module "bun" {
/**
* Database server hostname
* @deprecated Prefer {@link hostname}
* @default "localhost"
*/
host?: string | undefined;
/**
* Database server hostname
* Database server hostname (alias for host)
* @deprecated Prefer {@link host}
* @default "localhost"
*/
hostname?: string | undefined;
@@ -264,14 +264,13 @@ declare module "bun" {
* Whether to use TLS/SSL for the connection
* @default false
*/
tls?: Bun.BunFile | TLSOptions | boolean | undefined;
tls?: TLSOptions | boolean | undefined;
/**
* Whether to use TLS/SSL for the connection (alias for tls)
* @deprecated Prefer {@link tls}
* @default false
*/
ssl?: Bun.BunFile | TLSOptions | boolean | undefined;
ssl?: TLSOptions | boolean | undefined;
/**
* Unix domain socket path for connection

View File

@@ -14,6 +14,11 @@
* ```
*/
declare module "bun:test" {
/**
* -- Mocks --
*
* @category Testing
*/
export type Mock<T extends (...args: any[]) => any> = JestMock.Mock<T>;
export const mock: {
@@ -91,7 +96,6 @@ declare module "bun:test" {
export namespace jest {
function restoreAllMocks(): void;
function clearAllMocks(): void;
function resetAllMocks(): void;
function fn<T extends (...args: any[]) => any>(func?: T): Mock<T>;
function setSystemTime(now?: number | Date): void;
function setTimeout(milliseconds: number): void;
@@ -181,9 +185,6 @@ declare module "bun:test" {
* Clear all mock state (calls, results, etc.) without restoring original implementation
*/
clearAllMocks: typeof jest.clearAllMocks;
resetAllMocks: typeof jest.resetAllMocks;
useFakeTimers: typeof jest.useFakeTimers;
useRealTimers: typeof jest.useRealTimers;
};
interface FunctionLike {
@@ -210,26 +211,31 @@ declare module "bun:test" {
*
* @category Testing
*/
export interface Describe<T extends Readonly<any[]>> {
export interface Describe {
(fn: () => void): void;
(label: DescribeLabel, fn: (...args: T) => void): void;
(label: DescribeLabel, fn: () => void): void;
/**
* Skips all other tests, except this group of tests.
*
* @param label the label for the tests
* @param fn the function that defines the tests
*/
only: Describe<T>;
only(label: DescribeLabel, fn: () => void): void;
/**
* Skips this group of tests.
*
* @param label the label for the tests
* @param fn the function that defines the tests
*/
skip: Describe<T>;
skip(label: DescribeLabel, fn: () => void): void;
/**
* Marks this group of tests as to be written or to be fixed.
*
* @param label the label for the tests
* @param fn the function that defines the tests
*/
todo: Describe<T>;
/**
* Marks this group of tests to be executed concurrently.
*/
concurrent: Describe<T>;
todo(label: DescribeLabel, fn?: () => void): void;
/**
* Runs this group of tests, only if `condition` is true.
*
@@ -237,27 +243,37 @@ declare module "bun:test" {
*
* @param condition if these tests should run
*/
if(condition: boolean): Describe<T>;
if(condition: boolean): (label: DescribeLabel, fn: () => void) => void;
/**
* Skips this group of tests, if `condition` is true.
*
* @param condition if these tests should be skipped
*/
skipIf(condition: boolean): Describe<T>;
skipIf(condition: boolean): (label: DescribeLabel, fn: () => void) => void;
/**
* Marks this group of tests as to be written or to be fixed, if `condition` is true.
*
* @param condition if these tests should be skipped
*/
todoIf(condition: boolean): Describe<T>;
todoIf(condition: boolean): (label: DescribeLabel, fn: () => void) => void;
/**
* Returns a function that runs for each item in `table`.
*
* @param table Array of Arrays with the arguments that are passed into the test fn for each row.
*/
each<T extends Readonly<[any, ...any[]]>>(table: readonly T[]): Describe<[...T]>;
each<T extends any[]>(table: readonly T[]): Describe<[...T]>;
each<T>(table: T[]): Describe<[T]>;
each<T extends Readonly<[any, ...any[]]>>(
table: readonly T[],
): (label: DescribeLabel, fn: (...args: [...T]) => void | Promise<unknown>, options?: number | TestOptions) => void;
each<T extends any[]>(
table: readonly T[],
): (
label: DescribeLabel,
fn: (...args: Readonly<T>) => void | Promise<unknown>,
options?: number | TestOptions,
) => void;
each<T>(
table: T[],
): (label: DescribeLabel, fn: (...args: T[]) => void | Promise<unknown>, options?: number | TestOptions) => void;
}
/**
* Describes a group of related tests.
@@ -275,7 +291,7 @@ declare module "bun:test" {
* @param label the label for the tests
* @param fn the function that defines the tests
*/
export const describe: Describe<[]>;
export const describe: Describe;
/**
* Skips a group of related tests.
*
@@ -284,9 +300,7 @@ declare module "bun:test" {
* @param label the label for the tests
* @param fn the function that defines the tests
*/
export const xdescribe: Describe<[]>;
type HookOptions = number | { timeout?: number };
export const xdescribe: Describe;
/**
* Runs a function, once, before all the tests.
*
@@ -303,10 +317,7 @@ declare module "bun:test" {
*
* @param fn the function to run
*/
export function beforeAll(
fn: (() => void | Promise<unknown>) | ((done: (err?: unknown) => void) => void),
options?: HookOptions,
): void;
export function beforeAll(fn: (() => void | Promise<unknown>) | ((done: (err?: unknown) => void) => void)): void;
/**
* Runs a function before each test.
*
@@ -317,10 +328,7 @@ declare module "bun:test" {
*
* @param fn the function to run
*/
export function beforeEach(
fn: (() => void | Promise<unknown>) | ((done: (err?: unknown) => void) => void),
options?: HookOptions,
): void;
export function beforeEach(fn: (() => void | Promise<unknown>) | ((done: (err?: unknown) => void) => void)): void;
/**
* Runs a function, once, after all the tests.
*
@@ -337,10 +345,7 @@ declare module "bun:test" {
*
* @param fn the function to run
*/
export function afterAll(
fn: (() => void | Promise<unknown>) | ((done: (err?: unknown) => void) => void),
options?: HookOptions,
): void;
export function afterAll(fn: (() => void | Promise<unknown>) | ((done: (err?: unknown) => void) => void)): void;
/**
* Runs a function after each test.
*
@@ -349,10 +354,7 @@ declare module "bun:test" {
*
* @param fn the function to run
*/
export function afterEach(
fn: (() => void | Promise<unknown>) | ((done: (err?: unknown) => void) => void),
options?: HookOptions,
): void;
export function afterEach(fn: (() => void | Promise<unknown>) | ((done: (err?: unknown) => void) => void)): void;
/**
* Sets the default timeout for all tests in the current file. If a test specifies a timeout, it will
* override this value. The default timeout is 5000ms (5 seconds).
@@ -385,11 +387,6 @@ declare module "bun:test" {
*/
repeats?: number;
}
type IsTuple<T> = T extends readonly unknown[]
? number extends T["length"]
? false // It's an array with unknown length, not a tuple
: true // It's an array with a fixed length (a tuple)
: false; // Not an array at all
/**
* Runs a test.
*
@@ -413,10 +410,10 @@ declare module "bun:test" {
*
* @category Testing
*/
export interface Test<T extends Readonly<any[]>> {
export interface Test {
(
label: string,
fn: (...args: IsTuple<T> extends true ? [...T, (err?: unknown) => void] : T) => void | Promise<unknown>,
fn: (() => void | Promise<unknown>) | ((done: (err?: unknown) => void) => void),
/**
* - If a `number`, sets the timeout for the test in milliseconds.
* - If an `object`, sets the options for the test.
@@ -427,13 +424,29 @@ declare module "bun:test" {
options?: number | TestOptions,
): void;
/**
* Skips all other tests, except this test.
* Skips all other tests, except this test when run with the `--only` option.
*
* @param label the label for the test
* @param fn the test function
* @param options the test timeout or options
*/
only: Test<T>;
only(
label: string,
fn: (() => void | Promise<unknown>) | ((done: (err?: unknown) => void) => void),
options?: number | TestOptions,
): void;
/**
* Skips this test.
*
* @param label the label for the test
* @param fn the test function
* @param options the test timeout or options
*/
skip: Test<T>;
skip(
label: string,
fn: (() => void | Promise<unknown>) | ((done: (err?: unknown) => void) => void),
options?: number | TestOptions,
): void;
/**
* Marks this test as to be written or to be fixed.
*
@@ -441,8 +454,16 @@ declare module "bun:test" {
* if the test passes, the test will be marked as `fail` in the results; you will have to
* remove the `.todo` or check that your test
* is implemented correctly.
*
* @param label the label for the test
* @param fn the test function
* @param options the test timeout or options
*/
todo: Test<T>;
todo(
label: string,
fn?: (() => void | Promise<unknown>) | ((done: (err?: unknown) => void) => void),
options?: number | TestOptions,
): void;
/**
* Marks this test as failing.
*
@@ -453,12 +474,16 @@ declare module "bun:test" {
*
* `test.failing` is very similar to {@link test.todo} except that it always
* runs, regardless of the `--todo` flag.
*
* @param label the label for the test
* @param fn the test function
* @param options the test timeout or options
*/
failing: Test<T>;
/**
* Runs the test concurrently with other concurrent tests.
*/
concurrent: Test<T>;
failing(
label: string,
fn?: (() => void | Promise<unknown>) | ((done: (err?: unknown) => void) => void),
options?: number | TestOptions,
): void;
/**
* Runs this test, if `condition` is true.
*
@@ -466,39 +491,51 @@ declare module "bun:test" {
*
* @param condition if the test should run
*/
if(condition: boolean): Test<T>;
if(
condition: boolean,
): (
label: string,
fn: (() => void | Promise<unknown>) | ((done: (err?: unknown) => void) => void),
options?: number | TestOptions,
) => void;
/**
* Skips this test, if `condition` is true.
*
* @param condition if the test should be skipped
*/
skipIf(condition: boolean): Test<T>;
skipIf(
condition: boolean,
): (
label: string,
fn: (() => void | Promise<unknown>) | ((done: (err?: unknown) => void) => void),
options?: number | TestOptions,
) => void;
/**
* Marks this test as to be written or to be fixed, if `condition` is true.
*
* @param condition if the test should be marked TODO
*/
todoIf(condition: boolean): Test<T>;
/**
* Marks this test as failing, if `condition` is true.
*
* @param condition if the test should be marked as failing
*/
failingIf(condition: boolean): Test<T>;
/**
* Runs the test concurrently with other concurrent tests, if `condition` is true.
*
* @param condition if the test should run concurrently
*/
concurrentIf(condition: boolean): Test<T>;
todoIf(
condition: boolean,
): (
label: string,
fn: (() => void | Promise<unknown>) | ((done: (err?: unknown) => void) => void),
options?: number | TestOptions,
) => void;
/**
* Returns a function that runs for each item in `table`.
*
* @param table Array of Arrays with the arguments that are passed into the test fn for each row.
*/
each<T extends Readonly<[any, ...any[]]>>(table: readonly T[]): Test<[...T]>;
each<T extends any[]>(table: readonly T[]): Test<[...T]>;
each<T>(table: T[]): Test<[T]>;
each<T extends Readonly<[any, ...any[]]>>(
table: readonly T[],
): (label: string, fn: (...args: [...T]) => void | Promise<unknown>, options?: number | TestOptions) => void;
each<T extends any[]>(
table: readonly T[],
): (label: string, fn: (...args: Readonly<T>) => void | Promise<unknown>, options?: number | TestOptions) => void;
each<T>(
table: T[],
): (label: string, fn: (...args: T[]) => void | Promise<unknown>, options?: number | TestOptions) => void;
}
/**
* Runs a test.
@@ -516,7 +553,7 @@ declare module "bun:test" {
* @param label the label for the test
* @param fn the test function
*/
export const test: Test<[]>;
export const test: Test;
export { test as it, xtest as xit };
/**
@@ -527,7 +564,7 @@ declare module "bun:test" {
* @param label the label for the test
* @param fn the test function
*/
export const xtest: Test<[]>;
export const xtest: Test;
/**
* Asserts that a value matches some criteria.
@@ -551,9 +588,7 @@ declare module "bun:test" {
* @param customFailMessage an optional custom message to display if the test fails.
* */
(actual?: never, customFailMessage?: string): Matchers<undefined>;
<T = unknown>(actual: T, customFailMessage?: string): Matchers<T>;
<T = unknown>(actual?: T, customFailMessage?: string): Matchers<T | undefined>;
<T = unknown>(actual?: T, customFailMessage?: string): Matchers<T>;
/**
* Access to negated asymmetric matchers.
@@ -871,7 +906,6 @@ declare module "bun:test" {
* @param message the message to display if the test fails (optional)
*/
pass: (message?: string) => void;
/**
* Assertion which fails.
*
@@ -883,7 +917,6 @@ declare module "bun:test" {
* expect().not.fail("hi");
*/
fail: (message?: string) => void;
/**
* Asserts that a value equals what is expected.
*
@@ -897,15 +930,9 @@ declare module "bun:test" {
* expect([123]).toBe([123]); // fail, use toEqual()
* expect(3 + 0.14).toBe(3.14); // fail, use toBeCloseTo()
*
* // TypeScript errors:
* expect("hello").toBe(3.14); // typescript error + fail
* expect("hello").toBe<number>(3.14); // no typescript error, but still fails
*
* @param expected the expected value
*/
toBe(expected: T): void;
toBe<X = T>(expected: NoInfer<X>): void;
/**
* Asserts that a number is odd.
*
@@ -915,7 +942,6 @@ declare module "bun:test" {
* expect(2).not.toBeOdd();
*/
toBeOdd(): void;
/**
* Asserts that a number is even.
*
@@ -925,7 +951,6 @@ declare module "bun:test" {
* expect(1).not.toBeEven();
*/
toBeEven(): void;
/**
* Asserts that value is close to the expected by floating point precision.
*
@@ -944,7 +969,6 @@ declare module "bun:test" {
* @param numDigits the number of digits to check after the decimal point. Default is `2`
*/
toBeCloseTo(expected: number, numDigits?: number): void;
/**
* Asserts that a value is deeply equal to what is expected.
*
@@ -957,8 +981,6 @@ declare module "bun:test" {
* @param expected the expected value
*/
toEqual(expected: T): void;
toEqual<X = T>(expected: NoInfer<X>): void;
/**
* Asserts that a value is deeply and strictly equal to
* what is expected.
@@ -983,8 +1005,6 @@ declare module "bun:test" {
* @param expected the expected value
*/
toStrictEqual(expected: T): void;
toStrictEqual<X = T>(expected: NoInfer<X>): void;
/**
* Asserts that the value is deep equal to an element in the expected array.
*
@@ -997,9 +1017,7 @@ declare module "bun:test" {
*
* @param expected the expected value
*/
toBeOneOf(expected: Iterable<T>): void;
toBeOneOf<X = T>(expected: NoInfer<Iterable<X>>): void;
toBeOneOf(expected: Array<unknown> | Iterable<unknown>): void;
/**
* Asserts that a value contains what is expected.
*
@@ -1013,9 +1031,7 @@ declare module "bun:test" {
*
* @param expected the expected value
*/
toContain(expected: T extends Iterable<infer U> ? U : T): void;
toContain<X = T>(expected: NoInfer<X extends Iterable<infer U> ? U : X>): void;
toContain(expected: unknown): void;
/**
* Asserts that an `object` contains a key.
*
@@ -1029,9 +1045,7 @@ declare module "bun:test" {
*
* @param expected the expected value
*/
toContainKey(expected: keyof T): void;
toContainKey<X = T>(expected: NoInfer<keyof X>): void;
toContainKey(expected: unknown): void;
/**
* Asserts that an `object` contains all the provided keys.
*
@@ -1046,9 +1060,7 @@ declare module "bun:test" {
*
* @param expected the expected value
*/
toContainAllKeys(expected: Array<keyof T>): void;
toContainAllKeys<X = T>(expected: NoInfer<Array<keyof X>>): void;
toContainAllKeys(expected: unknown): void;
/**
* Asserts that an `object` contains at least one of the provided keys.
* Asserts that an `object` contains all the provided keys.
@@ -1063,16 +1075,12 @@ declare module "bun:test" {
*
* @param expected the expected value
*/
toContainAnyKeys(expected: Array<keyof T>): void;
toContainAnyKeys<X = T>(expected: NoInfer<Array<keyof X>>): void;
toContainAnyKeys(expected: unknown): void;
/**
* Asserts that an `object` contain the provided value.
*
* This method is deep and will look through child properties to find the
* expected value.
*
* The input value must be an object.
* The value must be an object
*
* @example
* const shallow = { hello: "world" };
@@ -1096,16 +1104,11 @@ declare module "bun:test" {
*
* @param expected the expected value
*/
// Contributor note: In theory we could type this better but it would be a
// slow union to compute...
toContainValue(expected: unknown): void;
/**
* Asserts that an `object` contain the provided value.
*
* This is the same as {@link toContainValue}, but accepts an array of
* values instead.
*
* The value must be an object
*
* @example
@@ -1115,7 +1118,7 @@ declare module "bun:test" {
* expect(o).not.toContainValues(['qux', 'foo']);
* @param expected the expected value
*/
toContainValues(expected: Array<unknown>): void;
toContainValues(expected: unknown): void;
/**
* Asserts that an `object` contain all the provided values.
@@ -1129,7 +1132,7 @@ declare module "bun:test" {
* expect(o).not.toContainAllValues(['bar', 'foo']);
* @param expected the expected value
*/
toContainAllValues(expected: Array<unknown>): void;
toContainAllValues(expected: unknown): void;
/**
* Asserts that an `object` contain any provided value.
@@ -1144,7 +1147,7 @@ declare module "bun:test" {
* expect(o).not.toContainAnyValues(['qux']);
* @param expected the expected value
*/
toContainAnyValues(expected: Array<unknown>): void;
toContainAnyValues(expected: unknown): void;
/**
* Asserts that an `object` contains all the provided keys.
@@ -1156,9 +1159,7 @@ declare module "bun:test" {
*
* @param expected the expected value
*/
toContainKeys(expected: Array<keyof T>): void;
toContainKeys<X = T>(expected: NoInfer<Array<keyof X>>): void;
toContainKeys(expected: unknown): void;
/**
* Asserts that a value contains and equals what is expected.
*
@@ -1171,9 +1172,7 @@ declare module "bun:test" {
*
* @param expected the expected value
*/
toContainEqual(expected: T extends Iterable<infer U> ? U : T): void;
toContainEqual<X = T>(expected: NoInfer<X extends Iterable<infer U> ? U : X>): void;
toContainEqual(expected: unknown): void;
/**
* Asserts that a value has a `.length` property
* that is equal to the expected length.
@@ -1185,7 +1184,6 @@ declare module "bun:test" {
* @param length the expected length
*/
toHaveLength(length: number): void;
/**
* Asserts that a value has a property with the
* expected name, and value if provided.
@@ -1200,7 +1198,6 @@ declare module "bun:test" {
* @param value the expected property value, if provided
*/
toHaveProperty(keyPath: string | number | Array<string | number>, value?: unknown): void;
/**
* Asserts that a value is "truthy".
*
@@ -1213,7 +1210,6 @@ declare module "bun:test" {
* expect({}).toBeTruthy();
*/
toBeTruthy(): void;
/**
* Asserts that a value is "falsy".
*
@@ -1226,7 +1222,6 @@ declare module "bun:test" {
* expect({}).toBeTruthy();
*/
toBeFalsy(): void;
/**
* Asserts that a value is defined. (e.g. is not `undefined`)
*
@@ -1235,7 +1230,6 @@ declare module "bun:test" {
* expect(undefined).toBeDefined(); // fail
*/
toBeDefined(): void;
/**
* Asserts that the expected value is an instance of value
*
@@ -1244,7 +1238,6 @@ declare module "bun:test" {
* expect(null).toBeInstanceOf(Array); // fail
*/
toBeInstanceOf(value: unknown): void;
/**
* Asserts that a value is `undefined`.
*
@@ -1253,7 +1246,6 @@ declare module "bun:test" {
* expect(null).toBeUndefined(); // fail
*/
toBeUndefined(): void;
/**
* Asserts that a value is `null`.
*
@@ -1262,7 +1254,6 @@ declare module "bun:test" {
* expect(undefined).toBeNull(); // fail
*/
toBeNull(): void;
/**
* Asserts that a value is `NaN`.
*
@@ -1274,7 +1265,6 @@ declare module "bun:test" {
* expect("notanumber").toBeNaN(); // fail
*/
toBeNaN(): void;
/**
* Asserts that a value is a `number` and is greater than the expected value.
*
@@ -1286,7 +1276,6 @@ declare module "bun:test" {
* @param expected the expected number
*/
toBeGreaterThan(expected: number | bigint): void;
/**
* Asserts that a value is a `number` and is greater than or equal to the expected value.
*
@@ -1298,7 +1287,6 @@ declare module "bun:test" {
* @param expected the expected number
*/
toBeGreaterThanOrEqual(expected: number | bigint): void;
/**
* Asserts that a value is a `number` and is less than the expected value.
*
@@ -1310,7 +1298,6 @@ declare module "bun:test" {
* @param expected the expected number
*/
toBeLessThan(expected: number | bigint): void;
/**
* Asserts that a value is a `number` and is less than or equal to the expected value.
*
@@ -1322,7 +1309,6 @@ declare module "bun:test" {
* @param expected the expected number
*/
toBeLessThanOrEqual(expected: number | bigint): void;
/**
* Asserts that a function throws an error.
*
@@ -1343,7 +1329,6 @@ declare module "bun:test" {
* @param expected the expected error, error message, or error pattern
*/
toThrow(expected?: unknown): void;
/**
* Asserts that a function throws an error.
*
@@ -1365,7 +1350,6 @@ declare module "bun:test" {
* @alias toThrow
*/
toThrowError(expected?: unknown): void;
/**
* Asserts that a value matches a regular expression or includes a substring.
*
@@ -1376,7 +1360,6 @@ declare module "bun:test" {
* @param expected the expected substring or pattern.
*/
toMatch(expected: string | RegExp): void;
/**
* Asserts that a value matches the most recent snapshot.
*
@@ -1385,7 +1368,6 @@ declare module "bun:test" {
* @param hint Hint used to identify the snapshot in the snapshot file.
*/
toMatchSnapshot(hint?: string): void;
/**
* Asserts that a value matches the most recent snapshot.
*
@@ -1398,7 +1380,6 @@ declare module "bun:test" {
* @param hint Hint used to identify the snapshot in the snapshot file.
*/
toMatchSnapshot(propertyMatchers?: object, hint?: string): void;
/**
* Asserts that a value matches the most recent inline snapshot.
*
@@ -1409,7 +1390,6 @@ declare module "bun:test" {
* @param value The latest automatically-updated snapshot value.
*/
toMatchInlineSnapshot(value?: string): void;
/**
* Asserts that a value matches the most recent inline snapshot.
*
@@ -1425,7 +1405,6 @@ declare module "bun:test" {
* @param value The latest automatically-updated snapshot value.
*/
toMatchInlineSnapshot(propertyMatchers?: object, value?: string): void;
/**
* Asserts that a function throws an error matching the most recent snapshot.
*
@@ -1439,7 +1418,6 @@ declare module "bun:test" {
* @param value The latest automatically-updated snapshot value.
*/
toThrowErrorMatchingSnapshot(hint?: string): void;
/**
* Asserts that a function throws an error matching the most recent snapshot.
*
@@ -1453,7 +1431,6 @@ declare module "bun:test" {
* @param value The latest automatically-updated snapshot value.
*/
toThrowErrorMatchingInlineSnapshot(value?: string): void;
/**
* Asserts that an object matches a subset of properties.
*
@@ -1464,7 +1441,6 @@ declare module "bun:test" {
* @param subset Subset of properties to match with.
*/
toMatchObject(subset: object): void;
/**
* Asserts that a value is empty.
*
@@ -1475,7 +1451,6 @@ declare module "bun:test" {
* expect(new Set()).toBeEmpty();
*/
toBeEmpty(): void;
/**
* Asserts that a value is an empty `object`.
*
@@ -1484,7 +1459,6 @@ declare module "bun:test" {
* expect({ a: 'hello' }).not.toBeEmptyObject();
*/
toBeEmptyObject(): void;
/**
* Asserts that a value is `null` or `undefined`.
*
@@ -1493,7 +1467,6 @@ declare module "bun:test" {
* expect(undefined).toBeNil();
*/
toBeNil(): void;
/**
* Asserts that a value is a `array`.
*
@@ -1504,7 +1477,6 @@ declare module "bun:test" {
* expect({}).not.toBeArray();
*/
toBeArray(): void;
/**
* Asserts that a value is a `array` of a certain length.
*
@@ -1516,7 +1488,6 @@ declare module "bun:test" {
* expect({}).not.toBeArrayOfSize(0);
*/
toBeArrayOfSize(size: number): void;
/**
* Asserts that a value is a `boolean`.
*
@@ -1527,7 +1498,6 @@ declare module "bun:test" {
* expect(0).not.toBeBoolean();
*/
toBeBoolean(): void;
/**
* Asserts that a value is `true`.
*
@@ -1537,7 +1507,6 @@ declare module "bun:test" {
* expect(1).not.toBeTrue();
*/
toBeTrue(): void;
/**
* Asserts that a value matches a specific type.
*
@@ -1548,7 +1517,6 @@ declare module "bun:test" {
* expect([]).not.toBeTypeOf("boolean");
*/
toBeTypeOf(type: "bigint" | "boolean" | "function" | "number" | "object" | "string" | "symbol" | "undefined"): void;
/**
* Asserts that a value is `false`.
*
@@ -1558,7 +1526,6 @@ declare module "bun:test" {
* expect(0).not.toBeFalse();
*/
toBeFalse(): void;
/**
* Asserts that a value is a `number`.
*
@@ -1569,7 +1536,6 @@ declare module "bun:test" {
* expect(BigInt(1)).not.toBeNumber();
*/
toBeNumber(): void;
/**
* Asserts that a value is a `number`, and is an integer.
*
@@ -1579,7 +1545,6 @@ declare module "bun:test" {
* expect(NaN).not.toBeInteger();
*/
toBeInteger(): void;
/**
* Asserts that a value is an `object`.
*
@@ -1589,7 +1554,6 @@ declare module "bun:test" {
* expect(NaN).not.toBeObject();
*/
toBeObject(): void;
/**
* Asserts that a value is a `number`, and is not `NaN` or `Infinity`.
*
@@ -1600,7 +1564,6 @@ declare module "bun:test" {
* expect(Infinity).not.toBeFinite();
*/
toBeFinite(): void;
/**
* Asserts that a value is a positive `number`.
*
@@ -1610,7 +1573,6 @@ declare module "bun:test" {
* expect(NaN).not.toBePositive();
*/
toBePositive(): void;
/**
* Asserts that a value is a negative `number`.
*
@@ -1620,7 +1582,6 @@ declare module "bun:test" {
* expect(NaN).not.toBeNegative();
*/
toBeNegative(): void;
/**
* Asserts that a value is a number between a start and end value.
*
@@ -1628,7 +1589,6 @@ declare module "bun:test" {
* @param end the end number (exclusive)
*/
toBeWithin(start: number, end: number): void;
/**
* Asserts that a value is equal to the expected string, ignoring any whitespace.
*
@@ -1639,7 +1599,6 @@ declare module "bun:test" {
* @param expected the expected string
*/
toEqualIgnoringWhitespace(expected: string): void;
/**
* Asserts that a value is a `symbol`.
*
@@ -1648,7 +1607,6 @@ declare module "bun:test" {
* expect("foo").not.toBeSymbol();
*/
toBeSymbol(): void;
/**
* Asserts that a value is a `function`.
*
@@ -1656,7 +1614,6 @@ declare module "bun:test" {
* expect(() => {}).toBeFunction();
*/
toBeFunction(): void;
/**
* Asserts that a value is a `Date` object.
*
@@ -1668,7 +1625,6 @@ declare module "bun:test" {
* expect("2020-03-01").not.toBeDate();
*/
toBeDate(): void;
/**
* Asserts that a value is a valid `Date` object.
*
@@ -1678,7 +1634,6 @@ declare module "bun:test" {
* expect("2020-03-01").not.toBeValidDate();
*/
toBeValidDate(): void;
/**
* Asserts that a value is a `string`.
*
@@ -1688,7 +1643,6 @@ declare module "bun:test" {
* expect(123).not.toBeString();
*/
toBeString(): void;
/**
* Asserts that a value includes a `string`.
*
@@ -1697,14 +1651,12 @@ declare module "bun:test" {
* @param expected the expected substring
*/
toInclude(expected: string): void;
/**
* Asserts that a value includes a `string` {times} times.
* @param expected the expected substring
* @param times the number of times the substring should occur
*/
toIncludeRepeated(expected: string, times: number): void;
/**
* Checks whether a value satisfies a custom condition.
* @param {Function} predicate - The custom condition to be satisfied. It should be a function that takes a value as an argument (in this case the value from expect) and returns a boolean.
@@ -1716,21 +1668,18 @@ declare module "bun:test" {
* @link https://jest-extended.jestcommunity.dev/docs/matchers/toSatisfy
*/
toSatisfy(predicate: (value: T) => boolean): void;
/**
* Asserts that a value starts with a `string`.
*
* @param expected the string to start with
*/
toStartWith(expected: string): void;
/**
* Asserts that a value ends with a `string`.
*
* @param expected the string to end with
*/
toEndWith(expected: string): void;
/**
* Ensures that a mock function has returned successfully at least once.
*
@@ -1771,51 +1720,42 @@ declare module "bun:test" {
* Ensures that a mock function is called.
*/
toHaveBeenCalled(): void;
/**
* Ensures that a mock function is called an exact number of times.
* @alias toHaveBeenCalled
*/
toBeCalled(): void;
/**
* Ensures that a mock function is called an exact number of times.
*/
toHaveBeenCalledTimes(expected: number): void;
/**
* Ensure that a mock function is called with specific arguments.
* @alias toHaveBeenCalledTimes
*/
toBeCalledTimes(expected: number): void;
/**
* Ensure that a mock function is called with specific arguments.
*/
toHaveBeenCalledWith(...expected: unknown[]): void;
/**
* Ensure that a mock function is called with specific arguments.
* @alias toHaveBeenCalledWith
*/
toBeCalledWith(...expected: unknown[]): void;
/**
* Ensure that a mock function is called with specific arguments for the last call.
*/
toHaveBeenLastCalledWith(...expected: unknown[]): void;
/**
* Ensure that a mock function is called with specific arguments for the nth call.
* @alias toHaveBeenCalledWith
*/
lastCalledWith(...expected: unknown[]): void;
/**
* Ensure that a mock function is called with specific arguments for the nth call.
*/
toHaveBeenNthCalledWith(n: number, ...expected: unknown[]): void;
/**
* Ensure that a mock function is called with specific arguments for the nth call.
* @alias toHaveBeenCalledWith

View File

@@ -17,7 +17,6 @@
/* Todo: this file should lie in networking/bsd.c */
// NOLINTNEXTLINE(bugprone-reserved-identifier)
#define __APPLE_USE_RFC_3542
#include "libusockets.h"
@@ -26,23 +25,6 @@
#include <stdio.h>
#include <stdlib.h>
#if BUN_DEBUG
// Debug network traffic logging
static FILE *debug_recv_file = NULL;
static FILE *debug_send_file = NULL;
static int debug_logging_initialized = 0;
static void init_debug_logging() {
if (debug_logging_initialized) return;
debug_logging_initialized = 1;
const char *recv_path = getenv("BUN_RECV");
const char *send_path = getenv("BUN_SEND");
if (recv_path) if (!debug_recv_file) debug_recv_file = fopen(recv_path, "w");
if (send_path) if (!debug_send_file) debug_send_file = fopen(send_path, "w");
}
#endif
#ifndef _WIN32
// Necessary for the stdint include
#ifndef _GNU_SOURCE
@@ -593,7 +575,6 @@ LIBUS_SOCKET_DESCRIPTOR bsd_create_socket(int domain, int type, int protocol, in
*err = 0;
}
// NOLINTNEXTLINE(cppcoreguidelines-init-variables)
LIBUS_SOCKET_DESCRIPTOR created_fd;
#if defined(SOCK_CLOEXEC) && defined(SOCK_NONBLOCK)
const int flags = SOCK_CLOEXEC | SOCK_NONBLOCK;
@@ -697,7 +678,6 @@ int bsd_addr_get_port(struct bsd_addr_t *addr) {
// called by dispatch_ready_poll
LIBUS_SOCKET_DESCRIPTOR bsd_accept_socket(LIBUS_SOCKET_DESCRIPTOR fd, struct bsd_addr_t *addr) {
// NOLINTNEXTLINE(cppcoreguidelines-init-variables)
LIBUS_SOCKET_DESCRIPTOR accepted_fd;
while (1) {
@@ -741,17 +721,6 @@ ssize_t bsd_recv(LIBUS_SOCKET_DESCRIPTOR fd, void *buf, int length, int flags) {
continue;
}
#if BUN_DEBUG
// Debug logging for received data
if (ret > 0) {
init_debug_logging();
if (debug_recv_file) {
fwrite(buf, 1, ret, debug_recv_file);
fflush(debug_recv_file);
}
}
#endif
return ret;
}
}
@@ -819,17 +788,6 @@ ssize_t bsd_send(LIBUS_SOCKET_DESCRIPTOR fd, const char *buf, int length) {
continue;
}
#if BUN_DEBUG
// Debug logging for sent data
if (rc > 0) {
init_debug_logging();
if (debug_send_file) {
fwrite(buf, 1, rc, debug_send_file);
fflush(debug_send_file);
}
}
#endif
return rc;
}
}
@@ -857,7 +815,7 @@ int bsd_would_block() {
}
static int us_internal_bind_and_listen(LIBUS_SOCKET_DESCRIPTOR listenFd, struct sockaddr *listenAddr, socklen_t listenAddrLength, int backlog, int* error) {
int result = 0;
int result;
do
result = bind(listenFd, listenAddr, listenAddrLength);
while (IS_EINTR(result));
@@ -975,7 +933,7 @@ inline __attribute__((always_inline)) LIBUS_SOCKET_DESCRIPTOR bsd_bind_listen_fd
// return LIBUS_SOCKET_ERROR or the fd that represents listen socket
// listen both on ipv6 and ipv4
LIBUS_SOCKET_DESCRIPTOR bsd_create_listen_socket(const char *host, int port, int options, int* error) {
struct addrinfo hints, *result = NULL;
struct addrinfo hints, *result;
memset(&hints, 0, sizeof(struct addrinfo));
hints.ai_flags = AI_PASSIVE;
@@ -990,7 +948,7 @@ LIBUS_SOCKET_DESCRIPTOR bsd_create_listen_socket(const char *host, int port, int
}
LIBUS_SOCKET_DESCRIPTOR listenFd = LIBUS_SOCKET_ERROR;
struct addrinfo* listenAddr = NULL;
struct addrinfo *listenAddr;
for (struct addrinfo *a = result; a != NULL; a = a->ai_next) {
if (a->ai_family == AF_INET6) {
listenFd = bsd_create_socket(a->ai_family, a->ai_socktype, a->ai_protocol, NULL);
@@ -1176,7 +1134,7 @@ LIBUS_SOCKET_DESCRIPTOR bsd_create_udp_socket(const char *host, int port, int op
*err = 0;
}
struct addrinfo hints, *result = NULL;
struct addrinfo hints, *result;
memset(&hints, 0, sizeof(struct addrinfo));
hints.ai_flags = AI_PASSIVE;
@@ -1286,7 +1244,7 @@ LIBUS_SOCKET_DESCRIPTOR bsd_create_udp_socket(const char *host, int port, int op
}
int bsd_connect_udp_socket(LIBUS_SOCKET_DESCRIPTOR fd, const char *host, int port) {
struct addrinfo hints, *result = NULL;
struct addrinfo hints, *result;
memset(&hints, 0, sizeof(struct addrinfo));
hints.ai_family = AF_UNSPEC;
@@ -1396,8 +1354,8 @@ static int bsd_do_connect_raw(LIBUS_SOCKET_DESCRIPTOR fd, struct sockaddr *addr,
#else
int r = 0;
do {
int r;
do {
errno = 0;
r = connect(fd, (struct sockaddr *)addr, namelen);
} while (IS_EINTR(r));

View File

@@ -517,7 +517,7 @@ void *us_socket_context_connect(int ssl, struct us_socket_context_t *context, co
return us_socket_context_connect_resolved_dns(context, &addr, options, socket_ext_size);
}
struct addrinfo_request* ai_req = NULL;
struct addrinfo_request* ai_req;
if (Bun__addrinfo_get(loop, host, (uint16_t)port, &ai_req) == 0) {
// fast path for cached results
struct addrinfo_result *result = Bun__addrinfo_getRequestResult(ai_req);

View File

@@ -113,7 +113,7 @@ int passphrase_cb(char *buf, int size, int rwflag, void *u) {
const char *passphrase = (const char *)u;
size_t passphrase_length = strlen(passphrase);
memcpy(buf, passphrase, passphrase_length);
buf[passphrase_length] = 0;
// put null at end? no?
return (int)passphrase_length;
}
@@ -807,7 +807,7 @@ create_ssl_context_from_options(struct us_socket_context_options_t options) {
}
if (options.ca_file_name) {
STACK_OF(X509_NAME) * ca_list = NULL;
STACK_OF(X509_NAME) * ca_list;
ca_list = SSL_load_client_CA_file(options.ca_file_name);
if (ca_list == NULL) {
free_ssl_context(ssl_context);
@@ -825,7 +825,7 @@ create_ssl_context_from_options(struct us_socket_context_options_t options) {
if (options.dh_params_file_name) {
/* Set up ephemeral DH parameters. */
DH *dh_2048 = NULL;
FILE *paramfile = NULL;
FILE *paramfile;
paramfile = fopen(options.dh_params_file_name, "r");
if (paramfile) {
@@ -857,7 +857,7 @@ create_ssl_context_from_options(struct us_socket_context_options_t options) {
if (options.ssl_ciphers) {
if (!SSL_CTX_set_cipher_list(ssl_context, options.ssl_ciphers)) {
unsigned long ssl_err = ERR_get_error();
unsigned long ssl_err = ERR_get_error();
if (!(strlen(options.ssl_ciphers) == 0 && ERR_GET_REASON(ssl_err) == SSL_R_NO_CIPHER_MATCH)) {
// TLS1.2 ciphers were deliberately cleared, so don't consider
// SSL_R_NO_CIPHER_MATCH to be an error (this is how _set_cipher_suites()
@@ -881,8 +881,8 @@ create_ssl_context_from_options(struct us_socket_context_options_t options) {
int us_ssl_ctx_use_privatekey_content(SSL_CTX *ctx, const char *content,
int type) {
int reason_code = 0, ret = 0;
BIO *in = NULL;
int reason_code, ret = 0;
BIO *in;
EVP_PKEY *pkey = NULL;
in = BIO_new_mem_buf(content, strlen(content));
if (in == NULL) {
@@ -947,7 +947,7 @@ end:
}
int us_ssl_ctx_use_certificate_chain(SSL_CTX *ctx, const char *content) {
BIO *in = NULL;
BIO *in;
int ret = 0;
X509 *x = NULL;
@@ -975,9 +975,9 @@ int us_ssl_ctx_use_certificate_chain(SSL_CTX *ctx, const char *content) {
if (ret) {
// If we could set up our certificate, now proceed to the CA
// certificates.
X509 *ca = NULL;
int r = 0;
uint32_t err = 0;
X509 *ca;
int r;
uint32_t err;
SSL_CTX_clear_chain_certs(ctx);
@@ -1202,7 +1202,7 @@ SSL_CTX *create_ssl_context_from_bun_options(
if (options.ca_file_name) {
SSL_CTX_set_cert_store(ssl_context, us_get_default_ca_store());
STACK_OF(X509_NAME) * ca_list = NULL;
STACK_OF(X509_NAME) * ca_list;
ca_list = SSL_load_client_CA_file(options.ca_file_name);
if (ca_list == NULL) {
*err = CREATE_BUN_SOCKET_ERROR_LOAD_CA_FILE;
@@ -1268,7 +1268,7 @@ SSL_CTX *create_ssl_context_from_bun_options(
if (options.dh_params_file_name) {
/* Set up ephemeral DH parameters. */
DH *dh_2048 = NULL;
FILE *paramfile = NULL;
FILE *paramfile;
paramfile = fopen(options.dh_params_file_name, "r");
if (paramfile) {
@@ -1300,7 +1300,7 @@ SSL_CTX *create_ssl_context_from_bun_options(
if (options.ssl_ciphers) {
if (!SSL_CTX_set_cipher_list(ssl_context, options.ssl_ciphers)) {
unsigned long ssl_err = ERR_get_error();
unsigned long ssl_err = ERR_get_error();
if (!(strlen(options.ssl_ciphers) == 0 && ERR_GET_REASON(ssl_err) == SSL_R_NO_CIPHER_MATCH)) {
char error_msg[256];
ERR_error_string_n(ERR_peek_last_error(), error_msg, sizeof(error_msg));
@@ -1308,7 +1308,7 @@ SSL_CTX *create_ssl_context_from_bun_options(
// SSL_R_NO_CIPHER_MATCH to be an error (this is how _set_cipher_suites()
// works). If the user actually sets a value (like "no-such-cipher"), then
// that's actually an error.
*err = CREATE_BUN_SOCKET_ERROR_INVALID_CIPHERS;
*err = CREATE_BUN_SOCKET_ERROR_INVALID_CIPHERS;
free_ssl_context(ssl_context);
return NULL;
}

View File

@@ -44,7 +44,7 @@ void us_loop_run_bun_tick(struct us_loop_t *loop, const struct timespec* timeout
#define GET_READY_POLL(loop, index) (struct us_poll_t *) loop->ready_polls[index].data.ptr
#define SET_READY_POLL(loop, index, poll) loop->ready_polls[index].data.ptr = (void*)poll
#else
#define GET_READY_POLL(loop, index) (struct us_poll_t *) (loop)->ready_polls[index].udata
#define GET_READY_POLL(loop, index) (struct us_poll_t *) loop->ready_polls[index].udata
#define SET_READY_POLL(loop, index, poll) loop->ready_polls[index].udata = (uint64_t)poll
#endif
@@ -264,7 +264,7 @@ void us_loop_run_bun_tick(struct us_loop_t *loop, const struct timespec* timeout
us_internal_loop_pre(loop);
if (loop->data.jsc_vm)
if (loop->data.jsc_vm)
Bun__JSC_onBeforeWait(loop->data.jsc_vm);
/* Fetch ready polls */
@@ -336,7 +336,7 @@ void us_internal_loop_update_pending_ready_polls(struct us_loop_t *loop, struct
// if new events does not contain the ready events of this poll then remove (no we filter that out later on)
SET_READY_POLL(loop, i, new_poll);
num_entries_possibly_remaining--;
}
}
@@ -366,7 +366,7 @@ int kqueue_change(int kqfd, int fd, int old_events, int new_events, void *user_d
/* Do they differ in writable? */
EV_SET64(&change_list[change_length++], fd, EVFILT_WRITE, (new_events & LIBUS_SOCKET_WRITABLE) ? EV_ADD : EV_DELETE, 0, 0, (uint64_t)(void*)user_data, 0, 0);
}
int ret = 0;
int ret;
do {
ret = kevent64(kqfd, change_list, change_length, change_list, change_length, KEVENT_FLAG_ERROR_EVENTS, NULL);
} while (IS_EINTR(ret));
@@ -379,7 +379,7 @@ int kqueue_change(int kqfd, int fd, int old_events, int new_events, void *user_d
struct us_poll_t *us_poll_resize(struct us_poll_t *p, struct us_loop_t *loop, unsigned int ext_size) {
int events = us_poll_events(p);
struct us_poll_t *new_p = us_realloc(p, sizeof(struct us_poll_t) + ext_size);
if (p != new_p) {
@@ -391,7 +391,6 @@ struct us_poll_t *us_poll_resize(struct us_poll_t *p, struct us_loop_t *loop, un
/* Forcefully update poll by resetting them with new_p as user data */
kqueue_change(loop->fd, new_p->state.fd, 0, LIBUS_SOCKET_WRITABLE | LIBUS_SOCKET_READABLE, new_p);
#endif /* This is needed for epoll also (us_change_poll doesn't update the old poll) */
// NOLINTNEXTLINE(clang-analyzer-unix.Malloc)
us_internal_loop_update_pending_ready_polls(loop, p, new_p, events, events);
}
@@ -563,8 +562,8 @@ void us_timer_close(struct us_timer_t *timer, int fallthrough) {
struct us_internal_callback_t *internal_cb = (struct us_internal_callback_t *) timer;
struct kevent64_s event;
EV_SET64(&event, (uint64_t) internal_cb, EVFILT_TIMER, EV_DELETE, 0, 0, (uint64_t)internal_cb, 0, 0);
int ret = 0;
EV_SET64(&event, (uint64_t) (void*) internal_cb, EVFILT_TIMER, EV_DELETE, 0, 0, (uint64_t)internal_cb, 0, 0);
int ret;
do {
ret = kevent64(internal_cb->loop->fd, &event, 1, &event, 1, KEVENT_FLAG_ERROR_EVENTS, NULL);
} while (IS_EINTR(ret));
@@ -585,10 +584,10 @@ void us_timer_set(struct us_timer_t *t, void (*cb)(struct us_timer_t *t), int ms
/* Bug: repeat_ms must be the same as ms, or 0 */
struct kevent64_s event;
uint64_t ptr = (uint64_t)internal_cb;
uint64_t ptr = (uint64_t)(void*)internal_cb;
EV_SET64(&event, ptr, EVFILT_TIMER, EV_ADD | (repeat_ms ? 0 : EV_ONESHOT), 0, ms, (uint64_t)internal_cb, 0, 0);
int ret = 0;
int ret;
do {
ret = kevent64(internal_cb->loop->fd, &event, 1, &event, 1, KEVENT_FLAG_ERROR_EVENTS, NULL);
} while (IS_EINTR(ret));
@@ -684,10 +683,10 @@ void us_internal_async_close(struct us_internal_async *a) {
struct us_internal_callback_t *internal_cb = (struct us_internal_callback_t *) a;
struct kevent64_s event;
uint64_t ptr = (uint64_t)internal_cb;
EV_SET64(&event, ptr, EVFILT_MACHPORT, EV_DELETE, 0, 0, (uint64_t)internal_cb, 0,0);
uint64_t ptr = (uint64_t)(void*)internal_cb;
EV_SET64(&event, ptr, EVFILT_MACHPORT, EV_DELETE, 0, 0, (uint64_t)(void*)internal_cb, 0,0);
int ret = 0;
int ret;
do {
ret = kevent64(internal_cb->loop->fd, &event, 1, &event, 1, KEVENT_FLAG_ERROR_EVENTS, NULL);
} while (IS_EINTR(ret));
@@ -714,11 +713,11 @@ void us_internal_async_set(struct us_internal_async *a, void (*cb)(struct us_int
event.filter = EVFILT_MACHPORT;
event.flags = EV_ADD | EV_ENABLE;
event.fflags = MACH_RCV_MSG | MACH_RCV_OVERWRITE;
event.ext[0] = (uint64_t)internal_cb->machport_buf;
event.ext[0] = (uint64_t)(void*)internal_cb->machport_buf;
event.ext[1] = MACHPORT_BUF_LEN;
event.udata = (uint64_t)internal_cb;
event.udata = (uint64_t)(void*)internal_cb;
int ret = 0;
int ret;
do {
ret = kevent64(internal_cb->loop->fd, &event, 1, &event, 1, KEVENT_FLAG_ERROR_EVENTS, NULL);
} while (IS_EINTR(ret));
@@ -750,14 +749,20 @@ void us_internal_async_wakeup(struct us_internal_async *a) {
);
switch (kr) {
case KERN_SUCCESS:
case KERN_SUCCESS: {
break;
}
// This means that the send would've blocked because the
// queue is full. We assume success because the port is full.
case MACH_SEND_TIMED_OUT:
case MACH_SEND_TIMED_OUT: {
break;
}
// No space means it will wake up.
case MACH_SEND_NO_BUFFER:
case MACH_SEND_NO_BUFFER: {
break;
}
default: {
break;

View File

@@ -22,16 +22,7 @@
#ifndef WIN32
#include <sys/ioctl.h>
#endif
#if __has_include("wtf/Platform.h")
#include "wtf/Platform.h"
#elif !defined(ASSERT_ENABLED)
#if defined(BUN_DEBUG) || defined(__has_feature) && __has_feature(address_sanitizer) || defined(__SANITIZE_ADDRESS__)
#define ASSERT_ENABLED 1
#else
#define ASSERT_ENABLED 0
#endif
#endif
#if ASSERT_ENABLED
extern const size_t Bun__lock__size;
@@ -181,7 +172,7 @@ static const int MAX_LOW_PRIO_SOCKETS_PER_LOOP_ITERATION = 5;
void us_internal_handle_low_priority_sockets(struct us_loop_t *loop) {
struct us_internal_loop_data_t *loop_data = &loop->data;
struct us_socket_t *s = NULL;
struct us_socket_t *s;
loop_data->low_prio_budget = MAX_LOW_PRIO_SOCKETS_PER_LOOP_ITERATION;
@@ -425,7 +416,7 @@ void us_internal_dispatch_ready_poll(struct us_poll_t *p, int error, int eof, in
const int recv_flags = MSG_DONTWAIT | MSG_NOSIGNAL;
#endif
int length = 0;
int length;
#if !defined(_WIN32)
if(s->flags.is_ipc) {
struct msghdr msg = {0};

View File

@@ -30,7 +30,7 @@ describe("BunTestController", () => {
const pattern = internal.buildTestNamePattern(mockTests);
expect(pattern).toContain(".*?");
expect(pattern).toBe("(^ ?test with .*?$)|(^ ?test with \\.*?$)");
expect(pattern).toBe("(^ test with .*?$)|(^ test with \\.*?$)");
});
test("should escape % formatters", () => {
@@ -41,7 +41,7 @@ describe("BunTestController", () => {
const pattern = internal.buildTestNamePattern(mockTests);
expect(pattern).toBe("(^ ?test with .*?$)|(^ ?test with .*?$)");
expect(pattern).toBe("(^ test with .*?$)|(^ test with .*?$)");
});
test("should join multiple patterns with |", () => {
@@ -53,7 +53,7 @@ describe("BunTestController", () => {
const pattern = internal.buildTestNamePattern(mockTests);
expect(pattern).toBe("(^ ?test 1$)|(^ ?test 2$)|(^ ?test 3$)");
expect(pattern).toBe("(^ test 1$)|(^ test 2$)|(^ test 3$)");
});
test("should handle describe blocks differently", () => {
@@ -61,7 +61,7 @@ describe("BunTestController", () => {
const pattern = internal.buildTestNamePattern(mockTests);
expect(pattern).toBe("(^ ?describe block )");
expect(pattern).toBe("(^ describe block )");
});
test("should handle complex nested test names", () => {

View File

@@ -1339,9 +1339,9 @@ export class BunTestController implements vscode.Disposable {
t = t.replaceAll(/\$[\w\.\[\]]+/g, ".*?");
if (test?.tags?.some(tag => tag.id === "test" || tag.id === "it")) {
testNames.push(`^ ?${t}$`);
testNames.push(`^ ${t}$`);
} else if (test?.tags?.some(tag => tag.id === "describe")) {
testNames.push(`^ ?${t} `);
testNames.push(`^ ${t} `);
} else {
testNames.push(t);
}

View File

@@ -117,7 +117,7 @@ async function countReactions(issueNumbers: number[], verbose = false): Promise<
}
// Small delay to avoid rate limiting
await Bun.sleep(1);
await Bun.sleep(50);
}
return totalReactions;

View File

@@ -1,72 +0,0 @@
#!/usr/bin/env bun
const body = process.env.GITHUB_ISSUE_BODY || "";
const title = process.env.GITHUB_ISSUE_TITLE || "";
const issueNumber = process.env.GITHUB_ISSUE_NUMBER;
if (!issueNumber) {
throw new Error("GITHUB_ISSUE_NUMBER must be set");
}
interface CloseAction {
reason: "not_planned" | "completed";
comment: string;
}
let closeAction: CloseAction | null = null;
// Check for workers_terminated
if (body.includes("workers_terminated")) {
closeAction = {
reason: "not_planned",
comment: `Duplicate of #15964
We are tracking worker stability issues in https://github.com/oven-sh/bun/issues/15964. For now, I recommend against terminating workers when possible.`,
};
}
// Check for better-sqlite3 with RunCommand or AutoCommand
else if (body.includes("better-sqlite3") && (body.includes("[RunCommand]") || body.includes("[AutoCommand]"))) {
closeAction = {
reason: "not_planned",
comment: `Duplicate of #4290.
better-sqlite3 is not supported yet in Bun due to missing V8 C++ APIs. For now, you can try [bun:sqlite](https://bun.com/docs/api/sqlite) for an almost drop-in replacement.`,
};
}
// Check for CPU architecture issues (Segmentation Fault/Illegal Instruction with no_avx)
else if (
(body.includes("Segmentation Fault") ||
body.includes("Illegal Instruction") ||
body.includes("IllegalInstruction")) &&
body.includes("no_avx")
) {
let comment = `Bun requires a CPU with the micro-architecture [\`nehalem\`](https://en.wikipedia.org/wiki/Nehalem_(microarchitecture)) or later (released in 2008). If you're using a CPU emulator like qemu, then try enabling x86-64-v2.`;
// Check if it's macOS
const platformMatch = body.match(/Platform:\s*([^\n]+)/i) || body.match(/on\s+(macos|darwin)/i);
const isMacOS =
platformMatch &&
(platformMatch[1]?.toLowerCase().includes("darwin") || platformMatch[1]?.toLowerCase().includes("macos"));
if (isMacOS) {
comment += `\n\nIf you're on a macOS silicon device, you're running Bun via the Rosetta CPU emulator and your best option is to run Bun natively instead.`;
}
closeAction = {
reason: "not_planned",
comment,
};
}
if (closeAction) {
// Output the action to take
console.write(
JSON.stringify({
close: true,
reason: closeAction.reason,
comment: closeAction.comment,
}),
);
} else {
console.write(JSON.stringify({ close: false }));
}

View File

@@ -6,9 +6,6 @@ if (!body) {
const latest = (await Bun.file(join(import.meta.dir, "..", "LATEST")).text()).trim();
// Check if this is a standalone executable
const isStandalone = body.includes("standalone_executable");
const lines = body.split("\n").reverse();
for (let line of lines) {
@@ -42,11 +39,6 @@ for (let line of lines) {
await Bun.write("is-outdated.txt", "true");
await Bun.write("outdated.txt", version);
// Write flag for standalone executables
if (isStandalone) {
await Bun.write("is-standalone.txt", "true");
}
const isVeryOutdated =
major !== latestMajor || minor !== latestMinor || (latestPatch > patch && latestPatch - patch > 3);

View File

@@ -88,13 +88,13 @@ if [ "$MODE" = "check" ]; then
fi
dir=$(dirname "$dir")
done
if ! $CLANG_FORMAT --dry-run --Werror "$file" 2>/dev/null; then
echo "Format check failed: $file"
FAILED=1
fi
done
if [ $FAILED -eq 1 ]; then
echo "Some files need formatting. Run 'bun run clang-format' to fix."
exit 1
@@ -123,4 +123,4 @@ else
echo " fix - Same as format" >&2
echo " diff - Show formatting differences" >&2
exit 1
fi
fi

View File

@@ -72,7 +72,6 @@ const cwd = import.meta.dirname ? dirname(import.meta.dirname) : process.cwd();
const testsPath = join(cwd, "test");
const spawnTimeout = 5_000;
const spawnBunTimeout = 20_000; // when running with ASAN/LSAN bun can take a bit longer to exit, not a bug.
const testTimeout = 3 * 60_000;
const integrationTimeout = 5 * 60_000;
@@ -299,7 +298,7 @@ function getTestExpectations() {
return expectations;
}
const skipsForExceptionValidation = (() => {
const skipArray = (() => {
const path = join(cwd, "test/no-validate-exceptions.txt");
if (!existsSync(path)) {
return [];
@@ -310,32 +309,13 @@ const skipsForExceptionValidation = (() => {
.filter(line => !line.startsWith("#") && line.length > 0);
})();
const skipsForLeaksan = (() => {
const path = join(cwd, "test/no-validate-leaksan.txt");
if (!existsSync(path)) {
return [];
}
return readFileSync(path, "utf-8")
.split("\n")
.filter(line => !line.startsWith("#") && line.length > 0);
})();
/**
* Returns whether we should validate exception checks running the given test
* @param {string} test
* @returns {boolean}
*/
const shouldValidateExceptions = test => {
return !(skipsForExceptionValidation.includes(test) || skipsForExceptionValidation.includes("test/" + test));
};
/**
* Returns whether we should validate exception checks running the given test
* @param {string} test
* @returns {boolean}
*/
const shouldValidateLeakSan = test => {
return !(skipsForLeaksan.includes(test) || skipsForLeaksan.includes("test/" + test));
return !(skipArray.includes(test) || skipArray.includes("test/" + test));
};
/**
@@ -420,9 +400,7 @@ async function runTests() {
const okResults = [];
const flakyResults = [];
const flakyResultsTitles = [];
const failedResults = [];
const failedResultsTitles = [];
const maxAttempts = 1 + (parseInt(options["retries"]) || 0);
const parallelism = options["parallel"] ? availableParallelism() : 1;
@@ -458,7 +436,6 @@ async function runTests() {
if (ok) {
if (failure) {
flakyResults.push(failure);
flakyResultsTitles.push(title);
} else {
okResults.push(result);
}
@@ -478,7 +455,6 @@ async function runTests() {
if (attempt >= maxAttempts || isAlwaysFailure(error)) {
flaky = false;
failedResults.push(failure);
failedResultsTitles.push(title);
break;
}
}
@@ -591,12 +567,6 @@ async function runTests() {
env.BUN_JSC_validateExceptionChecks = "1";
env.BUN_JSC_dumpSimulatedThrows = "1";
}
if ((basename(execPath).includes("asan") || !isCI) && shouldValidateLeakSan(testPath)) {
env.BUN_DESTRUCT_VM_ON_EXIT = "1";
env.ASAN_OPTIONS = "allow_user_segv_handler=1:disable_coredump=0:detect_leaks=1";
// prettier-ignore
env.LSAN_OPTIONS = `malloc_context_size=100:print_suppressions=0:suppressions=${process.cwd()}/test/leaksan.supp`;
}
return runTest(title, async () => {
const { ok, error, stdout, crashes } = await spawnBun(execPath, {
cwd: cwd,
@@ -654,15 +624,6 @@ async function runTests() {
throw new Error(`Unsupported package manager: ${packageManager}`);
}
// build
const buildResult = await spawnBun(execPath, {
cwd: vendorPath,
args: ["run", "build"],
});
if (!buildResult.ok) {
throw new Error(`Failed to build vendor: ${buildResult.error}`);
}
for (const testPath of testPaths) {
const title = join(relative(cwd, vendorPath), testPath).replace(/\\/g, "/");
@@ -848,14 +809,14 @@ async function runTests() {
if (failedResults.length) {
console.log(`${getAnsi("red")}Failing Tests:${getAnsi("reset")}`);
for (const testPath of failedResultsTitles) {
for (const { testPath } of failedResults) {
console.log(`${getAnsi("red")}- ${testPath}${getAnsi("reset")}`);
}
}
if (flakyResults.length) {
console.log(`${getAnsi("yellow")}Flaky Tests:${getAnsi("reset")}`);
for (const testPath of flakyResultsTitles) {
for (const { testPath } of flakyResults) {
console.log(`${getAnsi("yellow")}- ${testPath}${getAnsi("reset")}`);
}
}
@@ -1133,7 +1094,7 @@ async function spawnBun(execPath, { args, cwd, timeout, env, stdout, stderr }) {
: { BUN_ENABLE_CRASH_REPORTING: "0" }),
};
if (basename(execPath).includes("asan") && bunEnv.ASAN_OPTIONS === undefined) {
if (basename(execPath).includes("asan")) {
bunEnv.ASAN_OPTIONS = "allow_user_segv_handler=1:disable_coredump=0";
}
@@ -1153,9 +1114,6 @@ async function spawnBun(execPath, { args, cwd, timeout, env, stdout, stderr }) {
}
bunEnv["TEMP"] = tmpdirPath;
}
if (timeout === undefined) {
timeout = spawnBunTimeout;
}
try {
const existingCores = options["coredump-upload"] ? readdirSync(coresDir) : [];
const result = await spawnSafe({
@@ -1292,17 +1250,17 @@ async function spawnBun(execPath, { args, cwd, timeout, env, stdout, stderr }) {
*
* @param {string} execPath
* @param {string} testPath
* @param {object} [opts]
* @param {string} [opts.cwd]
* @param {string[]} [opts.args]
* @param {object} [options]
* @param {string} [options.cwd]
* @param {string[]} [options.args]
* @returns {Promise<TestResult>}
*/
async function spawnBunTest(execPath, testPath, opts = { cwd }) {
async function spawnBunTest(execPath, testPath, options = { cwd }) {
const timeout = getTestTimeout(testPath);
const perTestTimeout = Math.ceil(timeout / 2);
const absPath = join(opts["cwd"], testPath);
const absPath = join(options["cwd"], testPath);
const isReallyTest = isTestStrict(testPath) || absPath.includes("vendor");
const args = opts["args"] ?? [];
const args = options["args"] ?? [];
const testArgs = ["test", ...args, `--timeout=${perTestTimeout}`];
@@ -1333,16 +1291,10 @@ async function spawnBunTest(execPath, testPath, opts = { cwd }) {
env.BUN_JSC_validateExceptionChecks = "1";
env.BUN_JSC_dumpSimulatedThrows = "1";
}
if ((basename(execPath).includes("asan") || !isCI) && shouldValidateLeakSan(relative(cwd, absPath))) {
env.BUN_DESTRUCT_VM_ON_EXIT = "1";
env.ASAN_OPTIONS = "allow_user_segv_handler=1:disable_coredump=0:detect_leaks=1";
// prettier-ignore
env.LSAN_OPTIONS = `malloc_context_size=100:print_suppressions=0:suppressions=${process.cwd()}/test/leaksan.supp`;
}
const { ok, error, stdout, crashes } = await spawnBun(execPath, {
args: isReallyTest ? testArgs : [...args, absPath],
cwd: opts["cwd"],
cwd: options["cwd"],
timeout: isReallyTest ? timeout : 30_000,
env,
stdout: options.stdout,
@@ -1576,11 +1528,7 @@ function isNodeTest(path) {
return false;
}
const unixPath = path.replaceAll(sep, "/");
return (
unixPath.includes("js/node/test/parallel/") ||
unixPath.includes("js/node/test/sequential/") ||
unixPath.includes("js/bun/test/parallel/")
);
return unixPath.includes("js/node/test/parallel/") || unixPath.includes("js/node/test/sequential/");
}
/**
@@ -2269,7 +2217,7 @@ function getExitCode(outcome) {
return 1;
}
// A flaky segfault, sigtrap, or sigkill must never be ignored.
// A flaky segfault, sigtrap, or sigill must never be ignored.
// If it happens in CI, it will happen to our users.
// Flaky AddressSanitizer errors cannot be ignored since they still represent real bugs.
function isAlwaysFailure(error) {
@@ -2278,7 +2226,6 @@ function isAlwaysFailure(error) {
error.includes("segmentation fault") ||
error.includes("illegal instruction") ||
error.includes("sigtrap") ||
error.includes("sigkill") ||
error.includes("error: addresssanitizer") ||
error.includes("internal assertion failure") ||
error.includes("core dumped") ||

View File

@@ -2808,7 +2808,6 @@ export function endGroup() {
} else {
console.groupEnd();
}
console.log();
}
export function printEnvironment() {

35
scripts/write-versions.sh Executable file
View File

@@ -0,0 +1,35 @@
#!/bin/bash
set -exo pipefail
WEBKIT_VERSION=$(grep 'set(WEBKIT_TAG' "CMakeLists.txt" | awk '{print $2}' | cut -f 1 -d ')')
MIMALLOC_VERSION=$(git rev-parse HEAD:./src/deps/mimalloc)
LIBARCHIVE_VERSION=$(git rev-parse HEAD:./src/deps/libarchive)
PICOHTTPPARSER_VERSION=$(git rev-parse HEAD:./src/deps/picohttpparser)
BORINGSSL_VERSION=$(git rev-parse HEAD:./src/deps/boringssl)
ZLIB_VERSION=$(git rev-parse HEAD:./src/deps/zlib)
LOLHTML=$(git rev-parse HEAD:./src/deps/lol-html)
TINYCC=$(git rev-parse HEAD:./src/deps/tinycc)
C_ARES=$(git rev-parse HEAD:./src/deps/c-ares)
ZSTD=$(git rev-parse HEAD:./src/deps/zstd)
LSHPACK=$(git rev-parse HEAD:./src/deps/ls-hpack)
LIBDEFLATE=$(git rev-parse HEAD:./src/deps/libdeflate)
rm -rf src/generated_versions_list.zig
echo "// AUTO-GENERATED FILE. Created via .scripts/write-versions.sh" >src/generated_versions_list.zig
echo "" >>src/generated_versions_list.zig
echo "pub const boringssl = \"$BORINGSSL_VERSION\";" >>src/generated_versions_list.zig
echo "pub const libarchive = \"$LIBARCHIVE_VERSION\";" >>src/generated_versions_list.zig
echo "pub const mimalloc = \"$MIMALLOC_VERSION\";" >>src/generated_versions_list.zig
echo "pub const picohttpparser = \"$PICOHTTPPARSER_VERSION\";" >>src/generated_versions_list.zig
echo "pub const webkit = \"$WEBKIT_VERSION\";" >>src/generated_versions_list.zig
echo "pub const zig = @import(\"std\").fmt.comptimePrint(\"{}\", .{@import(\"builtin\").zig_version});" >>src/generated_versions_list.zig
echo "pub const zlib = \"$ZLIB_VERSION\";" >>src/generated_versions_list.zig
echo "pub const tinycc = \"$TINYCC\";" >>src/generated_versions_list.zig
echo "pub const lolhtml = \"$LOLHTML\";" >>src/generated_versions_list.zig
echo "pub const c_ares = \"$C_ARES\";" >>src/generated_versions_list.zig
echo "pub const libdeflate = \"$LIBDEFLATE\";" >>src/generated_versions_list.zig
echo "pub const zstd = \"$ZSTD\";" >>src/generated_versions_list.zig
echo "pub const lshpack = \"$LSHPACK\";" >>src/generated_versions_list.zig
echo "" >>src/generated_versions_list.zig
zig fmt src/generated_versions_list.zig

View File

@@ -159,7 +159,7 @@ pub inline fn mimalloc_cleanup(force: bool) void {
Mimalloc.mi_collect(force);
}
}
// Versions are now handled by CMake-generated header (bun_dependency_versions.h)
pub const versions = @import("./generated_versions_list.zig");
// Enabling huge pages slows down bun by 8x or so
// Keeping this code for:

View File

@@ -18,7 +18,7 @@ pub fn deinit(this: *HTMLScanner) void {
for (this.import_records.slice()) |*record| {
this.allocator.free(record.path.text);
}
this.import_records.deinit(this.allocator);
this.import_records.deinitWithAllocator(this.allocator);
}
fn createImportRecord(this: *HTMLScanner, input_path: []const u8, kind: ImportKind) !void {
@@ -44,7 +44,7 @@ fn createImportRecord(this: *HTMLScanner, input_path: []const u8, kind: ImportKi
.range = logger.Range.None,
};
try this.import_records.append(this.allocator, record);
try this.import_records.push(this.allocator, record);
}
const debug = bun.Output.scoped(.HTMLScanner, .hidden);

View File

@@ -229,11 +229,6 @@ pub fn BSSList(comptime ValueType: type, comptime _count: anytype) type {
this.data[index] = item;
return &this.data[index];
}
pub fn deinit(this: *OverflowBlock) void {
if (this.prev) |p| p.deinit();
bun.default_allocator.destroy(this);
}
};
const Self = @This();
@@ -269,12 +264,6 @@ pub fn BSSList(comptime ValueType: type, comptime _count: anytype) type {
return instance;
}
pub fn deinit(self: *Self) void {
self.head.deinit();
bun.default_allocator.destroy(instance);
loaded = false;
}
pub fn isOverflowing() bool {
return instance.used >= @as(u16, count);
}
@@ -361,12 +350,6 @@ pub fn BSSStringList(comptime _count: usize, comptime _item_length: usize) type
return instance;
}
pub fn deinit(self: *const Self) void {
_ = self;
bun.default_allocator.destroy(instance);
loaded = false;
}
pub inline fn isOverflowing() bool {
return instance.slice_buf_used >= @as(u16, count);
}
@@ -547,12 +530,6 @@ pub fn BSSMap(comptime ValueType: type, comptime count: anytype, comptime store_
return instance;
}
pub fn deinit(self: *Self) void {
self.index.deinit(self.allocator);
bun.default_allocator.destroy(instance);
loaded = false;
}
pub fn isOverflowing() bool {
return instance.backing_buf_used >= @as(u16, count);
}
@@ -676,10 +653,6 @@ pub fn BSSMap(comptime ValueType: type, comptime count: anytype, comptime store_
// }
}
pub fn values(self: *Self) []ValueType {
return (&self.backing_buf)[0..self.backing_buf_used];
}
};
if (!store_keys) {
return BSSMapType;
@@ -711,12 +684,6 @@ pub fn BSSMap(comptime ValueType: type, comptime count: anytype, comptime store_
return instance;
}
pub fn deinit(self: *Self) void {
self.map.deinit();
bun.default_allocator.destroy(instance);
instance_loaded = false;
}
pub fn isOverflowing() bool {
return instance.map.backing_buf_used >= count;
}

View File

@@ -78,15 +78,6 @@ pub const Borrowed = struct {
else
null;
}
pub fn downcast(std_alloc: std.mem.Allocator) Borrowed {
bun.assertf(
isInstance(std_alloc),
"not a MimallocArena (vtable is {*})",
.{std_alloc.vtable},
);
return .fromOpaque(std_alloc.ptr);
}
};
const BorrowedHeap = if (safety_checks) *DebugHeap else *mimalloc.Heap;
@@ -124,7 +115,6 @@ pub fn borrow(self: Self) Borrowed {
/// It uses pthread_getspecific to do that.
/// We can save those extra calls if we just do it once in here
pub fn getThreadLocalDefault() std.mem.Allocator {
if (bun.Environment.enable_asan) return bun.default_allocator;
return Borrowed.getDefault().allocator();
}

View File

@@ -186,10 +186,10 @@ const State = struct {
self.history.unlock();
}
pub fn deinit(self: *Self) void {
fn deinit(self: *Self) void {
defer self.* = undefined;
var history = self.history.intoUnprotected();
defer history.deinit(self.parent);
defer history.deinit();
const count = history.allocations.count();
if (count == 0) return;

View File

@@ -1,7 +1,6 @@
#include "wtf/Compiler.h"
#if ASAN_ENABLED
// NOLINTNEXTLINE(bugprone-reserved-identifier)
const char* __asan_default_options(void)
{
// detect_stack_use_after_return causes some stack allocations to be made on the heap instead,

View File

@@ -83,14 +83,14 @@ pub const TsEnumsMap = std.ArrayHashMapUnmanaged(Ref, bun.StringHashMapUnmanaged
pub fn fromParts(parts: []Part) Ast {
return Ast{
.parts = Part.List.fromOwnedSlice(parts),
.parts = Part.List.init(parts),
.runtime_imports = .{},
};
}
pub fn initTest(parts: []const Part) Ast {
pub fn initTest(parts: []Part) Ast {
return Ast{
.parts = Part.List.fromBorrowedSliceDangerous(parts),
.parts = Part.List.init(parts),
.runtime_imports = .{},
};
}
@@ -107,9 +107,9 @@ pub fn toJSON(self: *const Ast, _: std.mem.Allocator, stream: anytype) !void {
/// Do not call this if it wasn't globally allocated!
pub fn deinit(this: *Ast) void {
// TODO: assert mimalloc-owned memory
this.parts.deinit(bun.default_allocator);
this.symbols.deinit(bun.default_allocator);
this.import_records.deinit(bun.default_allocator);
if (this.parts.len > 0) this.parts.deinitWithAllocator(bun.default_allocator);
if (this.symbols.len > 0) this.symbols.deinitWithAllocator(bun.default_allocator);
if (this.import_records.len > 0) this.import_records.deinitWithAllocator(bun.default_allocator);
}
pub const Class = G.Class;

View File

@@ -56,14 +56,7 @@ pub fn toExpr(binding: *const Binding, wrapper: anytype) Expr {
};
}
return Expr.init(
E.Array,
E.Array{
.items = ExprNodeList.fromOwnedSlice(exprs),
.is_single_line = b.is_single_line,
},
loc,
);
return Expr.init(E.Array, E.Array{ .items = ExprNodeList.init(exprs), .is_single_line = b.is_single_line }, loc);
},
.b_object => |b| {
const properties = wrapper
@@ -84,7 +77,7 @@ pub fn toExpr(binding: *const Binding, wrapper: anytype) Expr {
return Expr.init(
E.Object,
E.Object{
.properties = G.Property.List.fromOwnedSlice(properties),
.properties = G.Property.List.init(properties),
.is_single_line = b.is_single_line,
},
loc,

View File

@@ -121,7 +121,7 @@ pub fn convertStmt(ctx: *ConvertESMExportsForHmr, p: anytype, stmt: Stmt) !void
const temp_id = p.generateTempRef("default_export");
try ctx.last_part.declared_symbols.append(p.allocator, .{ .ref = temp_id, .is_top_level = true });
try ctx.last_part.symbol_uses.putNoClobber(p.allocator, temp_id, .{ .count_estimate = 1 });
try p.current_scope.generated.append(p.allocator, temp_id);
try p.current_scope.generated.push(p.allocator, temp_id);
try ctx.export_props.append(p.allocator, .{
.key = Expr.init(E.String, .{ .data = "default" }, stmt.loc),
@@ -395,7 +395,7 @@ fn visitRefToExport(
const arg1 = p.generateTempRef(symbol.original_name);
try ctx.last_part.declared_symbols.append(p.allocator, .{ .ref = arg1, .is_top_level = true });
try ctx.last_part.symbol_uses.putNoClobber(p.allocator, arg1, .{ .count_estimate = 1 });
try p.current_scope.generated.append(p.allocator, arg1);
try p.current_scope.generated.push(p.allocator, arg1);
// 'get abc() { return abc }'
try ctx.export_props.append(p.allocator, .{
@@ -438,7 +438,7 @@ pub fn finalize(ctx: *ConvertESMExportsForHmr, p: anytype, all_parts: []js_ast.P
if (ctx.export_props.items.len > 0) {
const obj = Expr.init(E.Object, .{
.properties = G.Property.List.moveFromList(&ctx.export_props),
.properties = G.Property.List.fromList(ctx.export_props),
}, logger.Loc.Empty);
// `hmr.exports = ...`
@@ -466,7 +466,7 @@ pub fn finalize(ctx: *ConvertESMExportsForHmr, p: anytype, all_parts: []js_ast.P
.name = "reactRefreshAccept",
.name_loc = .Empty,
}, .Empty),
.args = .empty,
.args = .init(&.{}),
}, .Empty),
}, .Empty));
}
@@ -474,10 +474,7 @@ pub fn finalize(ctx: *ConvertESMExportsForHmr, p: anytype, all_parts: []js_ast.P
// Merge all part metadata into the first part.
for (all_parts[0 .. all_parts.len - 1]) |*part| {
try ctx.last_part.declared_symbols.appendList(p.allocator, part.declared_symbols);
try ctx.last_part.import_record_indices.appendSlice(
p.allocator,
part.import_record_indices.slice(),
);
try ctx.last_part.import_record_indices.append(p.allocator, part.import_record_indices.slice());
for (part.symbol_uses.keys(), part.symbol_uses.values()) |k, v| {
const gop = try ctx.last_part.symbol_uses.getOrPut(p.allocator, k);
if (!gop.found_existing) {
@@ -490,16 +487,13 @@ pub fn finalize(ctx: *ConvertESMExportsForHmr, p: anytype, all_parts: []js_ast.P
part.declared_symbols.entries.len = 0;
part.tag = .dead_due_to_inlining;
part.dependencies.clearRetainingCapacity();
try part.dependencies.append(p.allocator, .{
try part.dependencies.push(p.allocator, .{
.part_index = @intCast(all_parts.len - 1),
.source_index = p.source.index,
});
}
try ctx.last_part.import_record_indices.appendSlice(
p.allocator,
p.import_records_for_current_part.items,
);
try ctx.last_part.import_record_indices.append(p.allocator, p.import_records_for_current_part.items);
try ctx.last_part.declared_symbols.appendList(p.allocator, p.declared_symbols);
ctx.last_part.stmts = ctx.stmts.items;

View File

@@ -18,7 +18,7 @@ pub const Array = struct {
close_bracket_loc: logger.Loc = logger.Loc.Empty,
pub fn push(this: *Array, allocator: std.mem.Allocator, item: Expr) !void {
try this.items.append(allocator, item);
try this.items.push(allocator, item);
}
pub inline fn slice(this: Array) []Expr {
@@ -30,13 +30,12 @@ pub const Array = struct {
allocator: std.mem.Allocator,
estimated_count: usize,
) !ExprNodeList {
var out: bun.BabyList(Expr) = try .initCapacity(
allocator,
var out = try allocator.alloc(
Expr,
// This over-allocates a little but it's fine
estimated_count + @as(usize, this.items.len),
);
out.expandToCapacity();
var remain = out.slice();
var remain = out;
for (this.items.slice()) |item| {
switch (item.data) {
.e_spread => |val| {
@@ -64,8 +63,7 @@ pub const Array = struct {
remain = remain[1..];
}
out.shrinkRetainingCapacity(out.len - remain.len);
return out;
return ExprNodeList.init(out[0 .. out.len - remain.len]);
}
pub fn toJS(this: @This(), allocator: std.mem.Allocator, globalObject: *jsc.JSGlobalObject) ToJSError!jsc.JSValue {
@@ -100,43 +98,6 @@ pub const Array = struct {
pub const Unary = struct {
op: Op.Code,
value: ExprNodeIndex,
flags: Unary.Flags = .{},
pub const Flags = packed struct(u8) {
/// The expression "typeof (0, x)" must not become "typeof x" if "x"
/// is unbound because that could suppress a ReferenceError from "x".
///
/// Also if we know a typeof operator was originally an identifier, then
/// we know that this typeof operator always has no side effects (even if
/// we consider the identifier by itself to have a side effect).
///
/// Note that there *is* actually a case where "typeof x" can throw an error:
/// when "x" is being referenced inside of its TDZ (temporal dead zone). TDZ
/// checks are not yet handled correctly by Bun, so this possibility is
/// currently ignored.
was_originally_typeof_identifier: bool = false,
/// Similarly the expression "delete (0, x)" must not become "delete x"
/// because that syntax is invalid in strict mode. We also need to make sure
/// we don't accidentally change the return value:
///
/// Returns false:
/// "var a; delete (a)"
/// "var a = Object.freeze({b: 1}); delete (a.b)"
/// "var a = Object.freeze({b: 1}); delete (a?.b)"
/// "var a = Object.freeze({b: 1}); delete (a['b'])"
/// "var a = Object.freeze({b: 1}); delete (a?.['b'])"
///
/// Returns true:
/// "var a; delete (0, a)"
/// "var a = Object.freeze({b: 1}); delete (true && a.b)"
/// "var a = Object.freeze({b: 1}); delete (false || a?.b)"
/// "var a = Object.freeze({b: 1}); delete (null ?? a?.['b'])"
///
/// "var a = Object.freeze({b: 1}); delete (true ? a['b'] : a['b'])"
was_originally_delete_of_identifier_or_property_access: bool = false,
_: u6 = 0,
};
};
pub const Binary = struct {
@@ -575,7 +536,7 @@ pub const Object = struct {
if (asProperty(self, key)) |query| {
self.properties.ptr[query.i].value = expr;
} else {
try self.properties.append(allocator, .{
try self.properties.push(allocator, .{
.key = Expr.init(E.String, E.String.init(key), expr.loc),
.value = expr,
});
@@ -590,7 +551,7 @@ pub const Object = struct {
pub fn set(self: *const Object, key: Expr, allocator: std.mem.Allocator, value: Expr) SetError!void {
if (self.hasProperty(key.data.e_string.data)) return error.Clobber;
try self.properties.append(allocator, .{
try self.properties.push(allocator, .{
.key = key,
.value = value,
});
@@ -644,7 +605,7 @@ pub const Object = struct {
value_ = obj;
}
try self.properties.append(allocator, .{
try self.properties.push(allocator, .{
.key = rope.head,
.value = value_,
});
@@ -685,7 +646,7 @@ pub const Object = struct {
if (rope.next) |next| {
var obj = Expr.init(E.Object, E.Object{ .properties = .{} }, rope.head.loc);
const out = try obj.data.e_object.getOrPutObject(next, allocator);
try self.properties.append(allocator, .{
try self.properties.push(allocator, .{
.key = rope.head,
.value = obj,
});
@@ -693,7 +654,7 @@ pub const Object = struct {
}
const out = Expr.init(E.Object, E.Object{}, rope.head.loc);
try self.properties.append(allocator, .{
try self.properties.push(allocator, .{
.key = rope.head,
.value = out,
});
@@ -734,7 +695,7 @@ pub const Object = struct {
if (rope.next) |next| {
var obj = Expr.init(E.Object, E.Object{ .properties = .{} }, rope.head.loc);
const out = try obj.data.e_object.getOrPutArray(next, allocator);
try self.properties.append(allocator, .{
try self.properties.push(allocator, .{
.key = rope.head,
.value = obj,
});
@@ -742,7 +703,7 @@ pub const Object = struct {
}
const out = Expr.init(E.Array, E.Array{}, rope.head.loc);
try self.properties.append(allocator, .{
try self.properties.push(allocator, .{
.key = rope.head,
.value = out,
});
@@ -979,30 +940,6 @@ pub const String = struct {
return bun.handleOom(this.string(allocator));
}
fn stringCompareForJavaScript(comptime T: type, a: []const T, b: []const T) std.math.Order {
const a_slice = a[0..@min(a.len, b.len)];
const b_slice = b[0..@min(a.len, b.len)];
for (a_slice, b_slice) |a_char, b_char| {
const delta: i32 = @as(i32, a_char) - @as(i32, b_char);
if (delta != 0) {
return if (delta < 0) .lt else .gt;
}
}
return std.math.order(a.len, b.len);
}
/// Compares two strings lexicographically for JavaScript semantics.
/// Both strings must share the same encoding (UTF-8 vs UTF-16).
pub inline fn order(this: *const String, other: *const String) std.math.Order {
bun.debugAssert(this.isUTF8() == other.isUTF8());
if (this.isUTF8()) {
return stringCompareForJavaScript(u8, this.data, other.data);
} else {
return stringCompareForJavaScript(u16, this.slice16(), other.slice16());
}
}
pub var empty = String{};
pub var @"true" = String{ .data = "true" };
pub var @"false" = String{ .data = "false" };

View File

@@ -273,10 +273,13 @@ pub fn set(expr: *Expr, allocator: std.mem.Allocator, name: string, value: Expr)
}
}
try expr.data.e_object.properties.append(allocator, .{
var new_props = expr.data.e_object.properties.listManaged(allocator);
try new_props.append(.{
.key = Expr.init(E.String, .{ .data = name }, logger.Loc.Empty),
.value = value,
});
expr.data.e_object.properties = BabyList(G.Property).fromList(new_props);
}
/// Don't use this if you care about performance.
@@ -295,10 +298,13 @@ pub fn setString(expr: *Expr, allocator: std.mem.Allocator, name: string, value:
}
}
try expr.data.e_object.properties.append(allocator, .{
var new_props = expr.data.e_object.properties.listManaged(allocator);
try new_props.append(.{
.key = Expr.init(E.String, .{ .data = name }, logger.Loc.Empty),
.value = Expr.init(E.String, .{ .data = value }, logger.Loc.Empty),
});
expr.data.e_object.properties = BabyList(G.Property).fromList(new_props);
}
pub fn getObject(expr: *const Expr, name: string) ?Expr {
@@ -641,29 +647,6 @@ pub fn jsonStringify(self: *const @This(), writer: anytype) !void {
return try writer.write(Serializable{ .type = std.meta.activeTag(self.data), .object = "expr", .value = self.data, .loc = self.loc });
}
pub fn extractNumericValuesInSafeRange(left: Expr.Data, right: Expr.Data) ?[2]f64 {
const l_value = left.extractNumericValue() orelse return null;
const r_value = right.extractNumericValue() orelse return null;
// Check for NaN and return null if either value is NaN
if (std.math.isNan(l_value) or std.math.isNan(r_value)) {
return null;
}
if (std.math.isInf(l_value) or std.math.isInf(r_value)) {
return .{ l_value, r_value };
}
if (l_value > bun.jsc.MAX_SAFE_INTEGER or r_value > bun.jsc.MAX_SAFE_INTEGER) {
return null;
}
if (l_value < bun.jsc.MIN_SAFE_INTEGER or r_value < bun.jsc.MIN_SAFE_INTEGER) {
return null;
}
return .{ l_value, r_value };
}
pub fn extractNumericValues(left: Expr.Data, right: Expr.Data) ?[2]f64 {
return .{
left.extractNumericValue() orelse return null,
@@ -671,20 +654,6 @@ pub fn extractNumericValues(left: Expr.Data, right: Expr.Data) ?[2]f64 {
};
}
pub fn extractStringValues(left: Expr.Data, right: Expr.Data, allocator: std.mem.Allocator) ?[2]*E.String {
const l_string = left.extractStringValue() orelse return null;
const r_string = right.extractStringValue() orelse return null;
l_string.resolveRopeIfNeeded(allocator);
r_string.resolveRopeIfNeeded(allocator);
if (l_string.isUTF8() != r_string.isUTF8()) return null;
return .{
l_string,
r_string,
};
}
pub var icount: usize = 0;
// We don't need to dynamically allocate booleans
@@ -1438,17 +1407,11 @@ pub fn init(comptime Type: type, st: Type, loc: logger.Loc) Expr {
}
}
/// If this returns true, then calling this expression captures the target of
/// the property access as "this" when calling the function in the property.
pub inline fn isPropertyAccess(this: *const Expr) bool {
return this.hasValueForThisInCall();
}
pub inline fn isPrimitiveLiteral(this: *const Expr) bool {
pub fn isPrimitiveLiteral(this: Expr) bool {
return @as(Tag, this.data).isPrimitiveLiteral();
}
pub inline fn isRef(this: *const Expr, ref: Ref) bool {
pub fn isRef(this: Expr, ref: Ref) bool {
return switch (this.data) {
.e_import_identifier => |import_identifier| import_identifier.ref.eql(ref),
.e_identifier => |ident| ident.ref.eql(ref),
@@ -1910,19 +1873,36 @@ pub const Tag = enum {
}
};
pub fn isBoolean(a: *const Expr) bool {
return switch (a.data) {
.e_boolean => true,
.e_if => |ex| ex.yes.isBoolean() and ex.no.isBoolean(),
.e_unary => |ex| ex.op == .un_not or ex.op == .un_delete,
.e_binary => |ex| switch (ex.op) {
.bin_strict_eq, .bin_strict_ne, .bin_loose_eq, .bin_loose_ne, .bin_lt, .bin_gt, .bin_le, .bin_ge, .bin_instanceof, .bin_in => true,
.bin_logical_or => ex.left.isBoolean() and ex.right.isBoolean(),
.bin_logical_and => ex.left.isBoolean() and ex.right.isBoolean(),
else => false,
pub fn isBoolean(a: Expr) bool {
switch (a.data) {
.e_boolean => {
return true;
},
else => false,
};
.e_if => |ex| {
return isBoolean(ex.yes) and isBoolean(ex.no);
},
.e_unary => |ex| {
return ex.op == .un_not or ex.op == .un_delete;
},
.e_binary => |ex| {
switch (ex.op) {
.bin_strict_eq, .bin_strict_ne, .bin_loose_eq, .bin_loose_ne, .bin_lt, .bin_gt, .bin_le, .bin_ge, .bin_instanceof, .bin_in => {
return true;
},
.bin_logical_or => {
return isBoolean(ex.left) and isBoolean(ex.right);
},
.bin_logical_and => {
return isBoolean(ex.left) and isBoolean(ex.right);
},
else => {},
}
},
else => {},
}
return false;
}
pub fn assign(a: Expr, b: Expr) Expr {
@@ -1932,7 +1912,7 @@ pub fn assign(a: Expr, b: Expr) Expr {
.right = b,
}, a.loc);
}
pub inline fn at(expr: *const Expr, comptime Type: type, t: Type, _: std.mem.Allocator) Expr {
pub inline fn at(expr: Expr, comptime Type: type, t: Type, _: std.mem.Allocator) Expr {
return init(Type, t, expr.loc);
}
@@ -1940,19 +1920,21 @@ pub inline fn at(expr: *const Expr, comptime Type: type, t: Type, _: std.mem.All
// will potentially be simplified to avoid generating unnecessary extra "!"
// operators. For example, calling this with "!!x" will return "!x" instead
// of returning "!!!x".
pub fn not(expr: *const Expr, allocator: std.mem.Allocator) Expr {
return expr.maybeSimplifyNot(allocator) orelse
Expr.init(
E.Unary,
E.Unary{
.op = .un_not,
.value = expr.*,
},
expr.loc,
);
pub fn not(expr: Expr, allocator: std.mem.Allocator) Expr {
return maybeSimplifyNot(
expr,
allocator,
) orelse Expr.init(
E.Unary,
E.Unary{
.op = .un_not,
.value = expr,
},
expr.loc,
);
}
pub inline fn hasValueForThisInCall(expr: *const Expr) bool {
pub fn hasValueForThisInCall(expr: Expr) bool {
return switch (expr.data) {
.e_dot, .e_index => true,
else => false,
@@ -1964,7 +1946,7 @@ pub inline fn hasValueForThisInCall(expr: *const Expr) bool {
/// whole operator (i.e. the "!x") if it can be simplified, or false if not.
/// It's separate from "Not()" above to avoid allocation on failure in case
/// that is undesired.
pub fn maybeSimplifyNot(expr: *const Expr, allocator: std.mem.Allocator) ?Expr {
pub fn maybeSimplifyNot(expr: Expr, allocator: std.mem.Allocator) ?Expr {
switch (expr.data) {
.e_null, .e_undefined => {
return expr.at(E.Boolean, E.Boolean{ .value = true }, allocator);
@@ -1986,7 +1968,7 @@ pub fn maybeSimplifyNot(expr: *const Expr, allocator: std.mem.Allocator) ?Expr {
},
// "!!!a" => "!a"
.e_unary => |un| {
if (un.op == Op.Code.un_not and un.value.knownPrimitive() == .boolean) {
if (un.op == Op.Code.un_not and knownPrimitive(un.value) == .boolean) {
return un.value;
}
},
@@ -1999,33 +1981,33 @@ pub fn maybeSimplifyNot(expr: *const Expr, allocator: std.mem.Allocator) ?Expr {
Op.Code.bin_loose_eq => {
// "!(a == b)" => "a != b"
ex.op = .bin_loose_ne;
return expr.*;
return expr;
},
Op.Code.bin_loose_ne => {
// "!(a != b)" => "a == b"
ex.op = .bin_loose_eq;
return expr.*;
return expr;
},
Op.Code.bin_strict_eq => {
// "!(a === b)" => "a !== b"
ex.op = .bin_strict_ne;
return expr.*;
return expr;
},
Op.Code.bin_strict_ne => {
// "!(a !== b)" => "a === b"
ex.op = .bin_strict_eq;
return expr.*;
return expr;
},
Op.Code.bin_comma => {
// "!(a, b)" => "a, !b"
ex.right = ex.right.not(allocator);
return expr.*;
return expr;
},
else => {},
}
},
.e_inlined_enum => |inlined| {
return inlined.value.maybeSimplifyNot(allocator);
return maybeSimplifyNot(inlined.value, allocator);
},
else => {},
@@ -2034,11 +2016,11 @@ pub fn maybeSimplifyNot(expr: *const Expr, allocator: std.mem.Allocator) ?Expr {
return null;
}
pub fn toStringExprWithoutSideEffects(expr: *const Expr, allocator: std.mem.Allocator) ?Expr {
pub fn toStringExprWithoutSideEffects(expr: Expr, allocator: std.mem.Allocator) ?Expr {
const unwrapped = expr.unwrapInlined();
const slice = switch (unwrapped.data) {
.e_null => "null",
.e_string => return expr.*,
.e_string => return expr,
.e_undefined => "undefined",
.e_boolean => |data| if (data.value) "true" else "false",
.e_big_int => |bigint| bigint.value,
@@ -2072,7 +2054,7 @@ pub fn isOptionalChain(self: *const @This()) bool {
};
}
pub inline fn knownPrimitive(self: *const @This()) PrimitiveType {
pub inline fn knownPrimitive(self: @This()) PrimitiveType {
return self.data.knownPrimitive();
}
@@ -2312,7 +2294,6 @@ pub const Data = union(Tag) {
const item = bun.create(allocator, E.Unary, .{
.op = el.op,
.value = try el.value.deepClone(allocator),
.flags = el.flags,
});
return .{ .e_unary = item };
},
@@ -2525,7 +2506,6 @@ pub const Data = union(Tag) {
}
},
.e_unary => |e| {
writeAnyToHasher(hasher, @as(u8, @bitCast(e.flags)));
writeAnyToHasher(hasher, .{e.op});
e.value.data.writeToHasher(hasher, symbol_table);
},
@@ -2557,7 +2537,7 @@ pub const Data = union(Tag) {
inline .e_spread, .e_await => |e| {
e.value.data.writeToHasher(hasher, symbol_table);
},
.e_yield => |e| {
inline .e_yield => |e| {
writeAnyToHasher(hasher, .{ e.is_star, e.value });
if (e.value) |value|
value.data.writeToHasher(hasher, symbol_table);
@@ -2880,17 +2860,6 @@ pub const Data = union(Tag) {
};
}
pub fn extractStringValue(data: Expr.Data) ?*E.String {
return switch (data) {
.e_string => data.e_string,
.e_inlined_enum => |inlined| switch (inlined.value.data) {
.e_string => |str| str,
else => null,
},
else => null,
};
}
pub const Equality = struct {
equal: bool = false,
ok: bool = false,
@@ -3239,6 +3208,7 @@ const JSPrinter = @import("../js_printer.zig");
const std = @import("std");
const bun = @import("bun");
const BabyList = bun.BabyList;
const Environment = bun.Environment;
const JSONParser = bun.json;
const MutableString = bun.MutableString;

View File

@@ -8,161 +8,18 @@ pub const KnownGlobal = enum {
Response,
TextEncoder,
TextDecoder,
Error,
TypeError,
SyntaxError,
RangeError,
ReferenceError,
EvalError,
URIError,
AggregateError,
Array,
Object,
Function,
RegExp,
pub const map = bun.ComptimeEnumMap(KnownGlobal);
inline fn callFromNew(e: *E.New, loc: logger.Loc) js_ast.Expr {
const call = E.Call{
.target = e.target,
.args = e.args,
.close_paren_loc = e.close_parens_loc,
.can_be_unwrapped_if_unused = e.can_be_unwrapped_if_unused,
};
return js_ast.Expr.init(E.Call, call, loc);
}
pub noinline fn minifyGlobalConstructor(allocator: std.mem.Allocator, noalias e: *E.New, symbols: []const Symbol, loc: logger.Loc, minify_whitespace: bool) ?js_ast.Expr {
const id = if (e.target.data == .e_identifier) e.target.data.e_identifier.ref else return null;
pub noinline fn maybeMarkConstructorAsPure(noalias e: *E.New, symbols: []const Symbol) void {
const id = if (e.target.data == .e_identifier) e.target.data.e_identifier.ref else return;
const symbol = &symbols[id.innerIndex()];
if (symbol.kind != .unbound)
return null;
return;
const constructor = map.get(symbol.original_name) orelse return null;
const constructor = map.get(symbol.original_name) orelse return;
return switch (constructor) {
// Error constructors can be called without 'new' with identical behavior
.Error, .TypeError, .SyntaxError, .RangeError, .ReferenceError, .EvalError, .URIError, .AggregateError => {
// Convert `new Error(...)` to `Error(...)` to save bytes
return callFromNew(e, loc);
},
.Object => {
const n = e.args.len;
if (n == 0) {
// new Object() -> {}
return js_ast.Expr.init(E.Object, E.Object{}, loc);
}
if (n == 1) {
const arg = e.args.ptr[0];
switch (arg.data) {
.e_object, .e_array => {
// new Object({a: 1}) -> {a: 1}
// new Object([1, 2]) -> [1, 2]
return arg;
},
.e_null, .e_undefined => {
// new Object(null) -> {}
// new Object(undefined) -> {}
return js_ast.Expr.init(E.Object, E.Object{}, loc);
},
else => {},
}
}
// For other cases, just remove 'new'
return callFromNew(e, loc);
},
.Array => {
const n = e.args.len;
return switch (n) {
0 => {
// new Array() -> []
return js_ast.Expr.init(E.Array, E.Array{}, loc);
},
1 => {
// For single argument, only convert to literal if we're SURE it's not a number
const arg = e.args.ptr[0];
// Check if it's an object or array literal first
switch (arg.data) {
.e_object, .e_array => {
// new Array({}) -> [{}], new Array([1]) -> [[1]]
// These are definitely not numbers, safe to convert
return js_ast.Expr.init(E.Array, .{ .items = e.args }, loc);
},
else => {},
}
// For other types, check via knownPrimitive
const primitive = arg.knownPrimitive();
// Only convert if we know for certain it's not a number
// unknown could be a number at runtime, so we must preserve Array() call
switch (primitive) {
.null, .undefined, .boolean, .string, .bigint => {
// These are definitely not numbers, safe to convert
return js_ast.Expr.init(E.Array, .{ .items = e.args }, loc);
},
.number => {
if (arg.data != .e_number) {
return callFromNew(e, loc);
}
const val = arg.data.e_number.value;
if (
// only want this with whitespace minification
minify_whitespace and
(val == 0 or
val == 1 or
val == 2 or
val == 3 or
val == 4 or
val == 5 or
val == 6 or
val == 7 or
val == 8 or
val == 9 or
val == 10))
{
const arg_loc = arg.loc;
var list = e.args.moveToListManaged(allocator);
list.clearRetainingCapacity();
bun.handleOom(list.appendNTimes(js_ast.Expr{ .data = js_parser.Prefill.Data.EMissing, .loc = arg_loc }, @intFromFloat(val)));
return js_ast.Expr.init(E.Array, .{ .items = .moveFromList(&list) }, loc);
}
return callFromNew(e, loc);
},
.unknown, .mixed => {
// Could be a number, preserve Array() call
return callFromNew(e, loc);
},
}
},
// > 1
else => {
// new Array(1, 2, 3) -> [1, 2, 3]
// But NOT new Array(3) which creates an array with 3 empty slots
return js_ast.Expr.init(E.Array, .{ .items = e.args }, loc);
},
};
},
.Function => {
// Just remove 'new' for Function
return callFromNew(e, loc);
},
.RegExp => {
// Don't optimize RegExp - the semantics are too complex:
// - new RegExp(re) creates a copy, but RegExp(re) returns the same instance
// - This affects object identity and lastIndex behavior
// - The difference only applies when flags are undefined
// Keep the original new RegExp() call to preserve correct semantics
return null;
},
switch (constructor) {
.WeakSet, .WeakMap => {
const n = e.args.len;
@@ -170,7 +27,7 @@ pub const KnownGlobal = enum {
// "new WeakSet()" is pure
e.can_be_unwrapped_if_unused = .if_unused;
return null;
return;
}
if (n == 1) {
@@ -193,7 +50,6 @@ pub const KnownGlobal = enum {
},
}
}
return null;
},
.Date => {
const n = e.args.len;
@@ -202,7 +58,7 @@ pub const KnownGlobal = enum {
// "new Date()" is pure
e.can_be_unwrapped_if_unused = .if_unused;
return null;
return;
}
if (n == 1) {
@@ -222,7 +78,6 @@ pub const KnownGlobal = enum {
},
}
}
return null;
},
.Set => {
@@ -231,7 +86,7 @@ pub const KnownGlobal = enum {
if (n == 0) {
// "new Set()" is pure
e.can_be_unwrapped_if_unused = .if_unused;
return null;
return;
}
if (n == 1) {
@@ -247,7 +102,6 @@ pub const KnownGlobal = enum {
},
}
}
return null;
},
.Headers => {
@@ -257,9 +111,8 @@ pub const KnownGlobal = enum {
// "new Headers()" is pure
e.can_be_unwrapped_if_unused = .if_unused;
return null;
return;
}
return null;
},
.Response => {
@@ -269,7 +122,7 @@ pub const KnownGlobal = enum {
// "new Response()" is pure
e.can_be_unwrapped_if_unused = .if_unused;
return null;
return;
}
if (n == 1) {
@@ -289,7 +142,6 @@ pub const KnownGlobal = enum {
},
}
}
return null;
},
.TextDecoder, .TextEncoder => {
const n = e.args.len;
@@ -299,12 +151,11 @@ pub const KnownGlobal = enum {
// "new TextDecoder()" is pure
e.can_be_unwrapped_if_unused = .if_unused;
return null;
return;
}
// We _could_ validate the encoding argument
// But let's not bother
return null;
},
.Map => {
@@ -313,7 +164,7 @@ pub const KnownGlobal = enum {
if (n == 0) {
// "new Map()" is pure
e.can_be_unwrapped_if_unused = .if_unused;
return null;
return;
}
if (n == 1) {
@@ -342,20 +193,18 @@ pub const KnownGlobal = enum {
},
}
}
return null;
},
};
}
}
};
const string = []const u8;
const std = @import("std");
const bun = @import("bun");
const js_parser = bun.js_parser;
const logger = bun.logger;
const js_ast = bun.ast;
const E = js_ast.E;
const Symbol = js_ast.Symbol;
const std = @import("std");
const Map = std.AutoHashMapUnmanaged;

View File

@@ -386,7 +386,7 @@ pub const Runner = struct {
const result = Expr.init(
E.Array,
E.Array{
.items = ExprNodeList.empty,
.items = ExprNodeList.init(&[_]Expr{}),
.was_originally_macro = true,
},
this.caller.loc,
@@ -398,7 +398,7 @@ pub const Runner = struct {
var out = Expr.init(
E.Array,
E.Array{
.items = ExprNodeList.empty,
.items = ExprNodeList.init(array[0..0]),
.was_originally_macro = true,
},
this.caller.loc,
@@ -413,7 +413,7 @@ pub const Runner = struct {
continue;
i += 1;
}
out.data.e_array.items = ExprNodeList.fromOwnedSlice(array);
out.data.e_array.items = ExprNodeList.init(array);
_entry.value_ptr.* = out;
return out;
},
@@ -438,37 +438,27 @@ pub const Runner = struct {
.include_value = true,
}).init(this.global, obj);
defer object_iter.deinit();
const out = _entry.value_ptr;
out.* = Expr.init(
var properties = this.allocator.alloc(G.Property, object_iter.len) catch unreachable;
errdefer this.allocator.free(properties);
var out = Expr.init(
E.Object,
E.Object{
.properties = bun.handleOom(
G.Property.List.initCapacity(this.allocator, object_iter.len),
),
.properties = BabyList(G.Property).init(properties),
.was_originally_macro = true,
},
this.caller.loc,
);
const properties = &out.data.e_object.properties;
errdefer properties.clearAndFree(this.allocator);
_entry.value_ptr.* = out;
while (try object_iter.next()) |prop| {
bun.assertf(
object_iter.i == properties.len,
"`properties` unexpectedly modified (length {d}, expected {d})",
.{ properties.len, object_iter.i },
);
properties.appendAssumeCapacity(G.Property{
.key = Expr.init(
E.String,
E.String.init(prop.toOwnedSlice(this.allocator) catch unreachable),
this.caller.loc,
),
properties[object_iter.i] = G.Property{
.key = Expr.init(E.String, E.String.init(prop.toOwnedSlice(this.allocator) catch unreachable), this.caller.loc),
.value = try this.run(object_iter.value),
});
};
}
return out.*;
out.data.e_object.properties = BabyList(G.Property).init(properties[0..object_iter.i]);
_entry.value_ptr.* = out;
return out;
},
.JSON => {
@@ -654,6 +644,7 @@ const Resolver = @import("../resolver/resolver.zig").Resolver;
const isPackagePath = @import("../resolver/resolver.zig").isPackagePath;
const bun = @import("bun");
const BabyList = bun.BabyList;
const Environment = bun.Environment;
const Output = bun.Output;
const Transpiler = bun.Transpiler;

View File

@@ -536,7 +536,7 @@ pub fn NewParser_(
return p.newExpr(E.Call{
.target = require_resolve_ref,
.args = ExprNodeList.fromOwnedSlice(args),
.args = ExprNodeList.init(args),
}, arg.loc);
}
@@ -570,7 +570,7 @@ pub fn NewParser_(
return p.newExpr(
E.Call{
.target = p.valueForRequire(arg.loc),
.args = ExprNodeList.fromOwnedSlice(args),
.args = ExprNodeList.init(args),
},
arg.loc,
);
@@ -648,7 +648,7 @@ pub fn NewParser_(
return p.newExpr(
E.Call{
.target = p.valueForRequire(arg.loc),
.args = ExprNodeList.fromOwnedSlice(args),
.args = ExprNodeList.init(args),
},
arg.loc,
);
@@ -955,7 +955,7 @@ pub fn NewParser_(
.e_identifier => |ident| {
// is this a require("something")
if (strings.eqlComptime(p.loadNameFromRef(ident.ref), "require") and call.args.len == 1 and std.meta.activeTag(call.args.ptr[0].data) == .e_string) {
_ = p.addImportRecord(.require, loc, call.args.at(0).data.e_string.string(p.allocator) catch unreachable);
_ = p.addImportRecord(.require, loc, call.args.first_().data.e_string.string(p.allocator) catch unreachable);
}
},
else => {},
@@ -971,7 +971,7 @@ pub fn NewParser_(
.e_identifier => |ident| {
// is this a require("something")
if (strings.eqlComptime(p.loadNameFromRef(ident.ref), "require") and call.args.len == 1 and std.meta.activeTag(call.args.ptr[0].data) == .e_string) {
_ = p.addImportRecord(.require, loc, call.args.at(0).data.e_string.string(p.allocator) catch unreachable);
_ = p.addImportRecord(.require, loc, call.args.first_().data.e_string.string(p.allocator) catch unreachable);
}
},
else => {},
@@ -1250,7 +1250,7 @@ pub fn NewParser_(
.ref = namespace_ref,
.is_top_level = true,
});
try p.module_scope.generated.append(allocator, namespace_ref);
try p.module_scope.generated.push(allocator, namespace_ref);
for (imports, clause_items) |alias, *clause_item| {
const ref = symbols.get(alias) orelse unreachable;
const alias_name = if (@TypeOf(symbols) == RuntimeImports) RuntimeImports.all[alias] else alias;
@@ -1305,7 +1305,7 @@ pub fn NewParser_(
parts.append(js_ast.Part{
.stmts = stmts,
.declared_symbols = declared_symbols,
.import_record_indices = bun.BabyList(u32).fromOwnedSlice(import_records),
.import_record_indices = bun.BabyList(u32).init(import_records),
.tag = .runtime,
}) catch unreachable;
}
@@ -1360,7 +1360,7 @@ pub fn NewParser_(
.ref = namespace_ref,
.is_top_level = true,
});
try p.module_scope.generated.append(allocator, namespace_ref);
try p.module_scope.generated.push(allocator, namespace_ref);
for (clauses) |entry| {
if (entry.enabled) {
@@ -1374,7 +1374,7 @@ pub fn NewParser_(
.name = LocRef{ .ref = entry.ref, .loc = logger.Loc{} },
});
declared_symbols.appendAssumeCapacity(.{ .ref = entry.ref, .is_top_level = true });
try p.module_scope.generated.append(allocator, entry.ref);
try p.module_scope.generated.push(allocator, entry.ref);
try p.is_import_item.put(allocator, entry.ref, {});
try p.named_imports.put(allocator, entry.ref, .{
.alias = entry.name,
@@ -2113,7 +2113,7 @@ pub fn NewParser_(
//
const hoisted_ref = p.newSymbol(.hoisted, symbol.original_name) catch unreachable;
symbols = p.symbols.items;
bun.handleOom(scope.generated.append(p.allocator, hoisted_ref));
scope.generated.push(p.allocator, hoisted_ref) catch unreachable;
p.hoisted_ref_for_sloppy_mode_block_fn.put(p.allocator, value.ref, hoisted_ref) catch unreachable;
value.ref = hoisted_ref;
symbol = &symbols[hoisted_ref.innerIndex()];
@@ -2258,7 +2258,7 @@ pub fn NewParser_(
.generated = .{},
};
try parent.children.append(allocator, scope);
try parent.children.push(allocator, scope);
scope.strict_mode = parent.strict_mode;
p.current_scope = scope;
@@ -2569,7 +2569,7 @@ pub fn NewParser_(
const name = try strings.append(p.allocator, "import_", try path_name.nonUniqueNameString(p.allocator));
stmt.namespace_ref = try p.newSymbol(.other, name);
var scope: *Scope = p.current_scope;
try scope.generated.append(p.allocator, stmt.namespace_ref);
try scope.generated.push(p.allocator, stmt.namespace_ref);
}
var item_refs = ImportItemForNamespaceMap.init(p.allocator);
@@ -2761,7 +2761,7 @@ pub fn NewParser_(
var scope = p.current_scope;
try scope.generated.append(p.allocator, name.ref.?);
try scope.generated.push(p.allocator, name.ref.?);
return name;
}
@@ -3067,7 +3067,7 @@ pub fn NewParser_(
// this module will be unable to reference this symbol. However, we must
// still add the symbol to the scope so it gets minified (automatically-
// generated code may still reference the symbol).
try p.module_scope.generated.append(p.allocator, ref);
try p.module_scope.generated.push(p.allocator, ref);
return ref;
}
@@ -3141,7 +3141,7 @@ pub fn NewParser_(
entry.key_ptr.* = name;
entry.value_ptr.* = js_ast.Scope.Member{ .ref = ref, .loc = loc };
if (comptime is_generated) {
try p.module_scope.generated.append(p.allocator, ref);
try p.module_scope.generated.push(p.allocator, ref);
}
return ref;
}
@@ -3448,10 +3448,7 @@ pub fn NewParser_(
decls[0] = Decl{
.binding = p.b(B.Identifier{ .ref = ref }, local.loc),
};
try partStmts.append(p.s(
S.Local{ .decls = G.Decl.List.fromOwnedSlice(decls) },
local.loc,
));
try partStmts.append(p.s(S.Local{ .decls = G.Decl.List.init(decls) }, local.loc));
try p.declared_symbols.append(p.allocator, .{ .ref = ref, .is_top_level = true });
}
}
@@ -3466,7 +3463,7 @@ pub fn NewParser_(
.symbol_uses = p.symbol_uses,
.import_symbol_property_uses = p.import_symbol_property_uses,
.declared_symbols = p.declared_symbols.toOwnedSlice(),
.import_record_indices = bun.BabyList(u32).fromOwnedSlice(
.import_record_indices = bun.BabyList(u32).init(
p.import_records_for_current_part.toOwnedSlice(
p.allocator,
) catch unreachable,
@@ -3978,7 +3975,7 @@ pub fn NewParser_(
// checks are not yet handled correctly by bun or esbuild, so this possibility is
// currently ignored.
.un_typeof => {
if (ex.value.data == .e_identifier and ex.flags.was_originally_typeof_identifier) {
if (ex.value.data == .e_identifier) {
return true;
}
@@ -4017,18 +4014,6 @@ pub fn NewParser_(
ex.right.data,
) and
p.exprCanBeRemovedIfUnusedWithoutDCECheck(&ex.left) and p.exprCanBeRemovedIfUnusedWithoutDCECheck(&ex.right),
// Special-case "<" and ">" with string, number, or bigint arguments
.bin_lt, .bin_gt, .bin_le, .bin_ge => {
const left = ex.left.knownPrimitive();
const right = ex.right.knownPrimitive();
switch (left) {
.string, .number, .bigint => {
return right == left and p.exprCanBeRemovedIfUnusedWithoutDCECheck(&ex.left) and p.exprCanBeRemovedIfUnusedWithoutDCECheck(&ex.right);
},
else => {},
}
},
else => {},
}
},
@@ -4249,14 +4234,13 @@ pub fn NewParser_(
// return false;
// }
fn isSideEffectFreeUnboundIdentifierRef(p: *P, value: Expr, guard_condition: Expr, is_yes_branch_: bool) bool {
fn isSideEffectFreeUnboundIdentifierRef(p: *P, value: Expr, guard_condition: Expr, is_yes_branch: bool) bool {
if (value.data != .e_identifier or
p.symbols.items[value.data.e_identifier.ref.innerIndex()].kind != .unbound or
guard_condition.data != .e_binary)
return false;
const binary = guard_condition.data.e_binary.*;
var is_yes_branch = is_yes_branch_;
switch (binary.op) {
.bin_strict_eq, .bin_strict_ne, .bin_loose_eq, .bin_loose_ne => {
@@ -4285,39 +4269,6 @@ pub fn NewParser_(
(binary.op == .bin_strict_ne or binary.op == .bin_loose_ne)) and
id.eql(id2);
},
.bin_lt, .bin_gt, .bin_le, .bin_ge => {
// Pattern match for "typeof x < <string>"
var typeof: Expr.Data = binary.left.data;
var str: Expr.Data = binary.right.data;
// Check if order is flipped: 'u' >= typeof x
if (typeof == .e_string) {
typeof = binary.right.data;
str = binary.left.data;
is_yes_branch = !is_yes_branch;
}
if (typeof == .e_unary and str == .e_string) {
const unary = typeof.e_unary.*;
if (unary.op == .un_typeof and
unary.value.data == .e_identifier and
unary.flags.was_originally_typeof_identifier and
str.e_string.eqlComptime("u"))
{
// In "typeof x < 'u' ? x : null", the reference to "x" is side-effect free
// In "typeof x > 'u' ? x : null", the reference to "x" is side-effect free
if (is_yes_branch == (binary.op == .bin_lt or binary.op == .bin_le)) {
const id = value.data.e_identifier.ref;
const id2 = unary.value.data.e_identifier.ref;
if (id.eql(id2)) {
return true;
}
}
}
}
return false;
},
else => return false,
}
}
@@ -4346,7 +4297,7 @@ pub fn NewParser_(
.ref = (p.declareGeneratedSymbol(.other, symbol_name) catch unreachable),
};
bun.handleOom(p.module_scope.generated.append(p.allocator, loc_ref.ref.?));
p.module_scope.generated.push(p.allocator, loc_ref.ref.?) catch unreachable;
p.is_import_item.put(p.allocator, loc_ref.ref.?, {}) catch unreachable;
@field(p.jsx_imports, @tagName(field)) = loc_ref;
break :brk loc_ref.ref.?;
@@ -4448,7 +4399,7 @@ pub fn NewParser_(
var local = p.s(
S.Local{
.is_export = true,
.decls = Decl.List.fromOwnedSlice(decls),
.decls = Decl.List.init(decls),
},
loc,
);
@@ -4469,7 +4420,7 @@ pub fn NewParser_(
var local = p.s(
S.Local{
.is_export = true,
.decls = Decl.List.fromOwnedSlice(decls),
.decls = Decl.List.init(decls),
},
loc,
);
@@ -4591,7 +4542,7 @@ pub fn NewParser_(
stmts.append(
p.s(S.Local{
.kind = .k_var,
.decls = G.Decl.List.fromOwnedSlice(decls),
.decls = G.Decl.List.init(decls),
.is_export = is_export,
}, stmt_loc),
) catch |err| bun.handleOom(err);
@@ -4600,7 +4551,7 @@ pub fn NewParser_(
stmts.append(
p.s(S.Local{
.kind = .k_let,
.decls = G.Decl.List.fromOwnedSlice(decls),
.decls = G.Decl.List.init(decls),
}, stmt_loc),
) catch |err| bun.handleOom(err);
}
@@ -4685,7 +4636,7 @@ pub fn NewParser_(
const call = p.newExpr(
E.Call{
.target = target,
.args = ExprNodeList.fromOwnedSlice(args_list),
.args = ExprNodeList.init(args_list),
// TODO: make these fully tree-shakable. this annotation
// as-is is incorrect. This would be done by changing all
// enum wrappers into `var Enum = ...` instead of two
@@ -4740,16 +4691,18 @@ pub fn NewParser_(
for (func.func.args, 0..) |arg, i| {
for (arg.ts_decorators.ptr[0..arg.ts_decorators.len]) |arg_decorator| {
var decorators = if (is_constructor)
&class.ts_decorators
class.ts_decorators.listManaged(p.allocator)
else
&prop.ts_decorators;
prop.ts_decorators.listManaged(p.allocator);
const args = p.allocator.alloc(Expr, 2) catch unreachable;
args[0] = p.newExpr(E.Number{ .value = @as(f64, @floatFromInt(i)) }, arg_decorator.loc);
args[1] = arg_decorator;
decorators.append(
p.allocator,
p.callRuntime(arg_decorator.loc, "__legacyDecorateParamTS", args),
) catch |err| bun.handleOom(err);
decorators.append(p.callRuntime(arg_decorator.loc, "__legacyDecorateParamTS", args)) catch unreachable;
if (is_constructor) {
class.ts_decorators.update(decorators);
} else {
prop.ts_decorators.update(decorators);
}
}
}
},
@@ -4779,7 +4732,7 @@ pub fn NewParser_(
target = p.newExpr(E.Dot{ .target = p.newExpr(E.Identifier{ .ref = class.class_name.?.ref.? }, class.class_name.?.loc), .name = "prototype", .name_loc = loc }, loc);
}
var array: std.ArrayList(Expr) = .init(p.allocator);
var array = prop.ts_decorators.listManaged(p.allocator);
if (p.options.features.emit_decorator_metadata) {
switch (prop.kind) {
@@ -4804,7 +4757,7 @@ pub fn NewParser_(
entry.* = p.serializeMetadata(method_arg.ts_metadata) catch unreachable;
}
args[1] = p.newExpr(E.Array{ .items = ExprNodeList.fromOwnedSlice(args_array) }, logger.Loc.Empty);
args[1] = p.newExpr(E.Array{ .items = ExprNodeList.init(args_array) }, logger.Loc.Empty);
array.append(p.callRuntime(loc, "__legacyMetadataTS", args)) catch unreachable;
}
@@ -4829,7 +4782,7 @@ pub fn NewParser_(
{
var args = p.allocator.alloc(Expr, 2) catch unreachable;
args[0] = p.newExpr(E.String{ .data = "design:paramtypes" }, logger.Loc.Empty);
args[1] = p.newExpr(E.Array{ .items = ExprNodeList.empty }, logger.Loc.Empty);
args[1] = p.newExpr(E.Array{ .items = ExprNodeList.init(&[_]Expr{}) }, logger.Loc.Empty);
array.append(p.callRuntime(loc, "__legacyMetadataTS", args)) catch unreachable;
}
}
@@ -4849,7 +4802,7 @@ pub fn NewParser_(
entry.* = p.serializeMetadata(method_arg.ts_metadata) catch unreachable;
}
args[1] = p.newExpr(E.Array{ .items = ExprNodeList.fromOwnedSlice(args_array) }, logger.Loc.Empty);
args[1] = p.newExpr(E.Array{ .items = ExprNodeList.init(args_array) }, logger.Loc.Empty);
array.append(p.callRuntime(loc, "__legacyMetadataTS", args)) catch unreachable;
}
@@ -4866,9 +4819,8 @@ pub fn NewParser_(
}
}
bun.handleOom(array.insertSlice(0, prop.ts_decorators.slice()));
const args = p.allocator.alloc(Expr, 4) catch unreachable;
args[0] = p.newExpr(E.Array{ .items = ExprNodeList.moveFromList(&array) }, loc);
args[0] = p.newExpr(E.Array{ .items = ExprNodeList.init(array.items) }, loc);
args[1] = target;
args[2] = descriptor_key;
args[3] = descriptor_kind;
@@ -4930,10 +4882,10 @@ pub fn NewParser_(
if (class.extends != null) {
const target = p.newExpr(E.Super{}, stmt.loc);
const arguments_ref = p.newSymbol(.unbound, arguments_str) catch unreachable;
bun.handleOom(p.current_scope.generated.append(p.allocator, arguments_ref));
p.current_scope.generated.push(p.allocator, arguments_ref) catch unreachable;
const super = p.newExpr(E.Spread{ .value = p.newExpr(E.Identifier{ .ref = arguments_ref }, stmt.loc) }, stmt.loc);
const args = bun.handleOom(ExprNodeList.initOne(p.allocator, super));
const args = ExprNodeList.one(p.allocator, super) catch unreachable;
constructor_stmts.append(p.s(S.SExpr{ .value = p.newExpr(E.Call{ .target = target, .args = args }, stmt.loc) }, stmt.loc)) catch unreachable;
}
@@ -4981,7 +4933,7 @@ pub fn NewParser_(
stmts.appendSliceAssumeCapacity(instance_decorators.items);
stmts.appendSliceAssumeCapacity(static_decorators.items);
if (class.ts_decorators.len > 0) {
var array = class.ts_decorators.moveToListManaged(p.allocator);
var array = class.ts_decorators.listManaged(p.allocator);
if (p.options.features.emit_decorator_metadata) {
if (constructor_function != null) {
@@ -4997,9 +4949,9 @@ pub fn NewParser_(
param_array[i] = p.serializeMetadata(constructor_arg.ts_metadata) catch unreachable;
}
args[1] = p.newExpr(E.Array{ .items = ExprNodeList.fromOwnedSlice(param_array) }, logger.Loc.Empty);
args[1] = p.newExpr(E.Array{ .items = ExprNodeList.init(param_array) }, logger.Loc.Empty);
} else {
args[1] = p.newExpr(E.Array{ .items = ExprNodeList.empty }, logger.Loc.Empty);
args[1] = p.newExpr(E.Array{ .items = ExprNodeList.init(&[_]Expr{}) }, logger.Loc.Empty);
}
array.append(p.callRuntime(stmt.loc, "__legacyMetadataTS", args)) catch unreachable;
@@ -5007,7 +4959,7 @@ pub fn NewParser_(
}
const args = p.allocator.alloc(Expr, 2) catch unreachable;
args[0] = p.newExpr(E.Array{ .items = ExprNodeList.fromOwnedSlice(array.items) }, stmt.loc);
args[0] = p.newExpr(E.Array{ .items = ExprNodeList.init(array.items) }, stmt.loc);
args[1] = p.newExpr(E.Identifier{ .ref = class.class_name.?.ref.? }, class.class_name.?.loc);
stmts.appendAssumeCapacity(Stmt.assign(
@@ -5417,7 +5369,7 @@ pub fn NewParser_(
name,
loc_ref.ref.?,
);
bun.handleOom(p.module_scope.generated.append(p.allocator, loc_ref.ref.?));
p.module_scope.generated.push(p.allocator, loc_ref.ref.?) catch unreachable;
return loc_ref.ref.?;
}
} else {
@@ -5441,7 +5393,7 @@ pub fn NewParser_(
return p.newExpr(
E.Call{
.target = p.runtimeIdentifier(loc, name),
.args = ExprNodeList.fromOwnedSlice(args),
.args = ExprNodeList.init(args),
},
loc,
);
@@ -5501,7 +5453,7 @@ pub fn NewParser_(
for (to_flatten.children.slice()) |item| {
item.parent = parent;
bun.handleOom(parent.children.append(p.allocator, item));
parent.children.push(p.allocator, item) catch unreachable;
}
}
@@ -5522,7 +5474,7 @@ pub fn NewParser_(
.ref = ref,
}) catch |err| bun.handleOom(err);
bun.handleOom(scope.generated.append(p.allocator, ref));
bun.handleOom(scope.generated.append(p.allocator, &.{ref}));
return ref;
}
@@ -5712,7 +5664,7 @@ pub fn NewParser_(
}
const is_top_level = scope == p.module_scope;
scope.generated.appendSlice(p.allocator, &.{
scope.generated.append(p.allocator, &.{
ctx.stack_ref,
caught_ref,
err_ref,
@@ -5752,7 +5704,7 @@ pub fn NewParser_(
const finally_stmts = finally: {
if (ctx.has_await_using) {
const promise_ref = p.generateTempRef("_promise");
bun.handleOom(scope.generated.append(p.allocator, promise_ref));
bun.handleOom(scope.generated.append(p.allocator, &.{promise_ref}));
p.declared_symbols.appendAssumeCapacity(.{ .is_top_level = is_top_level, .ref = promise_ref });
const promise_ref_expr = p.newExpr(E.Identifier{ .ref = promise_ref }, loc);
@@ -5770,7 +5722,7 @@ pub fn NewParser_(
.binding = p.b(B.Identifier{ .ref = promise_ref }, loc),
.value = call_dispose,
};
break :decls G.Decl.List.fromOwnedSlice(decls);
break :decls G.Decl.List.init(decls);
},
}, loc);
@@ -5806,7 +5758,7 @@ pub fn NewParser_(
.binding = p.b(B.Identifier{ .ref = ctx.stack_ref }, loc),
.value = p.newExpr(E.Array{}, loc),
};
break :decls G.Decl.List.fromOwnedSlice(decls);
break :decls G.Decl.List.init(decls);
},
.kind = .k_let,
}, loc));
@@ -5828,7 +5780,7 @@ pub fn NewParser_(
.binding = p.b(B.Identifier{ .ref = has_err_ref }, loc),
.value = p.newExpr(E.Number{ .value = 1 }, loc),
};
break :decls G.Decl.List.fromOwnedSlice(decls);
break :decls G.Decl.List.init(decls);
},
}, loc);
break :catch_body statements;
@@ -6105,7 +6057,7 @@ pub fn NewParser_(
.body = .{
.stmts = p.allocator.dupe(Stmt, &.{
p.s(S.Return{ .value = p.newExpr(E.Array{
.items = ExprNodeList.fromBorrowedSliceDangerous(ctx.user_hooks.values()),
.items = ExprNodeList.init(ctx.user_hooks.values()),
}, loc) }, loc),
}) catch |err| bun.handleOom(err),
.loc = loc,
@@ -6117,7 +6069,7 @@ pub fn NewParser_(
// _s(func, "<hash>", force, () => [useCustom])
return p.newExpr(E.Call{
.target = Expr.initIdentifier(ctx.signature_cb, loc),
.args = ExprNodeList.fromOwnedSlice(args),
.args = ExprNodeList.init(args),
}, loc);
}
@@ -6198,14 +6150,11 @@ pub fn NewParser_(
}
if (part.import_record_indices.len == 0) {
part.import_record_indices = .fromOwnedSlice(bun.handleOom(
p.allocator.dupe(u32, p.import_records_for_current_part.items),
));
part.import_record_indices = @TypeOf(part.import_record_indices).init(
(p.import_records_for_current_part.clone(p.allocator) catch unreachable).items,
);
} else {
part.import_record_indices.appendSlice(
p.allocator,
p.import_records_for_current_part.items,
) catch |err| bun.handleOom(err);
part.import_record_indices.append(p.allocator, p.import_records_for_current_part.items) catch unreachable;
}
parts.items[parts_end] = part;
@@ -6346,7 +6295,7 @@ pub fn NewParser_(
entry.value_ptr.* = .{};
}
bun.handleOom(entry.value_ptr.append(ctx.allocator, @as(u32, @truncate(ctx.part_index))));
entry.value_ptr.push(ctx.allocator, @as(u32, @truncate(ctx.part_index))) catch unreachable;
}
};
@@ -6372,7 +6321,7 @@ pub fn NewParser_(
entry.value_ptr.* = .{};
}
bun.handleOom(entry.value_ptr.append(p.allocator, js_ast.namespace_export_part_index));
entry.value_ptr.push(p.allocator, js_ast.namespace_export_part_index) catch unreachable;
}
}
@@ -6395,12 +6344,17 @@ pub fn NewParser_(
break :brk Ref.None;
};
const parts_list = bun.BabyList(js_ast.Part).fromList(parts);
return .{
.runtime_imports = p.runtime_imports,
.parts = parts_list,
.module_scope = p.module_scope.*,
.symbols = js_ast.Symbol.List.fromList(p.symbols),
.exports_ref = p.exports_ref,
.wrapper_ref = wrapper_ref,
.module_ref = p.module_ref,
.import_records = ImportRecord.List.fromList(p.import_records),
.export_star_import_records = p.export_star_import_records.items,
.approximate_newline_count = p.lexer.approximate_newline_count,
.exports_kind = exports_kind,
@@ -6440,14 +6394,12 @@ pub fn NewParser_(
.has_commonjs_export_names = p.has_commonjs_export_names,
.hashbang = hashbang,
// TODO: cross-module constant inlining
// .const_values = p.const_values,
.ts_enums = try p.computeTsEnumsMap(allocator),
.import_meta_ref = p.import_meta_ref,
.symbols = js_ast.Symbol.List.moveFromList(&p.symbols),
.parts = bun.BabyList(js_ast.Part).moveFromList(parts),
.import_records = ImportRecord.List.moveFromList(&p.import_records),
.import_meta_ref = p.import_meta_ref,
};
}

View File

@@ -188,7 +188,7 @@ pub const Parser = struct {
// in the `symbols` array.
bun.assert(p.symbols.items.len == 0);
var symbols_ = symbols;
p.symbols = symbols_.moveToListManaged(p.allocator);
p.symbols = symbols_.listManaged(p.allocator);
try p.prepareForVisitPass();
@@ -550,7 +550,10 @@ pub const Parser = struct {
var sliced = try ListManaged(Stmt).initCapacity(p.allocator, 1);
sliced.items.len = 1;
var _local = local.*;
_local.decls = try .initOne(p.allocator, decl);
var list = try ListManaged(G.Decl).initCapacity(p.allocator, 1);
list.items.len = 1;
list.items[0] = decl;
_local.decls.update(list);
sliced.items[0] = p.s(_local, stmt.loc);
try p.appendPart(&parts, sliced.items);
}
@@ -683,7 +686,7 @@ pub const Parser = struct {
var part_stmts = p.allocator.alloc(Stmt, 1) catch unreachable;
part_stmts[0] = p.s(S.Local{
.kind = .k_var,
.decls = Decl.List.fromOwnedSlice(decls),
.decls = Decl.List.init(decls),
}, logger.Loc.Empty);
before.append(js_ast.Part{
.stmts = part_stmts,
@@ -710,7 +713,7 @@ pub const Parser = struct {
var import_part_stmts = remaining_stmts[0..1];
remaining_stmts = remaining_stmts[1..];
bun.handleOom(p.module_scope.generated.append(p.allocator, deferred_import.namespace.ref.?));
bun.handleOom(p.module_scope.generated.push(p.allocator, deferred_import.namespace.ref.?));
import_part_stmts[0] = Stmt.alloc(
S.Import,
@@ -832,7 +835,7 @@ pub const Parser = struct {
part.symbol_uses = .{};
return js_ast.Result{
.ast = js_ast.Ast{
.import_records = ImportRecord.List.moveFromList(&p.import_records),
.import_records = ImportRecord.List.init(p.import_records.items),
.redirect_import_record_index = id,
.named_imports = p.named_imports,
.named_exports = p.named_exports,
@@ -902,10 +905,7 @@ pub const Parser = struct {
break :brk new_stmts.items;
};
part.import_record_indices.append(
p.allocator,
right.data.e_require_string.import_record_index,
) catch |err| bun.handleOom(err);
part.import_record_indices.push(p.allocator, right.data.e_require_string.import_record_index) catch unreachable;
p.symbols.items[p.module_ref.innerIndex()].use_count_estimate = 0;
p.symbols.items[namespace_ref.innerIndex()].use_count_estimate -|= 1;
_ = part.symbol_uses.swapRemove(namespace_ref);
@@ -1165,7 +1165,7 @@ pub const Parser = struct {
var part_stmts = p.allocator.alloc(Stmt, 1) catch unreachable;
part_stmts[0] = p.s(S.Local{
.kind = .k_var,
.decls = Decl.List.fromOwnedSlice(decls),
.decls = Decl.List.init(decls),
}, logger.Loc.Empty);
before.append(js_ast.Part{
.stmts = part_stmts,
@@ -1245,7 +1245,7 @@ pub const Parser = struct {
before.append(js_ast.Part{
.stmts = part_stmts,
.declared_symbols = declared_symbols,
.import_record_indices = bun.BabyList(u32).fromOwnedSlice(import_record_indices),
.import_record_indices = bun.BabyList(u32).init(import_record_indices),
.tag = .bun_test,
}) catch unreachable;

Some files were not shown because too many files have changed in this diff Show More